Replace uses of LongObjectHashtable with Java's HashMap
parent 7af55169c0
commit e3aebe3adb
22 changed files with 407 additions and 767 deletions
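The change is almost entirely mechanical: `LongObjectHashtable<T>`, Ghidra's primitive-long-keyed hashtable, becomes a standard `Map<Long, T>` backed by `HashMap`, with keys autoboxed. The sketch below is not part of the commit; it is a minimal illustration (hypothetical class name, keys, and values) of the recurring API correspondence visible throughout the hunks that follow:

```java
import java.util.HashMap;
import java.util.Map;

public class MigrationSketch {
    public static void main(String[] args) {
        // Before: LongObjectHashtable<String> table = new LongObjectHashtable<>();
        // After:  a standard HashMap keyed on boxed Long values.
        Map<Long, String> table = new HashMap<>();

        table.put(42L, "value");              // put(long, T)   -> put(Long, T)
        String v = table.get(42L);            // get(long)      -> get(Long)
        boolean has = table.containsKey(42L); // contains(long) -> containsKey(Long)
        for (long key : table.keySet()) {     // getKeys()      -> keySet()
            System.out.println(key + " -> " + table.get(key));
        }
        table.clear();                        // removeAll()    -> clear()
        System.out.println(v + " " + has);
    }
}
```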
@@ -33,7 +33,6 @@ import ghidra.program.model.data.*;
 import ghidra.program.model.data.Enum;
 import ghidra.program.model.listing.DataTypeChangeSet;
 import ghidra.util.*;
-import ghidra.util.datastruct.LongObjectHashtable;
 import ghidra.util.exception.*;
 import ghidra.util.task.TaskMonitor;
 
@@ -78,13 +77,13 @@ public class DataTypeMergeManager implements MergeResolver {
     private DataTypeMergePanel dtMergePanel;
     private int totalConflictCount;
     private int currentConflictIndex;
-    private LongObjectHashtable<DataType> myResolvedDts; // maps My data type key -> resolved Data type
-    private LongObjectHashtable<DataType> latestResolvedDts; // maps Latest data type key -> resolved Data type
-    private LongObjectHashtable<DataType> origResolvedDts; // maps Original data type key -> resolved Data type
+    private Map<Long, DataType> myResolvedDts; // maps My data type key -> resolved Data type
+    private Map<Long, DataType> latestResolvedDts; // maps Latest data type key -> resolved Data type
+    private Map<Long, DataType> origResolvedDts; // maps Original data type key -> resolved Data type
     private List<FixUpInfo> fixUpList; // FixUpInfo objects that must be resolved after
     private HashSet<Long> fixUpIDSet; // track types with fixups
     // data types have been added and conflicts resolved.
-    private LongObjectHashtable<CleanUpInfo> cleanupPlaceHolderList; // placeholders that need to be removed.
+    private Map<Long, CleanUpInfo> cleanupPlaceHolderList; // placeholders that need to be removed.
     private int progressIndex; // index for showing progress
 
     private int categoryChoice = ASK_USER;
@@ -700,7 +699,7 @@ public class DataTypeMergeManager implements MergeResolver {
      * the data type existed
      */
     private DataType updateDataTypeName(long id, DataType dt,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
         DataType resultDt = dtms[RESULT].getDataType(id);
         DataType newDt = null;
         if (resultDt != null) {
@@ -771,7 +770,7 @@ public class DataTypeMergeManager implements MergeResolver {
     }
 
     private DataType updateDataType(long id, DataTypeManager dtm,
-            LongObjectHashtable<DataType> resolvedDataTypes, boolean updatePath) {
+            Map<Long, DataType> resolvedDataTypes, boolean updatePath) {
         DataType resultDt = dtms[RESULT].getDataType(id);
         DataType myDt = dtm.getDataType(id);
 
@@ -816,7 +815,7 @@ public class DataTypeMergeManager implements MergeResolver {
     }
 
     private DataType updateDataTypeSource(long id, DataTypeManager dtm,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
         DataType resultDt = dtms[RESULT].getDataType(id);
         DataType myDt = dtm.getDataType(id);
         SourceArchive mySourceArchive = myDt.getSourceArchive();
@@ -832,8 +831,9 @@ public class DataTypeMergeManager implements MergeResolver {
         else {
 
             SourceArchive resultSourceArchive = resultDt.getSourceArchive();
-            if (!resultSourceArchive.getSourceArchiveID().equals(
-                mySourceArchive.getSourceArchiveID())) {
+            if (!resultSourceArchive.getSourceArchiveID()
+                    .equals(
+                        mySourceArchive.getSourceArchiveID())) {
                 resultDt.setSourceArchive(mySourceArchive);
             }
         }
@@ -850,7 +850,7 @@ public class DataTypeMergeManager implements MergeResolver {
      * @return the resulting data type in this data type manager.
      */
     private DataType addDataType(long dataTypeID, DataType dataType,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
 
         DataType existingDt = resolvedDataTypes.get(dataTypeID);
         if (existingDt != null) {
@@ -899,7 +899,7 @@ public class DataTypeMergeManager implements MergeResolver {
      * @return resolved data type that corresponds to id
      */
     private DataType getResolvedBaseType(long id, DataType dt,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
         DataTypeManager dtm = dt.getDataTypeManager();
         DataType baseDt = getBaseDataType(dt);
         if (baseDt == DataType.DEFAULT) {
@@ -933,7 +933,7 @@ public class DataTypeMergeManager implements MergeResolver {
     }
 
     private DataType createPointer(long id, Pointer pointerDt,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
         DataType innerDt = pointerDt.getDataType();
         if (innerDt == DataType.DEFAULT) {
             return pointerDt;
@@ -954,7 +954,7 @@ public class DataTypeMergeManager implements MergeResolver {
     }
 
     private DataType createTypeDef(long id, TypeDef originalTypeDef,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
         DataType innerDataType = originalTypeDef.getDataType();
         if (innerDataType == DataType.DEFAULT) {
             return originalTypeDef;
@@ -985,7 +985,7 @@ public class DataTypeMergeManager implements MergeResolver {
     }
 
     private DataType createArray(long id, Array array,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
         DataType dt = array.getDataType();
         if (dt == DataType.DEFAULT) {
             return array;
@@ -1006,7 +1006,7 @@ public class DataTypeMergeManager implements MergeResolver {
     }
 
     private DataType addComposite(long id, Composite myDt,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
 
         long oldLastChangeTime = myDt.getLastChangeTime();
         long oldLastChangeTimeInSourceArchive = myDt.getLastChangeTimeInSourceArchive();
@@ -1026,7 +1026,7 @@ public class DataTypeMergeManager implements MergeResolver {
     }
 
     private DataType addFunctionDef(long id, FunctionDefinition myDt,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
         FunctionDefinition newDt = (FunctionDefinition) myDt.clone(dtms[RESULT]);
         setCategoryPath(newDt, myDt.getCategoryPath());
         updateFunctionDef(id, myDt, newDt, resolvedDataTypes);
@@ -1034,7 +1034,7 @@ public class DataTypeMergeManager implements MergeResolver {
     }
 
     private void updateHashTables(long id, DataType newDt,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
         resolvedDataTypes.put(id, newDt);
         if (!myDtAddedList.contains(Long.valueOf(id))) {
             if (resolvedDataTypes == myResolvedDts) {
@@ -1053,7 +1053,7 @@ public class DataTypeMergeManager implements MergeResolver {
     }
 
     private DataType getResolvedComponent(long compID,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
         DataType resolvedDt = resolvedDataTypes.get(compID);
         if (resolvedDt != null) {
             // if this is a pointer, typedef, or array, check the
@@ -1095,7 +1095,7 @@ public class DataTypeMergeManager implements MergeResolver {
     }
 
     private void updateFlexArray(long sourceDtID, Structure sourceDt, Structure destStruct,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
 
         DataTypeComponent flexDtc = sourceDt.getFlexibleArrayComponent();
         if (flexDtc == null) {
@@ -1155,7 +1155,7 @@ public class DataTypeMergeManager implements MergeResolver {
     }
 
     private void updateStructure(long sourceDtID, Structure sourceDt, Structure destStruct,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
 
         // NOTE: it is possible for the same destStruct to be updated more than once;
         // therefor we must cleanup any previous obsolete fixups
@@ -1356,7 +1356,7 @@ public class DataTypeMergeManager implements MergeResolver {
     }
 
     private void updateUnion(long sourceDtID, Union sourceDt, Union destUnion,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
 
         // NOTE: it is possible for the same destUnion to be updated more than once;
         // therefor we must cleanup any previous obsolete fixups
@@ -1479,7 +1479,7 @@ public class DataTypeMergeManager implements MergeResolver {
     }
 
     private void updateComposite(long sourceDtID, Composite sourceDt, Composite destDt,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
 
         if (sourceDt instanceof Structure) {
             updateStructure(sourceDtID, (Structure) sourceDt, (Structure) destDt,
@@ -1493,7 +1493,7 @@ public class DataTypeMergeManager implements MergeResolver {
 
     private void updateFunctionDef(long sourceFunctionDefDtID,
             FunctionDefinition sourceFunctionDefDt, FunctionDefinition destDt,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
 
         // NOTE: it is possible for the same function def to be updated more than once;
         // therefor we must cleanup any previous obsolete fixups
@@ -1542,7 +1542,7 @@ public class DataTypeMergeManager implements MergeResolver {
      * has not been resolved yet
      */
     private DataType getResolvedParam(long id, long paramDatatypeID, int index,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
         DataType resolvedDt = getResolvedComponent(paramDatatypeID, resolvedDataTypes);
         if (resolvedDt == null) {
             if (!myDtAddedList.contains(Long.valueOf(paramDatatypeID))) {
@@ -2216,7 +2216,7 @@ public class DataTypeMergeManager implements MergeResolver {
      * data type manager.
      */
     private boolean equivalentDataTypeFound(long myDtID, DataType myDt) {
-        if (myResolvedDts.contains(myDtID)) {
+        if (myResolvedDts.containsKey(myDtID)) {
             return true;
         }
         DataType resultDt = dtms[RESULT].getDataType(myDt.getCategoryPath(), myDt.getName());
@@ -2226,8 +2226,9 @@ public class DataTypeMergeManager implements MergeResolver {
         UniversalID resultDtUniversalID = resultDt.getUniversalID();
         UniversalID myDtUniversalID = myDt.getUniversalID();
         // UniversalID can be null if data type is BuiltIn.
-        if (!resultSourceArchive.getSourceArchiveID().equals(
-            mySourceArchive.getSourceArchiveID()) ||
+        if (!resultSourceArchive.getSourceArchiveID()
+                .equals(
+                    mySourceArchive.getSourceArchiveID()) ||
             !SystemUtilities.isEqual(resultDtUniversalID, myDtUniversalID)) {
             return false;
         }
@@ -2241,7 +2242,7 @@ public class DataTypeMergeManager implements MergeResolver {
 
     private void cleanUpDataTypes() {
         // clean up data types
-        long[] keys = cleanupPlaceHolderList.getKeys();
+        List<Long> keys = new ArrayList<Long>(cleanupPlaceHolderList.keySet());
         for (long key : keys) {
             CleanUpInfo cleanUpInfo = cleanupPlaceHolderList.get(key);
             cleanUpInfo.cleanUp();
@@ -2566,7 +2567,7 @@ public class DataTypeMergeManager implements MergeResolver {
     private void addToCleanupList(FixUpInfo info) {
         long id = info.id;
         int index = info.index;
-        LongObjectHashtable<DataType> ht = info.ht;
+        Map<Long, DataType> ht = info.ht;
         CleanUpInfo cleanUpInfo = cleanupPlaceHolderList.get(id);
         if (cleanUpInfo == null) {
             cleanUpInfo = new CleanUpInfo(id);
@@ -2685,7 +2686,7 @@ public class DataTypeMergeManager implements MergeResolver {
      * @return
      */
     private DataType resolve(long id, DataTypeManager dtm,
-            LongObjectHashtable<DataType> resolvedDataTypes) {
+            Map<Long, DataType> resolvedDataTypes) {
         DataType dt = getResolvedComponent(id, resolvedDataTypes);
         if (dt == null) {
             DataType otherDt = dtm.getDataType(id);
@@ -2961,15 +2962,15 @@ public class DataTypeMergeManager implements MergeResolver {
 
         origDtConflictList = new ArrayList<>(dtConflictList);
 
-        myResolvedDts = new LongObjectHashtable<>();
-        latestResolvedDts = new LongObjectHashtable<>();
-        origResolvedDts = new LongObjectHashtable<>();
+        myResolvedDts = new HashMap<>();
+        latestResolvedDts = new HashMap<>();
+        origResolvedDts = new HashMap<>();
 
         fixUpList = new ArrayList<>();
         fixUpIDSet = new HashSet<>();
         totalConflictCount += dtConflictList.size();
 
-        cleanupPlaceHolderList = new LongObjectHashtable<>();
+        cleanupPlaceHolderList = new HashMap<>();
     }
 
     /**
@@ -3212,7 +3213,7 @@ public class DataTypeMergeManager implements MergeResolver {
         return new String[][] { DATA_TYPES_PHASE };
     }
 
-    private DataTypeManager getDataTypeManager(LongObjectHashtable<DataType> dataTypeMap) {
+    private DataTypeManager getDataTypeManager(Map<Long, DataType> dataTypeMap) {
         if (dataTypeMap == origResolvedDts) {
             return dtms[ORIGINAL];
         }
@@ -3238,7 +3239,7 @@ public class DataTypeMergeManager implements MergeResolver {
         long id;
         long compID;
         int index;
-        LongObjectHashtable<DataType> ht;
+        Map<Long, DataType> ht;
 
         // bitfield info
         int bitOffset = -1;
@@ -3255,7 +3256,7 @@ public class DataTypeMergeManager implements MergeResolver {
         * @param resolvedDataTypes hashtable used for resolving the data type
         */
        FixUpInfo(long id, long compID, int index,
-               LongObjectHashtable<DataType> resolvedDataTypes) {
+               Map<Long, DataType> resolvedDataTypes) {
            this.id = id;
            this.compID = compID;
            this.index = index;
@@ -3271,7 +3272,7 @@ public class DataTypeMergeManager implements MergeResolver {
         * @param resolvedDataTypes hashtable used for resolving the data type
         */
        FixUpInfo(long id, long compID, DataTypeComponent sourceDtc,
-               LongObjectHashtable<DataType> resolvedDataTypes) {
+               Map<Long, DataType> resolvedDataTypes) {
            this(id, compID, getComponentFixupIndex(sourceDtc), resolvedDataTypes);
            if (sourceDtc.isBitFieldComponent()) {
                BitFieldDataType bfDt = (BitFieldDataType) sourceDtc.getDataType();
@@ -3347,7 +3348,7 @@ public class DataTypeMergeManager implements MergeResolver {
     */
    private class CleanUpInfo {
        long id;
-       HashMap<LongObjectHashtable<DataType>, int[]> map; // resolvedDataTypesMap, indexArray
+       Map<Map<Long, DataType>, int[]> map; // resolvedDataTypesMap, indexArray
 
        /**
         * Construct info needed to clean up place holder data types after base types
@@ -3364,7 +3365,7 @@ public class DataTypeMergeManager implements MergeResolver {
         * structure; for other data types, offset is not used (specify -1)
         * @param resolvedDataTypes hashtable used for resolving the data type
         */
-       public void add(int index, LongObjectHashtable<DataType> resolvedDataTypes) {
+       public void add(int index, Map<Long, DataType> resolvedDataTypes) {
           if (map == null) {
              map = new HashMap<>();
          }
@@ -3383,10 +3384,10 @@ public class DataTypeMergeManager implements MergeResolver {
            if (map == null) {
                return;
            }
-           Set<LongObjectHashtable<DataType>> keySet = map.keySet();
-           Iterator<LongObjectHashtable<DataType>> iterator = keySet.iterator();
+           Set<Map<Long, DataType>> keySet = map.keySet();
+           Iterator<Map<Long, DataType>> iterator = keySet.iterator();
            while (iterator.hasNext()) {
-               LongObjectHashtable<DataType> ht = iterator.next();
+               Map<Long, DataType> ht = iterator.next();
                DataType dt = ht.get(id);
                if (dt instanceof Composite) {
                    int[] indexArray = map.get(ht);
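Since the merge manager above now keys its resolved-type maps on boxed `Long` data-type IDs, it is worth noting why the swap is behavior-preserving: `HashMap` compares keys with `equals`, not `==`, so boxed keys behave exactly like the old primitive `long` keys. A tiny illustrative check (hypothetical values, not from the commit):

```java
public class BoxedKeyNote {
    public static void main(String[] args) {
        // HashMap<Long, T> uses equals() on keys, so boxing is safe here;
        // only direct == comparisons on boxed Longs would be risky.
        Long a = 1024L, b = 1024L;
        System.out.println(a.equals(b)); // true
        System.out.println(a == b);      // false (outside the small-value cache)
    }
}
```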
@@ -39,7 +39,6 @@ import ghidra.program.model.symbol.*;
 import ghidra.program.util.DiffUtility;
 import ghidra.program.util.ProgramMerge;
 import ghidra.util.*;
-import ghidra.util.datastruct.LongObjectHashtable;
 import ghidra.util.datastruct.ObjectIntHashtable;
 import ghidra.util.exception.*;
 import ghidra.util.task.TaskMonitor;
@@ -122,9 +121,9 @@ abstract class AbstractFunctionMerger implements ListingMergeConstants {
 
     protected AddressFactory resultAddressFactory;
 
-    protected LongObjectHashtable<DataType> latestResolvedDts; // maps data type ID -> resolved Data type
-    protected LongObjectHashtable<DataType> myResolvedDts; // maps data type ID -> resolved Data type
-    protected LongObjectHashtable<DataType> origResolvedDts;
+    protected Map<Long, DataType> latestResolvedDts; // maps data type ID -> resolved Data type
+    protected Map<Long, DataType> myResolvedDts; // maps data type ID -> resolved Data type
+    protected Map<Long, DataType> origResolvedDts;
 
     // mergePanel is a panel for listing merge conflicts.
     // listings in CENTER, conflictInfoPanel in NORTH, mergeConflicts in SOUTH.
@@ -267,9 +266,10 @@ abstract class AbstractFunctionMerger implements ListingMergeConstants {
         // See if both changed to same value.
         switch (type) {
             case FUNC_RETURN_ADDRESS_OFFSET:
-                return (latestStack.getReturnAddressOffset() == myStack.getReturnAddressOffset())
-                    ? 0
-                    : type;
+                return (latestStack.getReturnAddressOffset() == myStack
+                    .getReturnAddressOffset())
+                        ? 0
+                        : type;
             // For now, we are not allowing you to set the parameter offset or local size outright.
             // case FUNC_PARAMETER_OFFSET:
             // return (latestStack.getParameterOffset() == myStack.getParameterOffset()) ? 0
@@ -277,9 +277,10 @@ abstract class AbstractFunctionMerger implements ListingMergeConstants {
             // case FUNC_LOCAL_SIZE:
             // return (latestStack.getLocalSize() == myStack.getLocalSize()) ? 0 : type;
             case FUNC_STACK_PURGE_SIZE:
-                return (functions[LATEST].getStackPurgeSize() == functions[MY].getStackPurgeSize())
-                    ? 0
-                    : type;
+                return (functions[LATEST].getStackPurgeSize() == functions[MY]
+                    .getStackPurgeSize())
+                        ? 0
+                        : type;
             case FUNC_NAME:
                 return hasUnresolvedFunctionNameConflict(functions, monitor) ? type : 0;
             case FUNC_INLINE:
@@ -292,12 +293,14 @@ abstract class AbstractFunctionMerger implements ListingMergeConstants {
             // return (functions[LATEST].hasCustomVariableStorage() == functions[MY].hasCustomVariableStorage()) ? 0
             // : type;
             case FUNC_CALLING_CONVENTION:
-                return (functions[LATEST].getCallingConventionName().equals(
-                    functions[MY].getCallingConventionName())) ? 0 : type;
+                return (functions[LATEST].getCallingConventionName()
+                    .equals(
+                        functions[MY].getCallingConventionName())) ? 0 : type;
             case FUNC_SIGNATURE_SOURCE:
-                return (functions[LATEST].getSignatureSource() == functions[MY].getSignatureSource())
-                    ? 0
-                    : type;
+                return (functions[LATEST].getSignatureSource() == functions[MY]
+                    .getSignatureSource())
+                        ? 0
+                        : type;
             default:
                 throw new IllegalArgumentException("type = " + type);
         }
@@ -17,6 +17,7 @@ package ghidra.app.merge.listing;
 
 import java.lang.reflect.InvocationTargetException;
 import java.util.ArrayList;
+import java.util.Map;
 
 import javax.swing.SwingUtilities;
 import javax.swing.event.ChangeEvent;
@@ -35,7 +36,6 @@ import ghidra.program.model.mem.MemoryAccessException;
 import ghidra.program.model.util.CodeUnitInsertionException;
 import ghidra.program.util.*;
 import ghidra.util.Msg;
-import ghidra.util.datastruct.LongObjectHashtable;
 import ghidra.util.exception.CancelledException;
 import ghidra.util.exception.NotYetImplementedException;
 import ghidra.util.task.TaskMonitor;
@@ -100,8 +100,8 @@ class CodeUnitMerger extends AbstractListingMerger {
     ProgramMerge mergeLatest;
     ProgramMerge mergeOriginal;
 
-    private LongObjectHashtable<DataType> myResolvedDts; // maps data type ID -> resolved Data type
-    private LongObjectHashtable<DataType> origResolvedDts;
+    private Map<Long, DataType> myResolvedDts; // maps data type ID -> resolved Data type
+    private Map<Long, DataType> origResolvedDts;
 
     /**
      * Manages code unit changes and conflicts between the latest versioned
@@ -147,9 +147,9 @@ class CodeUnitMerger extends AbstractListingMerger {
         mergeLatest = listingMergeMgr.mergeLatest;
         mergeOriginal = listingMergeMgr.mergeOriginal;
 
-        myResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
+        myResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
             MergeConstants.RESOLVED_MY_DTS);
-        origResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
+        origResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
             MergeConstants.RESOLVED_ORIGINAL_DTS);
 
         mergedCodeUnits = new AddressSet();
@@ -36,7 +36,8 @@ import ghidra.program.model.mem.MemoryAccessException;
 import ghidra.program.model.symbol.*;
 import ghidra.program.util.*;
 import ghidra.util.*;
-import ghidra.util.datastruct.*;
+import ghidra.util.datastruct.LongLongHashtable;
+import ghidra.util.datastruct.ObjectIntHashtable;
 import ghidra.util.exception.*;
 import ghidra.util.task.TaskMonitor;
 
@@ -454,11 +455,11 @@ public class ExternalFunctionMerger extends AbstractFunctionMerger implements Li
             throws ProgramConflictException, MemoryAccessException, CancelledException {
 
         if (mergeManager != null) {
-            latestResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
+            latestResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
                 MergeConstants.RESOLVED_LATEST_DTS);
-            myResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
+            myResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
                 MergeConstants.RESOLVED_MY_DTS);
-            origResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
+            origResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
                 MergeConstants.RESOLVED_ORIGINAL_DTS);
 
             latestResolvedSymbols = (LongLongHashtable) mergeManager.getResolveInformation(
@@ -1816,8 +1817,9 @@ public class ExternalFunctionMerger extends AbstractFunctionMerger implements Li
             }
             if (originalImportedName != null) {
                 try {
-                    resultExternalLocation.getSymbol().setNameAndNamespace(externalLocation.getLabel(),
-                        resolvedNamespace, externalLocation.getSource());
+                    resultExternalLocation.getSymbol()
+                        .setNameAndNamespace(externalLocation.getLabel(),
+                            resolvedNamespace, externalLocation.getSource());
                 }
                 catch (CircularDependencyException e) {
                     throw new AssertException(e);
@@ -2141,13 +2143,15 @@ public class ExternalFunctionMerger extends AbstractFunctionMerger implements Li
         }
         // If we have a function variable storage choice then a "Use For All" has already occurred.
         if (variableStorageChoice != ASK_USER) {
-            for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts.getOverlappingVariables()) {
+            for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts
+                    .getOverlappingVariables()) {
                 monitor.checkCanceled();
                 mergeVariableStorage(functions, pair, variableStorageChoice, monitor);
             }
         }
         else if (askUser && mergeManager != null) {
-            for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts.getOverlappingVariables()) {
+            for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts
+                    .getOverlappingVariables()) {
                 monitor.checkCanceled();
                 boolean useForAll = (variableStorageChoice != ASK_USER);
                 if (useForAll) {
@@ -2164,7 +2168,8 @@ public class ExternalFunctionMerger extends AbstractFunctionMerger implements Li
             }
         }
         else {
-            for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts.getOverlappingVariables()) {
+            for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts
+                    .getOverlappingVariables()) {
                 monitor.checkCanceled();
                 mergeVariableStorage(functions, pair, currentConflictOption, monitor);
             }
@@ -37,7 +37,6 @@ import ghidra.program.model.symbol.Namespace;
 import ghidra.program.util.*;
 import ghidra.util.Msg;
 import ghidra.util.SystemUtilities;
-import ghidra.util.datastruct.LongObjectHashtable;
 import ghidra.util.exception.*;
 import ghidra.util.task.TaskMonitor;
 
@@ -213,11 +212,11 @@ class FunctionMerger extends AbstractFunctionMerger implements ListingMerger {
     public void autoMerge(int progressMin, int progressMax, TaskMonitor monitor)
             throws ProgramConflictException, MemoryAccessException, CancelledException {
 
-        latestResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
+        latestResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
            MergeConstants.RESOLVED_LATEST_DTS);
-        myResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
+        myResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
            MergeConstants.RESOLVED_MY_DTS);
-        origResolvedDts = (LongObjectHashtable<DataType>) mergeManager.getResolveInformation(
+        origResolvedDts = (Map<Long, DataType>) mergeManager.getResolveInformation(
            MergeConstants.RESOLVED_ORIGINAL_DTS);
 
         initializeAutoMerge("Auto-merging Functions and determining conflicts.", progressMin,
@@ -999,13 +998,15 @@ class FunctionMerger extends AbstractFunctionMerger implements ListingMerger {
         }
         // If we have a function variable storage choice then a "Use For All" has already occurred.
         if (variableStorageChoice != ASK_USER) {
-            for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts.getOverlappingVariables()) {
+            for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts
+                    .getOverlappingVariables()) {
                 monitor.checkCanceled();
                 mergeVariableStorage(addr, pair, variableStorageChoice, monitor);
             }
         }
         else if (askUser && mergeManager != null) {
-            for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts.getOverlappingVariables()) {
+            for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts
+                    .getOverlappingVariables()) {
                 monitor.checkCanceled();
                 boolean useForAll = (variableStorageChoice != ASK_USER);
                 if (useForAll) {
@@ -1022,7 +1023,8 @@ class FunctionMerger extends AbstractFunctionMerger implements ListingMerger {
             }
         }
         else {
-            for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts.getOverlappingVariables()) {
+            for (Pair<List<Variable>, List<Variable>> pair : variableStorageConflicts
+                    .getOverlappingVariables()) {
                 monitor.checkCanceled();
                 mergeVariableStorage(addr, pair, currentConflictOption, monitor);
             }
@@ -1355,8 +1357,10 @@ class FunctionMerger extends AbstractFunctionMerger implements ListingMerger {
     protected void mergeParameters(Address entryPtAddress, int chosenConflictOption,
             TaskMonitor monitor) {
         Function resultFunction =
-            listingMergeManager.mergeLatest.getResultProgram().getFunctionManager().getFunctionAt(
-                entryPtAddress);
+            listingMergeManager.mergeLatest.getResultProgram()
+                .getFunctionManager()
+                .getFunctionAt(
+                    entryPtAddress);
         if (resultFunction == null) {
             return;
         }
@@ -1878,7 +1882,8 @@ class FunctionMerger extends AbstractFunctionMerger implements ListingMerger {
         if (function1.getStackPurgeSize() != function2.getStackPurgeSize()) {
             return false;
         }
-        if (function1.getStackFrame().getReturnAddressOffset() != function2.getStackFrame().getReturnAddressOffset()) {
+        if (function1.getStackFrame().getReturnAddressOffset() != function2.getStackFrame()
+                .getReturnAddressOffset()) {
             return false;
         }
         if (!function1.getCallingConventionName().equals(function2.getCallingConventionName())) {
@@ -15,16 +15,14 @@
  */
 package ghidra.app.plugin.match;
 
+import java.util.*;
+
 import ghidra.program.model.address.Address;
 import ghidra.program.model.address.AddressSetView;
 import ghidra.program.model.listing.*;
-import ghidra.util.datastruct.LongObjectHashtable;
 import ghidra.util.exception.CancelledException;
 import ghidra.util.task.TaskMonitor;
 
-import java.util.ArrayList;
-import java.util.List;
-
 /**
  * This class does the work of matching subroutines. Every subroutine
  * in the current program is hashed and the start address is put into a
@@ -44,12 +42,13 @@ public class MatchFunctions {
             boolean includeOneToOne, boolean includeNonOneToOne, FunctionHasher hasher,
             TaskMonitor monitor) throws CancelledException {
 
-        LongObjectHashtable<Match> functionHashes = new LongObjectHashtable<Match>();
+        Map<Long, Match> functionHashes = new HashMap<>();
         List<MatchedFunctions> functionMatches = new ArrayList<MatchedFunctions>();
         FunctionIterator aProgfIter = aProgram.getFunctionManager().getFunctions(setA, true);
         FunctionIterator bProgfIter = bProgram.getFunctionManager().getFunctions(setB, true);
         monitor.setIndeterminate(false);
-        monitor.initialize(2 * (aProgram.getFunctionManager().getFunctionCount() + bProgram.getFunctionManager().getFunctionCount()));
+        monitor.initialize(2 * (aProgram.getFunctionManager().getFunctionCount() +
+            bProgram.getFunctionManager().getFunctionCount()));
         monitor.setMessage("Hashing functions in " + aProgram.getName());
 
         // Hash functions in program A
@@ -73,17 +72,15 @@ public class MatchFunctions {
 
         //Find the remaining hash matches ---> unique code match left and THERE is no symbol that matches
         //in the other program.
-        long[] keys = functionHashes.getKeys();
         final long progress = monitor.getProgress();
-        monitor.setMaximum(progress + keys.length);
+        monitor.setMaximum(progress + functionHashes.size());
         monitor.setProgress(progress);
         monitor.setMessage("Finding function matches");
-        for (int i = 0; i < keys.length; i++) {
+        for (Match match : functionHashes.values()) {
             monitor.incrementProgress(1);
             if (monitor.isCancelled()) {
                 break;
             }
-            Match match = functionHashes.get(keys[i]);
             ArrayList<Address> aProgAddrs = match.aAddresses;
             ArrayList<Address> bProgAddrs = match.bAddresses;
             if ((includeOneToOne && aProgAddrs.size() == 1 && bProgAddrs.size() == 1) ||
@@ -103,15 +100,17 @@ public class MatchFunctions {
     }
 
     public static List<MatchedFunctions> matchOneFunction(Program aProgram, Address aEntryPoint,
-            Program bProgram, FunctionHasher hasher, TaskMonitor monitor) throws CancelledException {
+            Program bProgram, FunctionHasher hasher, TaskMonitor monitor)
+            throws CancelledException {
         return matchOneFunction(aProgram, aEntryPoint, bProgram, null, hasher, monitor);
     }
 
     // Finds all matches in program B to the function in Program A
     public static List<MatchedFunctions> matchOneFunction(Program aProgram, Address aEntryPoint,
-            Program bProgram, AddressSetView bAddressSet, FunctionHasher hasher, TaskMonitor monitor)
+            Program bProgram, AddressSetView bAddressSet, FunctionHasher hasher,
+            TaskMonitor monitor)
             throws CancelledException {
-        LongObjectHashtable<Match> functionHashes = new LongObjectHashtable<Match>();
+        Map<Long, Match> functionHashes = new HashMap<>();
         List<MatchedFunctions> functionMatches = new ArrayList<MatchedFunctions>();
 
         Function aFunc = aProgram.getFunctionManager().getFunctionContaining(aEntryPoint);
@@ -131,12 +130,12 @@ public class MatchFunctions {
 
         //Find the remaining hash matches ---> unique code match left and THERE is no symbol that matches
         //in the other program.
-        long[] keys = functionHashes.getKeys();
-        for (int i = 0; i < keys.length; i++) {
+        List<Long> keys = new ArrayList<>(functionHashes.keySet());
+        for (long key : keys) {
            if (monitor.isCancelled()) {
                break;
            }
-           Match match = functionHashes.get(keys[i]);
+           Match match = functionHashes.get(key);
            ArrayList<Address> aProgAddrs = match.aAddresses;
            ArrayList<Address> bProgAddrs = match.bAddresses;
 
@@ -149,7 +148,7 @@ public class MatchFunctions {
                    "Code Only Match");
                functionMatches.add(functionMatch);
            }
-           functionHashes.remove(keys[i]);
+           functionHashes.remove(key);
        }
    }
 
@@ -157,7 +156,7 @@ public class MatchFunctions {
    }
 
    private static void hashFunction(TaskMonitor monitor,
-           LongObjectHashtable<Match> functionHashes, Function function, FunctionHasher hasher,
+           Map<Long, Match> functionHashes, Function function, FunctionHasher hasher,
           boolean isProgA) throws CancelledException {
 
        long hash = hasher.hash(function, monitor);
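Note the two iteration styles the new MatchFunctions code adopts: plain `values()` when entries are only read, and a copied key list when entries are removed mid-loop. Removing through the live `keySet()` view while iterating it typically throws `ConcurrentModificationException`, which is why the copy matters. A minimal sketch (illustrative names, not from the commit):

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SafeRemovalDemo {
    public static void main(String[] args) {
        Map<Long, String> hashes = new HashMap<>();
        hashes.put(1L, "a");
        hashes.put(2L, "b");

        // Iterate a snapshot of the keys so that remove() cannot invalidate
        // the iterator backing the loop.
        List<Long> keys = new ArrayList<>(hashes.keySet());
        for (long key : keys) {
            hashes.remove(key);
        }
        System.out.println(hashes.isEmpty()); // true
    }
}
```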
@@ -1,6 +1,5 @@
 /* ###
  * IP: GHIDRA
- * REVIEWED: YES
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -16,13 +15,13 @@
  */
 package ghidra.server.remote;
 
-import ghidra.util.Msg;
-
 import java.io.*;
 import java.net.ServerSocket;
 import java.net.Socket;
 import java.util.ArrayList;
 
+import ghidra.util.Msg;
+
 /**
  * <code>RMIClassServer</code> provides a server for serializing classes to an
  * RMI client as needed. This implementation starts a new listener thread each
@@ -479,7 +479,7 @@ public class AssociationDatabaseManager implements VTAssociationManager {
 
         if (status == ACCEPTED) {
             associationDB.setStatus(AVAILABLE);
-            associationCache.invalidate(associationDB.getKey());
+            associationDB.setInvalid();
             unblockRelatedAssociations(associationDB);
             for (AssociationHook hook : associationHooks) {
                 hook.associationCleared(associationDB);
@@ -545,7 +545,7 @@ public class AssociationDatabaseManager implements VTAssociationManager {
             case AVAILABLE:
                 throw new AssertException("Attempted to unblock a non-blocked association!");
             case BLOCKED:
-                associationCache.invalidate(relatedAssociation.getKey());
+                relatedAssociation.setInvalid();
                 relatedAssociation.setStatus(computeBlockedStatus(relatedAssociation));
                 break;
             case REJECTED:
@@ -23,7 +23,6 @@ import org.jdom.input.SAXBuilder;
 import org.jdom.output.XMLOutputter;
 
 import ghidra.framework.store.*;
-import ghidra.util.datastruct.LongObjectHashtable;
 import ghidra.util.xml.GenericXMLOutputter;
 import ghidra.util.xml.XmlUtilities;
 
@@ -40,7 +39,7 @@ class CheckoutManager {
     private long nextCheckoutId = 1;
 
     // checkouts maps long checkoutId to ItemCheckoutStatus objects
-    private LongObjectHashtable<ItemCheckoutStatus> checkouts;
+    private Map<Long, ItemCheckoutStatus> checkouts;
 
     /**
      * Constructor.
@@ -53,7 +52,7 @@ class CheckoutManager {
     CheckoutManager(LocalFolderItem item, boolean create) throws IOException {
         this.item = item;
         if (create) {
-            checkouts = new LongObjectHashtable<ItemCheckoutStatus>();
+            checkouts = new HashMap<>();
             writeCheckoutsFile();
         }
     }
@@ -164,8 +163,7 @@ class CheckoutManager {
      */
     synchronized boolean isCheckedOut(int version) throws IOException {
         validate();
-        long[] ids = checkouts.getKeys();
-        for (long id : ids) {
+        for (long id : checkouts.keySet()) {
             ItemCheckoutStatus coStatus = checkouts.get(id);
             if (coStatus.getCheckoutVersion() == version) {
                 return true;
@@ -199,13 +197,9 @@ class CheckoutManager {
      */
     synchronized ItemCheckoutStatus[] getAllCheckouts() throws IOException {
         validate();
-        long[] ids = checkouts.getKeys();
-        Arrays.sort(ids);
-        ItemCheckoutStatus[] list = new ItemCheckoutStatus[ids.length];
-        for (int i = 0; i < ids.length; i++) {
-            list[i] = checkouts.get(ids[i]);
-        }
-        return list;
+        List<ItemCheckoutStatus> list = new ArrayList<>(checkouts.values());
+        Collections.sort(list, (a, b) -> (int) (a.getCheckoutId() - b.getCheckoutId()));
+        return list.toArray(new ItemCheckoutStatus[list.size()]);
     }
 
     /**
@@ -219,7 +213,6 @@ class CheckoutManager {
             checkouts = null;
         }
         if (checkouts == null) {
-            LongObjectHashtable<ItemCheckoutStatus> oldCheckouts = checkouts;
             long oldNextCheckoutId = nextCheckoutId;
             boolean success = false;
             try {
@@ -229,7 +222,7 @@ class CheckoutManager {
             finally {
                 if (!success) {
                     nextCheckoutId = oldNextCheckoutId;
-                    checkouts = oldCheckouts;
+                    checkouts = null;
                 }
             }
         }
@@ -243,7 +236,7 @@ class CheckoutManager {
     @SuppressWarnings("unchecked")
     private void readCheckoutsFile() throws IOException {
 
-        checkouts = new LongObjectHashtable<ItemCheckoutStatus>();
+        checkouts = new HashMap<>();
 
         File checkoutsFile = getCheckoutsFile();
        if (!checkoutsFile.exists()) {
@@ -317,8 +310,7 @@ class CheckoutManager {
        Element root = new Element("CHECKOUT_LIST");
        root.setAttribute("NEXT_ID", Long.toString(nextCheckoutId));
 
-       long[] ids = checkouts.getKeys();
-       for (long id : ids) {
+       for (long id : checkouts.keySet()) {
           ItemCheckoutStatus coStatus = checkouts.get(id);
           // TRANSIENT checkout data must not be persisted - the existence
           // of such checkouts is retained in-memory only
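One hedged caveat on the new `getAllCheckouts()`: a comparator written as `(int) (a.getCheckoutId() - b.getCheckoutId())` can overflow the `int` cast and mis-order IDs that are more than `Integer.MAX_VALUE` apart. For sequentially assigned checkout IDs that is unlikely to matter in practice, but `Long.compare` avoids the hazard entirely. An illustrative alternative (not part of the commit):

```java
import java.util.ArrayList;
import java.util.List;

public class CheckoutIdOrdering {
    public static void main(String[] args) {
        List<Long> ids = new ArrayList<>(List.of(0L, Long.MAX_VALUE, 1L));

        // (int) (a - b) overflows when ids are far apart: Long.MAX_VALUE - 0
        // casts to int as -1, which would claim Long.MAX_VALUE sorts before 0.
        ids.sort((a, b) -> Long.compare(a, b));

        System.out.println(ids); // [0, 1, 9223372036854775807]
    }
}
```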
@@ -15,10 +15,9 @@
  */
 package ghidra.util.datastruct;
 
-
 import java.lang.ref.ReferenceQueue;
 import java.lang.ref.WeakReference;
-import java.util.LinkedList;
+import java.util.*;
 
 /**
  * <code>ObjectClass</code> provides a fixed-size long-key-based object cache.
@@ -30,23 +29,23 @@ import java.util.LinkedList;
  * an object in the weak cache.
  */
 public class ObjectCache {
 
-    private LongObjectHashtable<Object> hashTable;
-
+    private Map<Long, KeyedSoftReference<?>> hashTable;
     private ReferenceQueue<Object> refQueue;
     private LinkedList<Object> hardCache;
     private int hardCacheSize;
 
     /**
      * Construct a keyed-object cache of size hardCacheSize.
      * @param hardCacheSize hard cache size.
      */
     public ObjectCache(int hardCacheSize) {
         this.hardCacheSize = hardCacheSize;
-        hashTable = new LongObjectHashtable<>();
+        hashTable = new HashMap<>();
         refQueue = new ReferenceQueue<Object>();
         hardCache = new LinkedList<Object>();
     }
 
     /**
      * Determine if the keyed-object exists in the cache.
      * @param key object key
@@ -54,34 +53,35 @@ public class ObjectCache {
      */
     public synchronized boolean contains(long key) {
         processQueue();
-        return hashTable.contains(key);
+        return hashTable.containsKey(key);
     }
 
     /**
      * Get the object from cache which corresponds to the specified key.
      * @param key object key
      * @return cached object
      */
     public synchronized Object get(long key) {
-        WeakReference<?> ref = (WeakReference<?>)hashTable.get(key);
+        WeakReference<?> ref = hashTable.get(key);
         if (ref != null) {
             Object obj = ref.get();
             if (obj == null) {
                 hashTable.remove(key);
             }
             addToHardCache(obj);
             return obj;
         }
         return null;
     }
 
     /**
      * Return the hard cache size
      * @return the hard cache size
      */
     public int size() {
         return hardCacheSize;
     }
 
     /**
      * Adjust the hard cache size
      * @param size new hard cache size
@@ -92,7 +92,7 @@ public class ObjectCache {
         }
         this.hardCacheSize = size;
     }
 
     /**
      * Add an object to the cache
      * @param key object key
@@ -111,16 +111,14 @@ public class ObjectCache {
      */
     public synchronized void clear() {
         processQueue();
-        long[] keys = hashTable.getKeys();
-        for ( long element : keys ) {
-            KeyedSoftReference<?> ref = (KeyedSoftReference<?>)hashTable.get(element);
+        for (KeyedSoftReference<?> ref : hashTable.values()) {
             ref.clear();
         }
 
-        hashTable.removeAll();
+        hashTable.clear();
         refQueue = new ReferenceQueue<Object>();
     }
 
     /**
      * Remove the specified range of keyed objects from both hard and weak caches.
      * A cache range should be cleared when the corresponding objects have become invalid.
@@ -128,21 +126,16 @@ public class ObjectCache {
      * @param endKey maximum object key value
      */
     public synchronized void remove(long startKey, long endKey) {
-        if ((endKey>>1)-(startKey>>1) < (hashTable.size()>>1)) {
-            for(long i=startKey;i<=endKey;i++) {
+        if ((endKey >> 1) - (startKey >> 1) < (hashTable.size() >> 1)) {
+            for (long i = startKey; i <= endKey; i++) {
                 remove(i);
             }
         }
         else {
-            long[] keys = hashTable.getKeys();
-            for ( long element : keys ) {
-                if (element >= startKey && element <= endKey) {
-                    remove(element);
-                }
-            }
+            hashTable.keySet().removeIf(key -> (key >= startKey && key <= endKey));
         }
     }
 
     /**
      * Remove the specified keyed object from both hard and weak caches.
      * An object should be removed from the cache when it becomes invalid.
@@ -150,13 +143,13 @@ public class ObjectCache {
      */
     public synchronized void remove(long key) {
         processQueue();
-        KeyedSoftReference<?> ref = (KeyedSoftReference<?>)hashTable.get(key);
+        KeyedSoftReference<?> ref = hashTable.get(key);
         if (ref != null) {
             ref.clear();
             hashTable.remove(key);
         }
     }
 
     /**
      * Add the specified object to the hard cache.
      * @param obj object
@@ -167,23 +160,23 @@ public class ObjectCache {
             hardCache.removeFirst();
         }
     }
 
     /**
      * Cleanup weak cache
      */
     private void processQueue() {
         KeyedSoftReference<?> ref;
-        while((ref = (KeyedSoftReference<?>)refQueue.poll()) != null) {
+        while ((ref = (KeyedSoftReference<?>) refQueue.poll()) != null) {
             hashTable.remove(ref.getKey());
         }
     }
 
     /**
      * Provides a weak wrapper for a keyed-object
      */
     private class KeyedSoftReference<T> extends WeakReference<T> {
         private long key;
 
         /**
          * Construct a keyed-object reference
          * @param key object key
@@ -194,13 +187,14 @@ public class ObjectCache {
             super(obj, queue);
             this.key = key;
         }
 
         /**
          * Return object key
          * @return object key
          */
         long getKey() {
             return key;
         }
     }
 
 }
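ObjectCache's pattern above generalizes: the map values are key-aware references registered with a `ReferenceQueue`, so draining the queue tells the cache exactly which entries to evict after the garbage collector clears their referents. A self-contained sketch of the same idea (assumed names, not Ghidra's class):

```java
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Map;

public class KeyedRefSketch {
    // A weak reference that remembers which map key it was stored under.
    static class KeyedRef extends WeakReference<Object> {
        final long key;
        KeyedRef(long key, Object obj, ReferenceQueue<Object> q) {
            super(obj, q);
            this.key = key;
        }
    }

    private final Map<Long, KeyedRef> table = new HashMap<>();
    private final ReferenceQueue<Object> queue = new ReferenceQueue<>();

    public synchronized void put(long key, Object value) {
        drain();
        table.put(key, new KeyedRef(key, value, queue));
    }

    public synchronized Object get(long key) {
        drain();
        KeyedRef ref = table.get(key);
        return ref == null ? null : ref.get();
    }

    // Evict map entries whose referents have been collected.
    private void drain() {
        KeyedRef ref;
        while ((ref = (KeyedRef) queue.poll()) != null) {
            table.remove(ref.key);
        }
    }
}
```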
@@ -1,163 +0,0 @@
-/* ###
- * IP: GHIDRA
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package ghidra.util.datastruct;
-
-import java.lang.ref.ReferenceQueue;
-import java.lang.ref.SoftReference;
-
-/**
- * Soft reference cache class that caches objects for long keys. This cache will
- * store at most "cacheSize" number of entries, but since it uses soft references
- * for the cached values, those object may be reclaimed.
- */
-
-public class SoftCacheLongKeyMap {
-    private int cacheSize;
-    private ReferenceQueue<Object> refQueue;
-    private Entry head;
-    private LongObjectHashtable<Object> map;
-
-    /**
-     * Construct a new SoftCacheLongKeyMap that caches at most cacheSize number of entries
-     * @param cacheSize the max number of entries to cache.
-     */
-    public SoftCacheLongKeyMap(int cacheSize) {
-        this.cacheSize = Math.max(cacheSize, 10);
-        map = new LongObjectHashtable<>();
-        head = new Entry(0, null);
-        head.nextEntry = head;
-        head.prevEntry = head;
-        refQueue = new ReferenceQueue<>();
-    }
-    /**
-     * Caches the given value for the given key
-     * @param key the key
-     * @param value the cached value for the given key
-     * @return any previous object that is cached for the given key.
-     */
-    public Object put(long key, Object value) {
-        processQueue();
-        if (map.size() == cacheSize) {
-            remove(head.nextEntry.key);
-        }
-        Object obj = map.remove(key);
-        Entry entry = new Entry(key, value);
-        head.addBefore(entry);
-        map.put(key, entry);
-        return obj;
-    }
-
-    /**
-     * Returns the cached value for the given key, if it exists.
-     * @param key the key for which to get a cached value.
-     * @return the object that was cached for that key, or null if none exists.
-     */
-    public Object get(long key) {
-        processQueue();
-        Entry entry = (Entry)map.get(key);
-        if (entry != null) {
-            entry.delete();
-            head.addBefore(entry);
-            return entry.get();
-        }
-        return null;
-    }
-
-    /**
-     * Returns the number of items in the cache. Can change from one call to
-     * the next even if no entries were added or deleted.
-     */
-    public int size() {
-        processQueue();
-        return map.size();
-    }
-
-    /**
-     * Removes all entries from the cache
-     */
-    public void clear() {
-        map.removeAll();
-        refQueue = new ReferenceQueue<>();
-    }
-
-    /**
-     * Returns true if the cache is empty. If true, it will remain empty until a new
-     * entry is added. However if false, it may return true even if nothing was removed
-     */
-    public boolean isEmpty() {
-        processQueue();
-        return map.size() == 0;
-    }
-    /**
-     * Returns true if the cache currently contains the given key. Not useful since even
-     * if it returns true, there is no guarentee that a get will work after containsKey
-     * returns true.
-     * @param key the Key to check
-     */
-    public boolean containsKey(long key) {
-        processQueue();
-        return map.contains(key);
-    }
-
-    /**
-     * Removes any cached value for the given key.
-     * @param key the key for which to remove cached values.
-     * @return the cached object that was stored for the given key, or null
-     */
-    public Object remove(long key) {
-        Entry entry = (Entry)map.remove(key);
-        if (entry != null) {
-            entry.delete();
-            return entry.get();
-        }
-        return null;
-    }
-
-    /**
-     * Returns a list of all current keys.
-     */
-    public long[] getKeys() {
-        processQueue();
-        return map.getKeys();
-    }
-    private void processQueue() {
-        Entry entry;
-        while((entry = (Entry)refQueue.poll()) != null) {
-            remove(entry.key);
-        }
-    }
-
-    class Entry extends SoftReference<Object> {
-        long key;
-        Entry nextEntry;
-        Entry prevEntry;
-        Entry(long key, Object value) {
-            super(value, refQueue);
-            this.key = key;
-        }
-        void addBefore(Entry entry) {
-            entry.nextEntry = this;
-            entry.prevEntry = this.prevEntry;
-            this.prevEntry.nextEntry = entry;
-            this.prevEntry = entry;
-        }
-        void delete() {
-            prevEntry.nextEntry = nextEntry;
-            nextEntry.prevEntry = prevEntry;
-        }
-    }
-
-}
@@ -15,8 +15,9 @@
  */
 package ghidra.util.graph.attributes;
 
-import ghidra.util.datastruct.LongObjectHashtable;
-import ghidra.util.exception.NoValueException;
+import java.util.HashMap;
+import java.util.Map;
+
 import ghidra.util.graph.KeyIndexableSet;
 import ghidra.util.graph.KeyedObject;
 
@@ -25,7 +26,7 @@ import ghidra.util.graph.KeyedObject;
  */
 public class ObjectAttribute<T extends KeyedObject> extends Attribute<T> {
     //private Object[] values;
-    private LongObjectHashtable<Object> values;
+    private Map<Long, Object> values;
     private static String attributeType = AttributeManager.OBJECT_TYPE;
 
     /** Constructor.
@@ -36,7 +37,7 @@ public class ObjectAttribute<T extends KeyedObject> extends Attribute<T> {
     public ObjectAttribute(String name, KeyIndexableSet<T> set) {
         super(name, set);
         //this.values = new Object[set.capacity()];
-        values = new LongObjectHashtable<Object>();
+        values = new HashMap<>();
     }
 
     /** Set the value of this attribute for the specified KeyedObject.
@@ -93,7 +94,7 @@ public class ObjectAttribute<T extends KeyedObject> extends Attribute<T> {
     /** Removes all assigned values of this attribute. */
     @Override
     public void clear() {
-        values.removeAll();
+        values.clear();
     }
 
     /** Return the attribute of the specified KeyedObject as a String.
@@ -101,7 +102,7 @@ public class ObjectAttribute<T extends KeyedObject> extends Attribute<T> {
     @Override
     public String getValueAsString(KeyedObject o) {
         Object v;
-        if (values.contains(o.key())) {
+        if (values.containsKey(o.key())) {
             v = getValue(o);
             if (v != null) {
                 return v.toString();
@@ -15,20 +15,17 @@
  */
 package ghidra.util.graph.attributes;
 
-import ghidra.util.datastruct.LongObjectHashtable;
-import ghidra.util.exception.NoValueException;
+import java.util.*;
 
 import ghidra.util.graph.KeyIndexableSet;
 import ghidra.util.graph.KeyedObject;
 
-import java.util.Arrays;
-import java.util.Comparator;
-
 /** This class provides a storage mechanism for String-valued information about
  * the elements of a KeyIndexableSet, e.g. the vertices of a DirectedGraph.
  */
 public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
     //private String[] values;
-    private ghidra.util.datastruct.LongObjectHashtable values;
+    private Map<Long, String> values;
     private static String attributeType = AttributeManager.STRING_TYPE;
 
     /** Constructor.
@@ -38,7 +35,7 @@ public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
     */
    public StringAttribute(String name, KeyIndexableSet<T> set) {
        super(name, set);
-       this.values = new LongObjectHashtable(set.capacity());// String[set.capacity()];
+       this.values = new HashMap<>(set.capacity());// String[set.capacity()];
    }
 
    /** Set the value of this attribute for the specified KeyedObject.
@@ -49,8 +46,9 @@
     * not a member of the owningSet.
     */
    public boolean setValue(T o, String value) {
-       if (value == null)
+       if (value == null) {
           return false;
+       }
       if (owningSet().contains(o)) {
           //values[ owningSet().index( o ) ] = value;
           values.put(o.key(), value);
@@ -66,7 +64,7 @@
    public String getValue(KeyedObject o) //throws NoValueException
    {
        //return values[ owningSet().index( o ) ];
-       return (String) values.get(o.key());
+       return values.get(o.key());
    }
 
    // /** Debug printing. */
@@ -129,8 +127,9 @@
           else if ((ko1.key() - ko2.key()) > 0) {
               return +1;
           }
-          else
+          else {
              return 0;
+          }
       }
       //ko1 is ok, ko2 fails.
       return -1;
@@ -144,8 +143,9 @@
           else if ((ko1.key() - ko2.key()) > 0) {
               return +1;
           }
-          else
+          else {
              return 0;
+          }
       }
    }
 
@@ -160,7 +160,7 @@
    /** Removes all assigned values of this attribute. */
    @Override
    public void clear() {
-       values.removeAll();
+       values.clear();
    }
 
    /** Return the attribute of the specified KeyedObject as a String.
@@ -1,6 +1,5 @@
 /* ###
  * IP: GHIDRA
- * REVIEWED: YES
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -16,39 +15,39 @@
  */
 package ghidra.util.prop;

+import java.io.*;
+import java.util.HashMap;
+import java.util.Map;
+
 import ghidra.util.LongIterator;
-import ghidra.util.datastruct.LongObjectHashtable;
 import ghidra.util.datastruct.NoSuchIndexException;
 import ghidra.util.exception.AssertException;
 import ghidra.util.exception.NoValueException;
-
-import java.io.*;

 /**
  * Base class for managing properties that are accessed by an index. Property
  * values are determined by the derived class.
  */
 public abstract class PropertySet implements Serializable {
-    private final static long serialVersionUID = 1;
+    private final static long serialVersionUID = 1;
     protected static final NoValueException noValueException = new NoValueException();
-    private final static int DEFAULT_NUMBER_PAGE_BITS=12;
+    private final static int DEFAULT_NUMBER_PAGE_BITS = 12;
     private final static int MIN_NUMBER_PAGE_BITS = 8;
     private final static int MAX_NUMBER_PAGE_BITS = 15; // must be kept less than
                                                         // size of a short

-    private String name;
-    protected PropertyPageIndex propertyPageIndex; // table of pageIDs
+    private String name;
+    protected PropertyPageIndex propertyPageIndex; // table of pageIDs
     private int numPageBits; // number of bits from long used as page offset
     private long pageMask; // a mask for the offset bits, i.e. has a 1 if and only if
-                           // the bit is part of the offset
+                           // the bit is part of the offset
     protected short pageSize; // max elements in each page
-    protected int numProperties;
-    private LongObjectHashtable<PropertyPage> ht;
-    private Class<?> objectClass;
-
+    protected int numProperties;
+    private Map<Long, PropertyPage> ht;
+    private Class<?> objectClass;
+
     protected PropertySet(String name, Class<?> objectClass) {
-        this(name, DEFAULT_NUMBER_PAGE_BITS, objectClass);
+        this(name, DEFAULT_NUMBER_PAGE_BITS, objectClass);
    }

    /**
@@ -59,22 +58,22 @@ public abstract class PropertySet implements Serializable {
     * more than 15.
     */
    protected PropertySet(String name, int numPageBits, Class<?> objectClass) {
-        this.objectClass = objectClass;
-        ht = new LongObjectHashtable<PropertyPage>();
+        this.objectClass = objectClass;
+        ht = new HashMap<>();

        this.name = name;
-        if(numPageBits > MAX_NUMBER_PAGE_BITS) {
+        if (numPageBits > MAX_NUMBER_PAGE_BITS) {
            numPageBits = MAX_NUMBER_PAGE_BITS;
        }
-        else if(numPageBits < MIN_NUMBER_PAGE_BITS) {
+        else if (numPageBits < MIN_NUMBER_PAGE_BITS) {
            numPageBits = MIN_NUMBER_PAGE_BITS;
        }
        this.numPageBits = numPageBits;
        // compute the page mask
        pageMask = -1L;
-        pageMask = pageMask >>> (64-numPageBits); // 64 = size of long
+        pageMask = pageMask >>> (64 - numPageBits); // 64 = size of long

-        pageSize = (short)(pageMask + 1);
+        pageSize = (short) (pageMask + 1);
        propertyPageIndex = new PropertyPageIndex();
    }
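The reformatted constructor also documents the paging scheme: numPageBits is clamped to [MIN_NUMBER_PAGE_BITS, MAX_NUMBER_PAGE_BITS], pageMask keeps the low numPageBits bits of an index, and pageSize is pageMask + 1. A runnable sketch of just that bit arithmetic, under the assumption (consistent with getIndex() later in the diff) that the page ID is the remaining high bits:

public class PageMathDemo {
    public static void main(String[] args) {
        int numPageBits = 12;
        long pageMask = -1L >>> (64 - numPageBits); // 0xFFF: low 12 bits
        short pageSize = (short) (pageMask + 1);    // 4096 slots per page

        long index = 0x12345678L;
        long pageID = index >>> numPageBits;        // high bits -> page (assumed)
        short offset = (short) (index & pageMask);  // low bits  -> slot in page

        // getIndex() recombines losslessly: (pageID << numPageBits) | offset
        long rebuilt = (pageID << numPageBits) | offset;
        System.out.printf("pageSize=%d pageID=0x%X offset=0x%X ok=%b%n",
            pageSize, pageID, offset, rebuilt == index);
    }
}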
@@ -84,15 +83,15 @@ public abstract class PropertySet implements Serializable {
     * @return the size (in bytes) of the data that is stored in this property
     * set.
     */
-    public abstract int getDataSize();
-
-    /**
+    public abstract int getDataSize();
+
+    /**
     * Get the name for this property manager.
     */
    public synchronized String getName() {
        return name;
    }

    /**
     * Returns property object class associated with this set.
     */
@@ -100,19 +99,19 @@ public abstract class PropertySet implements Serializable {
        return objectClass;
    }

-    protected PropertyPage getPage(long pageId) {
-        return ht.get(pageId);
-    }
-
-    protected PropertyPage getOrCreatePage(long pageID) {
-        PropertyPage page = getPage(pageID);
-        if(page == null) {
-            page = new PropertyPage(pageSize,pageID,getDataSize(),objectClass);
-            ht.put(pageID,page);
+    protected PropertyPage getPage(long pageId) {
+        return ht.get(pageId);
+    }
+
+    protected PropertyPage getOrCreatePage(long pageID) {
+        PropertyPage page = getPage(pageID);
+        if (page == null) {
+            page = new PropertyPage(pageSize, pageID, getDataSize(), objectClass);
+            ht.put(pageID, page);
            propertyPageIndex.add(pageID);
        }
-        return page;
-    }
+        return page;
+    }

    /**
     * Given two indices it indicates whether there is an index in
@@ -123,21 +122,22 @@ public abstract class PropertySet implements Serializable {
     * @return boolean true if at least one index in the range
     * has the property, false otherwise.
     */
-    public boolean intersects(long start,long end) {
-        if (hasProperty(start)) {
-            return true;
-        }
-        try {
-            long index = this.getNextPropertyIndex(start);
-            if (index <= end) {
-                return true;
-            }
-        }
-        catch(NoSuchIndexException e) {
-            return false;
-        }
-        return false;
+    public boolean intersects(long start, long end) {
+        if (hasProperty(start)) {
+            return true;
+        }
+        try {
+            long index = this.getNextPropertyIndex(start);
+            if (index <= end) {
+                return true;
+            }
+        }
+        catch (NoSuchIndexException e) {
+            return false;
+        }
+        return false;
    }

    /**
     * Removes all property values within a given range.
     * @param start begin range
@@ -147,23 +147,23 @@ public abstract class PropertySet implements Serializable {
     */
    public synchronized boolean removeRange(long start, long end) {

-        boolean status=false;
-        // go from start to end
-        //   get the page starting at start
-        //     get page start index and end index
-        //     subtract page.getSize() from numProperties
-        //     remove the entire page
-        //     increment start by size of page
-        //   else
-        //     for (i<endofPage; start++)
-        //        call slow remove(index);
+        boolean status = false;
+        // go from start to end
+        //   get the page starting at start
+        //     get page start index and end index
+        //     subtract page.getSize() from numProperties
+        //     remove the entire page
+        //     increment start by size of page
+        //   else
+        //     for (i<endofPage; start++)
+        //        call slow remove(index);

-        while (start <= end) {
-            // get page containing start
-            long pageID = getPageID(start);
-            short offset = getPageOffset(start);
+        while (start <= end) {
+            // get page containing start
+            long pageID = getPageID(start);
+            short offset = getPageOffset(start);

-            PropertyPage page = getPage(pageID);
+            PropertyPage page = getPage(pageID);

            if (page == null) {
                long nextPageId = propertyPageIndex.getNext(pageID);
@@ -174,27 +174,27 @@ public abstract class PropertySet implements Serializable {
                continue;
            }

-            // if start is beginning of page && end of page is still less than start
-            if (offset == 0 && (pageSize+start) <= end) {
+            // if start is beginning of page && end of page is still less than start
+            if (offset == 0 && (pageSize + start) <= end) {

-                // decrement # properties on a page
-                this.numProperties -= page.getSize();
+                // decrement # properties on a page
+                this.numProperties -= page.getSize();

-                // remove the entire page
-                ht.remove(pageID);
-                propertyPageIndex.remove(pageID);
+                // remove the entire page
+                ht.remove(pageID);
+                propertyPageIndex.remove(pageID);

-                status = true;
-                long nextPageId = propertyPageIndex.getNext(pageID);
-                start = nextPageId << numPageBits;
-            }
-            else {
-                // start at offset, and remove each property
-                for (; offset < pageSize && start <= end; offset++, start++) {
-                    status |= removeFromPage(page, pageID, offset);
-                }
-            }
-        }
+                status = true;
+                long nextPageId = propertyPageIndex.getNext(pageID);
+                start = nextPageId << numPageBits;
+            }
+            else {
+                // start at offset, and remove each property
+                for (; offset < pageSize && start <= end; offset++, start++) {
+                    status |= removeFromPage(page, pageID, offset);
+                }
+            }
+        }

        return status;
    }
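removeRange takes a fast path when start sits on a page boundary and the whole page fits inside the range: the page leaves the map in one remove() call and start jumps past it, instead of clearing slots one at a time. A standalone analog with plain types — PAGE_BITS and the int[] payload are stand-ins for the real PropertyPage, and the jump here is simplified to pageID + 1 where the real code asks propertyPageIndex for the next populated page:

import java.util.HashMap;
import java.util.Map;

public class RangeRemoveDemo {
    static final int PAGE_BITS = 12;
    static final int PAGE_SIZE = 1 << PAGE_BITS;

    public static void main(String[] args) {
        Map<Long, int[]> pages = new HashMap<>();
        pages.put(5L, new int[PAGE_SIZE]);      // page 5, fully populated

        long start = 5L << PAGE_BITS;           // first index on page 5
        long end = (7L << PAGE_BITS) - 1;       // range covers pages 5 and 6

        long pageID = start >>> PAGE_BITS;
        long offset = start & (PAGE_SIZE - 1);
        if (offset == 0 && (PAGE_SIZE + start) <= end) { // same test as the diff
            pages.remove(pageID);               // whole-page removal in one call
            start = (pageID + 1) << PAGE_BITS;  // jump past the removed page
        }
        System.out.println("pages left: " + pages.size() + ", next start: " + start);
    }
}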
@@ -211,35 +211,36 @@ public abstract class PropertySet implements Serializable {

        PropertyPage page = getPage(pageID);

-        return removeFromPage(page, pageID, offset);
-    }
+        return removeFromPage(page, pageID, offset);
+    }

-    /**
-     * Remove the property on page at offset. If Page is now empty, remove it.
-     */
-    private boolean removeFromPage(PropertyPage page, long pageID, short offset) {
-        if(page != null) {
+    /**
+     * Remove the property on page at offset. If Page is now empty, remove it.
+     */
+    private boolean removeFromPage(PropertyPage page, long pageID, short offset) {
+        if (page != null) {

            boolean removed = page.remove(offset);
-            if (removed) {
-                numProperties--;
-            }
+            if (removed) {
+                numProperties--;
+            }

-            if(page.isEmpty()) {
-                ht.remove(pageID);
+            if (page.isEmpty()) {
+                ht.remove(pageID);
                propertyPageIndex.remove(pageID);
            }
            return removed;
        }
        return false;
    }

    /**
     * returns whether there is a property value at index.
     * @param index the long representation of an address.
     */
    public synchronized boolean hasProperty(long index) {
        PropertyPage page = getPage(getPageID(index));
-        if(page == null) {
+        if (page == null) {
            return false;
        }
        return page.hasProperty(getPageOffset(index));
@@ -256,29 +257,29 @@ public abstract class PropertySet implements Serializable {
        short offset = getPageOffset(index);
        PropertyPage page = getPage(pageID);

-        if(page != null) {
+        if (page != null) {
            short nextOffset = page.getNext(offset);

-            if(nextOffset >= 0) {
+            if (nextOffset >= 0) {
                return getIndex(pageID, nextOffset);
            }
        }

        pageID = propertyPageIndex.getNext(pageID);

-        if(pageID >= 0) {
+        if (pageID >= 0) {
            page = getPage(pageID);
-            if(page != null) {
+            if (page != null) {
                short nextOffset = page.getFirst();
-                if (nextOffset < 0) {
-                    throw new AssertException(
-                        "Page (" + pageID +
-                        ") exists but there is no 'first' offset");
-                }
+                if (nextOffset < 0) {
+                    throw new AssertException(
+                        "Page (" + pageID +
+                        ") exists but there is no 'first' offset");
+                }
                return getIndex(pageID, nextOffset);
            }
        }
-        throw NoSuchIndexException.noSuchIndexException;
+        throw NoSuchIndexException.noSuchIndexException;
    }

    /**
@@ -295,36 +296,37 @@ public abstract class PropertySet implements Serializable {

        PropertyPage page = getPage(pageID);

-        if(page != null) {
+        if (page != null) {
            short prevOffset = page.getPrevious(offset);
-            if(prevOffset >= 0) {
+            if (prevOffset >= 0) {
                return getIndex(pageID, prevOffset);
            }
        }

        pageID = propertyPageIndex.getPrevious(pageID);

-        if(pageID >= 0) {
+        if (pageID >= 0) {
            page = getPage(pageID);
-            if(page != null) {
+            if (page != null) {
                short prevOffset = page.getLast();
-                if (prevOffset < 0) {
-                    throw new AssertException(
-                        "Page (" + pageID +
-                        ") exists but there is no 'last' offset");
-                }
+                if (prevOffset < 0) {
+                    throw new AssertException(
+                        "Page (" + pageID +
+                        ") exists but there is no 'last' offset");
+                }
                return getIndex(pageID, prevOffset);
            }
        }

-        throw NoSuchIndexException.noSuchIndexException;
+        throw NoSuchIndexException.noSuchIndexException;
    }

    /**
     * Get the first index where a property value exists.
     * @throws NoSuchIndexException when there is no property value for any index.
     */
    public synchronized long getFirstPropertyIndex() throws NoSuchIndexException {
-        if(hasProperty(0)) {
+        if (hasProperty(0)) {
            return 0;
        }
        return getNextPropertyIndex(0);
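getFirstPropertyIndex probes index 0 and otherwise delegates to getNextPropertyIndex, so the usual way to visit every property is to follow getNextPropertyIndex until it throws NoSuchIndexException. A runnable analog of that traversal contract, using a NavigableMap in place of the page table — higherKey returning null plays the role of the exception:

import java.util.NavigableMap;
import java.util.TreeMap;

public class NextIndexDemo {
    public static void main(String[] args) {
        NavigableMap<Long, String> props = new TreeMap<>();
        props.put(4096L, "a");
        props.put(4100L, "b");
        props.put(9000L, "c");

        Long index = props.firstKey();          // analog of getFirstPropertyIndex
        while (index != null) {
            System.out.println(index + " -> " + props.get(index));
            index = props.higherKey(index);     // analog of getNextPropertyIndex
        }
    }
}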
@@ -337,18 +339,19 @@ public abstract class PropertySet implements Serializable {
     */
    public synchronized long getLastPropertyIndex() throws NoSuchIndexException {
        // -1 should be the highest possible address
-        if(hasProperty(-1)) {
+        if (hasProperty(-1)) {
            return -1;
        }
        return getPreviousPropertyIndex(-1);
    }

    /**
     * Get the number of properties in the set.
     * @return the number of properties
     */
-    public int getSize() {
-        return numProperties;
-    }
+    public int getSize() {
+        return numProperties;
+    }

    /**
     * Extract the page ID from the given index.
@@ -364,7 +367,7 @@ public abstract class PropertySet implements Serializable {
     * @param index the long representation of an address.
     */
    protected final short getPageOffset(long index) {
-        return(short)(index & pageMask);
+        return (short) (index & pageMask);
    }

    /**
@@ -372,9 +375,9 @@ public abstract class PropertySet implements Serializable {
     * @return the long representation of an address.
     */
    protected final long getIndex(long pageID, short offset) {
-        return(pageID << numPageBits) | offset;
+        return (pageID << numPageBits) | offset;
    }

    /**
     * Move the range of properties to the newStart index.
     * @param start the beginning of the property range to move
@@ -382,48 +385,50 @@ public abstract class PropertySet implements Serializable {
     * @param newStart the new beginning of the property range after the move
     */
    public void moveRange(long start, long end, long newStart) {
-        if (newStart < start) {
-            long clearSize = end-start+1;
-            long offset = start - newStart;
-            if (offset < clearSize) {
-                clearSize = offset;
-            }
-            removeRange(newStart, newStart+clearSize-1);
-            LongIterator it = getPropertyIterator(start, end);
-            while(it.hasNext()) {
-                long index = it.next();
-                moveIndex(index, index-offset);
-            }
-        }
-        else {
-            long clearSize = end-start+1;
-            long offset = newStart - start;
-            if (offset < clearSize) {
-                clearSize = offset;
-            }
-            if (newStart > end) {
-                removeRange(newStart, newStart+clearSize-1);
-            }
-            else {
-                removeRange(end+1, end+clearSize);
-            }
+        if (newStart < start) {
+            long clearSize = end - start + 1;
+            long offset = start - newStart;
+            if (offset < clearSize) {
+                clearSize = offset;
+            }
+            removeRange(newStart, newStart + clearSize - 1);
+            LongIterator it = getPropertyIterator(start, end);
+            while (it.hasNext()) {
+                long index = it.next();
+                moveIndex(index, index - offset);
+            }
+        }
+        else {
+            long clearSize = end - start + 1;
+            long offset = newStart - start;
+            if (offset < clearSize) {
+                clearSize = offset;
+            }
+            if (newStart > end) {
+                removeRange(newStart, newStart + clearSize - 1);
+            }
+            else {
+                removeRange(end + 1, end + clearSize);
+            }

-            LongIterator it = getPropertyIterator(end+1);
-            while(it.hasPrevious()) {
-                long index = it.previous();
-                if (index < start) {
-                    break;
-                }
-                moveIndex(index, index+offset);
-            }
-        }
-    }
+            LongIterator it = getPropertyIterator(end + 1);
+            while (it.hasPrevious()) {
+                long index = it.previous();
+                if (index < start) {
+                    break;
+                }
+                moveIndex(index, index + offset);
+            }
+        }
+    }
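moveRange first clears whatever the moved block will land on, then walks the surviving properties in ascending order when shifting down and in descending order (hasPrevious/previous) when shifting up, so an entry is never overwritten before it has itself moved. A runnable analog of that ordering rule on a plain sorted map:

import java.util.Map;
import java.util.TreeMap;

public class MoveRangeDemo {
    public static void main(String[] args) {
        Map<Long, String> m = new TreeMap<>();
        for (long i = 10; i <= 14; i++) {
            m.put(i, "v" + i);
        }
        long offset = 2; // shift [10,14] up to the overlapping range [12,16]
        // descending walk: 14 -> 16, 13 -> 15, ... never clobbers an unmoved key;
        // an ascending walk would overwrite v12 before it had moved
        for (long i = 14; i >= 10; i--) {
            m.put(i + offset, m.remove(i));
        }
        System.out.println(m); // {12=v10, 13=v11, 14=v12, 15=v13, 16=v14}
    }
}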
    protected abstract void moveIndex(long from, long to);
-    protected abstract void saveProperty(ObjectOutputStream oos, long addr)
-        throws IOException;
-    protected abstract void restoreProperty(ObjectInputStream ois, long addr)
-        throws IOException, ClassNotFoundException;
+
+    protected abstract void saveProperty(ObjectOutputStream oos, long addr)
+            throws IOException;
+
+    protected abstract void restoreProperty(ObjectInputStream ois, long addr)
+            throws IOException, ClassNotFoundException;

    /**
     * Creates an iterator over all the indexes that have this property within
@@ -433,10 +438,10 @@ public abstract class PropertySet implements Serializable {
     * @param end The end address to search
     * @return LongIterator Iterator over indexes that have properties.
     */
-    public LongIterator getPropertyIterator(long start,long end) {
+    public LongIterator getPropertyIterator(long start, long end) {
        return new LongIteratorImpl(this, start, end);
    }

    /**
     * Creates an iterator over all the indexes that have this property within
     * the given range.
@@ -451,6 +456,7 @@ public abstract class PropertySet implements Serializable {
    public LongIterator getPropertyIterator(long start, long end, boolean atStart) {
        return new LongIteratorImpl(this, start, end, atStart);
    }
+
    /**
     * Returns an iterator over the indices having the given property
     * value.
@@ -478,7 +484,6 @@ public abstract class PropertySet implements Serializable {
        return new LongIteratorImpl(this, start, before);
    }

-
    /**
     * Saves all property values between start and end to the output stream
     * @param oos the output stream
@@ -487,15 +492,15 @@ public abstract class PropertySet implements Serializable {
     * @throws IOException if an I/O error occurs on the write.
     */
    public void saveProperties(ObjectOutputStream oos, long start, long end)
-        throws IOException{
+            throws IOException {

-        oos.writeLong(start);
-        oos.writeLong(end);
-        if (hasProperty(start)) {
+        oos.writeLong(start);
+        oos.writeLong(end);
+        if (hasProperty(start)) {
            oos.writeByte(1);
            oos.writeLong(start);
            saveProperty(oos, start);
-        }
+        }
        try {
            long index = start;
            while ((index = getNextPropertyIndex(index)) <= end) {
@@ -504,10 +509,11 @@ public abstract class PropertySet implements Serializable {
                saveProperty(oos, index);
            }
        }
-        catch(NoSuchIndexException e) {}
+        catch (NoSuchIndexException e) {
+        }
        oos.writeByte(0);
    }

    /**
     * Restores all the properties from the input stream. Any existing
     * properties will first be removed.
@@ -516,56 +522,22 @@ public abstract class PropertySet implements Serializable {
     * @throws ClassNotFoundException if the a class cannot be determined for
     * the property value.
     */
-    public void restoreProperties(ObjectInputStream ois) throws
-        IOException, ClassNotFoundException {
-        long start = ois.readLong();
-        long end = ois.readLong();
-        this.removeRange(start,end);
-        while(ois.readByte() != 0) {
+    public void restoreProperties(ObjectInputStream ois)
+            throws IOException, ClassNotFoundException {
+        long start = ois.readLong();
+        long end = ois.readLong();
+        this.removeRange(start, end);
+        while (ois.readByte() != 0) {
            long index = ois.readLong();
            restoreProperty(ois, index);
        }
    }

-    /**
-     * Saves all properties to the given output stream.
-     * @param out the output stream.
-     * @throws IOException I/O error occurs while writing output.
-     */
-    public void saveAll(ObjectOutputStream out) throws IOException {
-        out.writeObject(name);
-        out.writeObject(propertyPageIndex);
-        out.writeInt(numPageBits);
-        out.writeLong(pageMask);
-        out.writeShort(pageSize);
-        out.writeInt(numProperties);
-        out.writeObject(ht);
-        out.writeObject(objectClass);
-    }
-
-    /**
-     * Restores all properties values from the input stream.
-     * @param in the input stream.
-     * @throws IOException if I/O error occurs while reading from stream.
-     * @throws ClassNotFoundException if the a class cannot be determined for
-     * the property value.
-     */
-    @SuppressWarnings("unchecked") // the type must match or it is a bug
-    public void restoreAll(ObjectInputStream in) throws IOException, ClassNotFoundException {
-        name = (String)in.readObject();
-        propertyPageIndex = (PropertyPageIndex)in.readObject();
-        numPageBits = in.readInt();
-        pageMask = in.readLong();
-        pageSize = in.readShort();
-        numProperties = in.readInt();
-        ht = (LongObjectHashtable<PropertyPage>) in.readObject();
-        objectClass = (Class<?>)in.readObject();
-    }
    /**
-     * Based upon the type of property manager that this is, the appropriate
-     * visit() method will be called within the PropertyVisitor.
-     * @param visitor object implementing the PropertyVisitor interface.
-     * @param addr the address of where to visit (get) the property.
+     * Based upon the type of property manager that this is, the appropriate
+     * visit() method will be called within the PropertyVisitor.
+     * @param visitor object implementing the PropertyVisitor interface.
+     * @param addr the address of where to visit (get) the property.
     */
    public abstract void applyValue(PropertyVisitor visitor, long addr);
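The surviving per-range path, saveProperties/restoreProperties, defines a simple record stream: the range bounds, then for each property a 1 byte, the index, and the subclass-encoded value, terminated by a 0 byte. A self-contained sketch of that framing — the value is written here as a single long purely for illustration; the real encoding belongs to saveProperty/restoreProperty in the subclass:

import java.io.*;

public class PropertyStreamDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(buf)) {
            oos.writeLong(0);      // start of the saved range
            oos.writeLong(100);    // end of the saved range
            oos.writeByte(1);      // marker: a record follows
            oos.writeLong(42);     // property index
            oos.writeLong(7);      // property value (illustrative encoding)
            oos.writeByte(0);      // terminator
        }
        try (ObjectInputStream ois =
            new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray()))) {
            long start = ois.readLong(), end = ois.readLong();
            System.out.println("restoring range [" + start + "," + end + "]");
            while (ois.readByte() != 0) {
                System.out.println("index " + ois.readLong() + " = " + ois.readLong());
            }
        }
    }
}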
@@ -1,108 +0,0 @@
-/* ###
- * IP: GHIDRA
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package ghidra.util.datastruct;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-
-import org.junit.Test;
-
-import generic.test.AbstractGenericTest;
-
-public class SoftCacheLongKeyMapTest extends AbstractGenericTest {
-    SoftCacheLongKeyMap cache;
-
-    /**
-     *
-     */
-    public SoftCacheLongKeyMapTest() {
-        super();
-    }
-
-    @Test
-    public void testMap() {
-        cache = new SoftCacheLongKeyMap(20);
-        cache.put(0, "aaa");
-        cache.put(1, "bbb");
-        cache.put(2, "ccc");
-
-        assertEquals(3, cache.size());
-        assertEquals("aaa", cache.get(0));
-        assertEquals("bbb", cache.get(1));
-        assertEquals("ccc", cache.get(2));
-    }
-
-    @Test
-    public void testlru() {
-        cache = new SoftCacheLongKeyMap(10);
-        cache.put(0, "aaa");
-        cache.put(1, "bbb");
-        cache.put(2, "ccc");
-        cache.put(3, "ddd");
-        cache.put(4, "eee");
-        cache.put(5, "fff");
-        cache.put(6, "ggg");
-        cache.put(7, "hhh");
-        cache.put(8, "iii");
-        cache.put(9, "jjj");
-
-        assertEquals(10, cache.size());
-        cache.put(10, "kkk");
-        assertEquals(10, cache.size());
-        assertNull(cache.get(0));
-
-    }
-
-    @Test
-    public void testlru2() {
-        cache = new SoftCacheLongKeyMap(10);
-        cache.put(0, "aaa");
-        cache.put(1, "bbb");
-        cache.put(2, "ccc");
-        cache.put(3, "ddd");
-        cache.put(4, "eee");
-        cache.put(5, "fff");
-        cache.put(6, "ggg");
-        cache.put(7, "hhh");
-        cache.put(8, "iii");
-        cache.put(9, "jjj");
-        cache.get(0);
-        assertEquals(10, cache.size());
-        cache.put(10, "kkk");
-        assertEquals(10, cache.size());
-        assertEquals("aaa", cache.get(0));
-        assertNull(cache.get(1));
-    }
-
-    @Test
-    public void testRemove() {
-        cache = new SoftCacheLongKeyMap(10);
-        cache.put(0, "aaa");
-        cache.put(1, "bbb");
-        cache.put(2, "ccc");
-        cache.put(3, "ddd");
-        cache.remove(1);
-        cache.remove(0);
-        cache.remove(3);
-        cache.remove(2);
-        assertEquals(0, cache.size());
-        cache.put(5, "zzz");
-        assertEquals(1, cache.size());
-        cache.remove(5);
-        assertEquals(0, cache.size());
-
-    }
-}
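The deleted test pinned down the removed map's LRU contract: at capacity 10 an eleventh put evicts the least recently used key, and a get refreshes recency (testlru2 touches key 0, so key 1 is evicted instead). For reference, the same bounded-LRU behavior — minus the soft references SoftCacheLongKeyMap layered on top — can be reproduced with LinkedHashMap's access order; this is a generic stand-in, not code from the commit:

import java.util.LinkedHashMap;
import java.util.Map;

public class LruDemo {
    public static void main(String[] args) {
        final int capacity = 10;
        Map<Long, String> cache = new LinkedHashMap<Long, String>(16, 0.75f, true) {
            @Override
            protected boolean removeEldestEntry(Map.Entry<Long, String> eldest) {
                return size() > capacity; // evict once the bound is exceeded
            }
        };
        for (long i = 0; i < 10; i++) {
            cache.put(i, "v" + i);
        }
        cache.get(0L);          // touch key 0 so it is most recently used
        cache.put(10L, "v10");  // evicts key 1, not key 0
        System.out.println(cache.containsKey(0L)); // true
        System.out.println(cache.containsKey(1L)); // false
    }
}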
@@ -21,7 +21,6 @@ package ghidra.program.database;
 import java.lang.ref.ReferenceQueue;
 import java.lang.ref.WeakReference;
 import java.util.*;
-import java.util.stream.Collectors;

 import db.Record;
 import ghidra.program.model.address.KeyRange;
@@ -199,18 +198,21 @@ public class DBObjectCache<T extends DatabaseObject> {
     * @param keyRanges key ranges to delete
     */
    private void deleteLargeKeyRanges(List<KeyRange> keyRanges) {
-        map.keySet()
-                .stream()
-                .filter(key -> keyRangesContain(keyRanges, key))
-                .collect(Collectors.toList())
-                .forEach(key -> {
-                    KeyedSoftReference ref = map.remove(key);
-                    DatabaseObject obj = ref.get();
-                    if (obj != null) {
-                        obj.setDeleted();
-                        ref.clear();
-                    }
-                });
+        map.values().removeIf(ref -> checkRef(ref, keyRanges));
    }

+    private boolean checkRef(KeyedSoftReference ref, List<KeyRange> keyRanges) {
+        long key = ref.getKey();
+        if (keyRangesContain(keyRanges, key)) {
+            DatabaseObject obj = ref.get();
+            if (obj != null) {
+                obj.setDeleted();
+                ref.clear();
+            }
+            return true;
+        }
+        return false;
+
+    }

    /**
@@ -267,27 +269,6 @@ public class DBObjectCache<T extends DatabaseObject> {
        return invalidateCount;
    }

-    /**
-     * Invalidates a range of objects in the cache.
-     * @param startKey the first key in the range to invalidate.
-     * @param endKey the last key in the range to invalidate.
-     */
-    public synchronized void invalidate(long startKey, long endKey) {
-        processQueue();
-        if (endKey - startKey < map.size()) {
-            for (long i = startKey; i <= endKey; i++) {
-                doInvalidate(i);
-            }
-        }
-        else {
-            map.keySet()
-                    .stream()
-                    .filter(key -> (key >= startKey && key <= endKey))
-                    .collect(Collectors.toList())
-                    .forEach(key -> doInvalidate(key));
-        }
-    }
-
    /**
     * Removes the object with the given key from the cache.
     * @param key the key of the object to remove.
@@ -305,25 +286,6 @@ public class DBObjectCache<T extends DatabaseObject> {
        }
    }

-    /**
-     * Invalidates the object with given key.
-     * @param key the key of the object to invalidate.
-     */
-    public synchronized void invalidate(long key) {
-        processQueue();
-        doInvalidate(key);
-    }
-
-    private void doInvalidate(long key) {
-        KeyedSoftReference ref = map.get(key);
-        if (ref != null) {
-            T obj = ref.get();
-            if (obj != null) {
-                obj.setInvalid();
-            }
-        }
-    }
-
    private void addToHardCache(T obj) {
        hardCache.addLast(obj);
        if (hardCache.size() > hardCacheSize) {
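The deleteLargeKeyRanges rewrite is the most idiomatic payoff of the HashMap migration: instead of streaming the key set, collecting the matches to sidestep ConcurrentModificationException, and removing them one by one, the new code mutates the map in a single pass through its value-collection view with removeIf. The same pattern on a plain map, here keyed directly on the range test rather than on Ghidra's KeyRange type:

import java.util.HashMap;
import java.util.Map;

public class RemoveIfDemo {
    public static void main(String[] args) {
        Map<Long, String> map = new HashMap<>();
        map.put(1L, "keep");
        map.put(500L, "drop");
        map.put(900L, "drop");

        long lo = 100, hi = 1000; // inclusive key range to delete
        // removeIf on a view removes the backing entries without a copy
        map.entrySet().removeIf(e -> e.getKey() >= lo && e.getKey() <= hi);
        System.out.println(map); // {1=keep}
    }
}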
@@ -16,13 +16,14 @@
 package ghidra.program.database;

 import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;

 import db.*;
 import ghidra.program.model.address.AddressSpace;
 import ghidra.program.model.address.OverlayAddressSpace;
 import ghidra.program.model.lang.Language;
 import ghidra.program.util.LanguageTranslator;
-import ghidra.util.datastruct.LongObjectHashtable;
 import ghidra.util.exception.AssertException;
 import ghidra.util.exception.DuplicateNameException;

@@ -115,7 +116,7 @@ class OverlaySpaceAdapterDB {
    }

    void updateOverlaySpaces(ProgramAddressFactory factory) throws IOException {
-        LongObjectHashtable<OverlayAddressSpace> map = new LongObjectHashtable<>();
+        Map<Long, OverlayAddressSpace> map = new HashMap<>();
        for (AddressSpace space : factory.getAllAddressSpaces()) {
            if (space instanceof OverlayAddressSpace) {
                OverlayAddressSpace os = (OverlayAddressSpace) space;
@@ -162,11 +163,10 @@ class OverlaySpaceAdapterDB {
            }
        }
        if (map.size() != 0) {
-            long[] keys = map.getKeys();
-            for (int i = 0; i < keys.length; i++) {
-                OverlayAddressSpace space = map.remove(keys[i]);
+            for (OverlayAddressSpace space : map.values()) {
                factory.removeOverlaySpace(space.getName());
            }
+            map.clear();
        }
    }
@@ -39,7 +39,6 @@ import ghidra.program.model.lang.CompilerSpec;
 import ghidra.util.*;
 import ghidra.util.classfinder.ClassTranslator;
 import ghidra.util.datastruct.FixedSizeHashMap;
-import ghidra.util.datastruct.LongObjectHashtable;
 import ghidra.util.exception.*;
 import ghidra.util.task.TaskMonitor;

@@ -3490,15 +3489,15 @@ abstract public class DataTypeManagerDB implements DataTypeManager {
    }

    class IdsToDataTypeMap {
-        private Map<UniversalID, LongObjectHashtable<DataType>> map = new HashMap<>();
+        private Map<UniversalID, Map<Long, DataType>> map = new HashMap<>();

        DataType getDataType(UniversalID sourceID, UniversalID dataTypeID) {
            if (sourceID == null || sourceID.equals(universalID)) {
                sourceID = LOCAL_ARCHIVE_UNIVERSAL_ID;
            }
-            LongObjectHashtable<DataType> idMap = map.get(sourceID);
+            Map<Long, DataType> idMap = map.get(sourceID);
            if (idMap == null) {
-                idMap = new LongObjectHashtable<>();
+                idMap = new HashMap<>();
                map.put(sourceID, idMap);
            }
            DataType dt = idMap.get(dataTypeID.getValue());
@@ -3527,7 +3526,7 @@ abstract public class DataTypeManagerDB implements DataTypeManager {
            else {
                sourceID = sourceArchive.getSourceArchiveID();
            }
-            LongObjectHashtable<DataType> idMap = map.get(sourceID);
+            Map<Long, DataType> idMap = map.get(sourceID);
            if (idMap != null) {
                idMap.remove(dataTypeID.getValue());
            }
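IdsToDataTypeMap keeps a map of maps (source archive id -> data type id -> data type) and lazily creates the inner map, which the diff still does with an explicit null check. With HashMap the same lazy creation can also be written with computeIfAbsent; a generic sketch with String stand-ins for the Ghidra UniversalID and DataType types:

import java.util.HashMap;
import java.util.Map;

public class NestedMapDemo {
    public static void main(String[] args) {
        Map<String, Map<Long, String>> map = new HashMap<>();

        // creates and installs the inner map only on first use
        Map<Long, String> idMap =
            map.computeIfAbsent("archive-1", k -> new HashMap<>());
        idMap.put(42L, "MyStruct");

        System.out.println(map.get("archive-1").get(42L)); // MyStruct
    }
}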
@@ -34,7 +34,6 @@ import ghidra.program.model.symbol.*;
 import ghidra.program.util.LanguageTranslator;
 import ghidra.util.Lock;
 import ghidra.util.Msg;
-import ghidra.util.datastruct.LongObjectHashtable;
 import ghidra.util.exception.*;
 import ghidra.util.task.TaskMonitor;

@@ -122,7 +121,7 @@ public class ExternalManagerDB implements ManagerDB, ExternalManager {
        monitor.initialize(oldNameAdapter.getRecordCount());
        int cnt = 0;

-        LongObjectHashtable<String> nameMap = new LongObjectHashtable<>();
+        Map<Long, String> nameMap = new HashMap<>();

        RecordIterator iter = oldNameAdapter.getRecords();
        while (iter.hasNext()) {
@@ -18,7 +18,8 @@ package ghidra.program.database.map;
 import java.util.*;

 import ghidra.program.model.address.*;
-import ghidra.util.datastruct.*;
+import ghidra.util.datastruct.Range;
+import ghidra.util.datastruct.SortedRangeList;

 /**
  * AddressSetView implementation that handles image base changes. NOTE: THIS IMPLEMENTATION
@@ -33,8 +34,7 @@ public class NormalizedAddressSet implements AddressSetView {

    private AddressMap addrMap;

-    private LongObjectHashtable<SortedRangeList> baseLists =
-        new LongObjectHashtable<SortedRangeList>();
+    private Map<Long, SortedRangeList> baseLists = new HashMap<>();
    private ArrayList<Long> bases = new ArrayList<Long>();

    private Comparator<Long> baseComparator = new Comparator<Long>() {
@@ -108,7 +108,7 @@ public class NormalizedAddressSet implements AddressSetView {
     * Removes all addresses from this set.
     */
    public void clear() {
-        baseLists = new LongObjectHashtable<SortedRangeList>();
+        baseLists = new HashMap<>();
        bases = new ArrayList<Long>();
    }

@@ -251,9 +251,9 @@ public class NormalizedAddressSet implements AddressSetView {
    @Override
    public int getNumAddressRanges() {
        int n = 0;
-        long[] keys = baseLists.getKeys();
-        for (int i = 0; i < keys.length; i++) {
-            SortedRangeList list = baseLists.get(keys[i]);
+
+        for (long key : baseLists.keySet()) {
+            SortedRangeList list = baseLists.get(key);
            n += list.getNumRanges();
        }
        return n;
@@ -286,9 +286,8 @@ public class NormalizedAddressSet implements AddressSetView {
    @Override
    public long getNumAddresses() {
        long n = 0;
-        long[] keys = baseLists.getKeys();
-        for (int i = 0; i < keys.length; i++) {
-            SortedRangeList list = baseLists.get(keys[i]);
+        for (long key : baseLists.keySet()) {
+            SortedRangeList list = baseLists.get(key);
            n += list.getNumValues();
        }
        return n;
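Both converted loops in NormalizedAddressSet iterate keySet() and then look each key up again with get(key). Since only the values are consumed, values() gives the same traversal without the second hash lookup per key, and entrySet() is the equivalent when the key is also needed. A sketch of the values()-based form, with an int[] standing in for SortedRangeList:

import java.util.HashMap;
import java.util.Map;

public class TraversalDemo {
    public static void main(String[] args) {
        Map<Long, int[]> baseLists = new HashMap<>();
        baseLists.put(1L, new int[] { 1, 2, 3 });
        baseLists.put(2L, new int[] { 4 });

        int n = 0;
        for (int[] list : baseLists.values()) { // no per-key get() needed
            n += list.length;
        }
        System.out.println("total ranges: " + n); // 4
    }
}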
@@ -32,9 +32,7 @@ import ghidra.program.model.listing.ProgramContext;
 import ghidra.program.util.RangeMapAdapter;
 import ghidra.program.util.RegisterValueStore;
 import ghidra.util.Lock;
-import ghidra.util.datastruct.LongObjectHashtable;
 import ghidra.util.exception.CancelledException;
 import ghidra.util.exception.VersionException;
 import ghidra.util.task.TaskMonitor;

 /**
@@ -61,7 +59,7 @@ public class OldProgramContextDB implements ProgramContext, DefaultProgramContex
     * address ranges using the PropertyMap utilities.
     */
    private HashMap<String, Register> registersMap;
-    private LongObjectHashtable<AddressRangeMapDB> valueMaps;
+    private Map<Integer, AddressRangeMapDB> valueMaps;
    private Register baseContextRegister;
    protected Map<Register, RegisterValueStore> defaultRegisterValueMap;

@@ -88,7 +86,7 @@ public class OldProgramContextDB implements ProgramContext, DefaultProgramContex
        defaultRegisterValueMap = new HashMap<Register, RegisterValueStore>();

        registersMap = new HashMap<String, Register>();
-        valueMaps = new LongObjectHashtable<AddressRangeMapDB>();
+        valueMaps = new HashMap<>();
        registerSpaceSize = 0;

        for (Register register : registers) {
@@ -350,7 +348,7 @@ public class OldProgramContextDB implements ProgramContext, DefaultProgramContex
    public void invalidateCache(boolean all) throws IOException {
        lock.acquire();
        try {
-            valueMaps.removeAll();
+            valueMaps.clear();
        }
        finally {
            lock.release();
@@ -306,29 +306,11 @@ public abstract class DefaultPropertyMap implements PropertyMap {
     * @throws ClassNotFoundException if the class for the object being
     * read is not in the class path
     */
-    public void restoreProperties(ObjectInputStream ois) throws IOException, ClassNotFoundException {
+    public void restoreProperties(ObjectInputStream ois)
+            throws IOException, ClassNotFoundException {
        propertyMgr.restoreProperties(ois);
    }

-    /**
-     * Write all properties in the map to the given output stream.
-     * @throws IOException if there is a problem writing to the stream
-     */
-    public void saveAll(ObjectOutputStream out) throws IOException {
-        propertyMgr.saveAll(out);
-    }
-
-    /**
-     * Restore properties read from the given input stream.
-     * @param in input stream
-     * @throws IOException if there is a problem reading from the stream
-     * @throws ClassNotFoundException if the class for the object being
-     * read is not in the class path
-     */
-    public void restoreAll(ObjectInputStream in) throws IOException, ClassNotFoundException {
-        propertyMgr.restoreAll(in);
-    }
-
    private class AddressPropertyIterator implements AddressIterator {

        private LongIterator iter;
@@ -350,7 +332,8 @@ public abstract class DefaultPropertyMap implements PropertyMap {

        AddressPropertyIterator(Address start, Address end, boolean forward) {
            iter =
-                propertyMgr.getPropertyIterator(addrMap.getKey(start), addrMap.getKey(end), forward);
+                propertyMgr.getPropertyIterator(addrMap.getKey(start), addrMap.getKey(end),
+                    forward);
            this.forward = forward;

        }