Mirror of https://github.com/NationalSecurityAgency/ghidra.git (synced 2025-10-04 18:29:37 +02:00)

Merge branch 'master' of https://github.com/NationalSecurityAgency/ghidra into DocFix

Commit 8c35703cc4
77 changed files with 4546 additions and 2442 deletions
@@ -149,7 +149,7 @@ public class AssemblySentential<NT extends AssemblyNonTerminal> extends
return Collections.singleton(new WhiteSpaceParseToken(grammar, this, ""));
}
if (Character.isLetterOrDigit(buffer.charAt(b)) &&
Character.isLetterOrDigit(buffer.charAt(b - 1))) {
(b == 0 || Character.isLetterOrDigit(buffer.charAt(b - 1)))) {
return Collections.emptySet();
}
}
@@ -282,7 +282,7 @@ public class ProgramDB extends DomainObjectAdapterDB implements Program, ChangeM
if (monitor == null) {
monitor = TaskMonitorAdapter.DUMMY;
}

boolean success = false;
try {
int id = startTransaction("create program");

@@ -2141,8 +2141,9 @@ public class ProgramDB extends DomainObjectAdapterDB implements Program, ChangeM
* Translate language
* @param translator language translator, if null only re-disassembly will occur.
* @param newCompilerSpecID new compiler specification which corresponds to new language, may be null.
* @param monitor
* @throws LockException
* @param forceRedisassembly if true a redisassembly will be forced even if not required
* @param monitor task monitor
* @throws LockException if exclusive access is missing
*/
public void setLanguage(LanguageTranslator translator, CompilerSpecID newCompilerSpecID,
boolean forceRedisassembly, TaskMonitor monitor) throws LockException {

@@ -2153,7 +2154,7 @@ public class ProgramDB extends DomainObjectAdapterDB implements Program, ChangeM
try {
setEventsEnabled(false);
try {
boolean notifyCodeManager = true;
boolean redisassemblyRequired = true;
int oldLanguageVersion = languageVersion;
int oldLanguageMinorVersion = languageMinorVersion;
if (translator != null) {

@@ -2168,7 +2169,7 @@ public class ProgramDB extends DomainObjectAdapterDB implements Program, ChangeM
}
else if (!forceRedisassembly && language.getVersion() == languageVersion &&
language.getMinorVersion() == languageMinorVersion) {
notifyCodeManager = false; // compiler spec change only
redisassemblyRequired = false; // compiler spec change only
Msg.info(this, "Setting compiler spec for Program " + getName() + ": " +
compilerSpecID + " -> " + newCompilerSpecID);
}

@@ -2207,15 +2208,14 @@ public class ProgramDB extends DomainObjectAdapterDB implements Program, ChangeM
monitor.setProgress(0);
ProgramRegisterContextDB contextMgr =
(ProgramRegisterContextDB) getProgramContext();
if (translator != null) {
if (redisassemblyRequired) {
contextMgr.setLanguage(translator, compilerSpec, memoryManager, monitor);
}
else {
// force re-initialization
contextMgr.initializeDefaultValues(language, compilerSpec);
}

if (notifyCodeManager) {
if (redisassemblyRequired) {
Disassembler.clearUnimplementedPcodeWarnings(this, null, monitor);
repairContext(oldLanguageVersion, oldLanguageMinorVersion, translator, monitor);
monitor.setMessage("Updating instructions...");
@@ -16,8 +16,7 @@
package ghidra.program.database.data;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

import db.Record;

@@ -26,6 +25,7 @@ import ghidra.program.database.DatabaseObject;
import ghidra.program.model.data.*;
import ghidra.program.model.data.DataTypeConflictHandler.ConflictResult;
import ghidra.util.InvalidNameException;
import ghidra.util.Lock;
import ghidra.util.exception.AssertException;
import ghidra.util.exception.DuplicateNameException;
import ghidra.util.task.TaskMonitor;
@@ -41,6 +41,7 @@ class CategoryDB extends DatabaseObject implements Category {

private LazyLoadingCachingMap<String, CategoryDB> subcategoryMap;
private LazyLoadingCachingMap<String, DataType> dataTypeMap;
private ConflictMap conflictMap;

/**
* Category Constructor

@@ -57,19 +58,19 @@ class CategoryDB extends DatabaseObject implements Category {
this.name = name;
this.parent = parent;

subcategoryMap = new LazyLoadingCachingMap<>(mgr.lock, CategoryDB.class) {
subcategoryMap = new LazyLoadingCachingMap<>(mgr.lock) {
@Override
public Map<String, CategoryDB> loadMap() {
return buildSubcategoryMap();
}
};
dataTypeMap = new LazyLoadingCachingMap<>(mgr.lock, DataType.class) {
dataTypeMap = new LazyLoadingCachingMap<>(mgr.lock) {
@Override
public Map<String, DataType> loadMap() {
return createDataTypeMap();
}
};

conflictMap = new ConflictMap(mgr.lock);
}

/**
@@ -102,6 +103,7 @@ class CategoryDB extends DatabaseObject implements Category {
protected boolean refresh(Record rec) {
subcategoryMap.clear();
dataTypeMap.clear();
conflictMap.clear();

if (isRoot()) {
return true;

@@ -210,13 +212,26 @@ class CategoryDB extends DatabaseObject implements Category {
return map;
}

private String getBaseName(String dataTypeName) {
int indexOf = dataTypeName.indexOf(DataType.CONFLICT_SUFFIX);
if (indexOf <= 0) {
return dataTypeName;
}
return dataTypeName.substring(0, indexOf);
}

private boolean isConflictName(String dataTypeName) {
return dataTypeName.contains(DataType.CONFLICT_SUFFIX);
}

/**
* @see ghidra.program.model.data.Category#getCategories()
*/
@Override
public Category[] getCategories() {
validate(mgr.lock);
return subcategoryMap.valuesToArray();
Collection<CategoryDB> categories = subcategoryMap.values();
return categories.toArray(new Category[categories.size()]);
}

/**

@@ -225,7 +240,8 @@ class CategoryDB extends DatabaseObject implements Category {
@Override
public DataType[] getDataTypes() {
validate(mgr.lock);
return dataTypeMap.valuesToArray();
Collection<DataType> dataTypes = dataTypeMap.values();
return dataTypes.toArray(new DataType[dataTypes.size()]);
}

/**
@@ -587,19 +603,144 @@ class CategoryDB extends DatabaseObject implements Category {
}

void dataTypeRenamed(DataType childDataType, String oldName) {
dataTypeMap.remove(oldName);
dataTypeMap.put(childDataType.getName(), childDataType);
dataTypeRemoved(oldName);
dataTypeAdded(childDataType);
}

void dataTypeAdded(DataType childDataType) {
dataTypeMap.put(childDataType.getName(), childDataType);
void dataTypeAdded(DataType dataType) {
String dtName = dataType.getName();
dataTypeMap.put(dtName, dataType);
if (isConflictName(dtName)) {
conflictMap.addDataType(dataType);
}
}

void dataTypeRemoved(String dataTypeName) {
dataTypeMap.remove(dataTypeName);
if (isConflictName(dataTypeName)) {
conflictMap.removeDataTypeName(dataTypeName);
}
}

void categoryAdded(CategoryDB cat) {
subcategoryMap.put(cat.getName(), cat);
}

@Override
public List<DataType> getDataTypesByBaseName(String dataTypeName) {
List<DataType> list = new ArrayList<>();
String baseName = getBaseName(dataTypeName);

DataType baseType = dataTypeMap.get(baseName);
if (baseType != null) {
list.add(baseType);
}

List<DataType> relatedNameDataTypes = conflictMap.getDataTypesByBaseName(baseName);
list.addAll(relatedNameDataTypes);
return list;
}

/**
* Class to handle the complexities of having a map as the value in a LazyLoadingCachingMap
* This map uses the data type's base name as the key (i.e. all .conflict suffixes stripped off.)
* The value is another map that maps the actual data type's name to the data type. This map
* effectively provides an efficient way to get all data types in a category that have the
* same name, but possibly have had their name modified (by appending .conflict) to get around
* the requirement that names have to be unique in the same category.
*/
private class ConflictMap extends LazyLoadingCachingMap<String, Map<String, DataType>> {

ConflictMap(Lock lock) {
super(lock);
}

/**
* Creates a map of all data types whose name has a .conflict suffix where the key
* is the base name and {@link LazyLoadingCachingMap} the value is a map of actual name
* to data type. This mapping is
* maintained as a lazy cache map. This is only called by the super class when the
* cached needs to be populated and we are depending on it to acquire the necessary
* database lock. (See {@link LazyLoadingCachingMap#loadMap()}
* @return the loaded map
*/
@Override
protected Map<String, Map<String, DataType>> loadMap() {
Map<String, Map<String, DataType>> map = new HashMap<>();
Collection<DataType> values = dataTypeMap.values();
for (DataType dataType : values) {
String dataTypeName = dataType.getName();
if (isConflictName(dataTypeName)) {
String baseName = getBaseName(dataTypeName);
Map<String, DataType> innerMap =
map.computeIfAbsent(baseName, b -> new HashMap<>());
innerMap.put(dataTypeName, dataType);
}
}
return map;
}

/**
* Adds the data type to the conflict mapping structure. If the mapping is currently not
* loaded then this method can safely do nothing. This method is synchronized to provide
* thread safe access/manipulation of the map.
* @param dataType the data type to add to the mapping if the mapping is already loaded
*/
synchronized void addDataType(DataType dataType) {
// if the cache is not currently populated, don't need to do anything
Map<String, Map<String, DataType>> map = getMap();
if (map == null) {
return;
}

String dataTypeName = dataType.getName();
String baseName = getBaseName(dataTypeName);
Map<String, DataType> innerMap = map.computeIfAbsent(baseName, b -> new HashMap<>());
innerMap.put(dataTypeName, dataType);
}

/**
* Removes the data type with the given name from the conflict mapping structure. If the
* mapping is currently not loaded then this method can safely do nothing. This method is
* synchronized to provide thread safe access/manipulate of the map.
* @param dataTypeName the name of the data type to remove from this mapping
*/
synchronized void removeDataTypeName(String dataTypeName) {
Map<String, Map<String, DataType>> map = getMap();
if (map == null) {
return;
}
String baseName = getBaseName(dataTypeName);
Map<String, DataType> innerMap = map.get(baseName);
if (innerMap == null) {
return;
}
innerMap.remove(dataTypeName);
}

/**
* Returns a list of all data types that have conflict names for the given base name
* @param baseName the data type base name to search for (i.e. the .conflict suffix removed)
* @return a list of all conflict named data types that would have the given base name if
* no conflicts existed
*/
List<DataType> getDataTypesByBaseName(String baseName) {

// Note that the following call to get MUST NOT be in a synchronized block because
// it may trigger a loading of the cache which requires a database lock and you
// can't be synchronized on this class when acquiring a database lock or else a
// deadlock will occur.
Map<String, DataType> map = get(baseName);
if (map == null) {
return Collections.emptyList();
}

// the following must be synchronized so that the implied iterator can complete without
// another thread changing the map's values.
synchronized (this) {
return new ArrayList<>(map.values());
}
}

}
}
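The ConflictMap added above keys everything on a data type's base name. Here is a minimal standalone sketch of that two-level bookkeeping, not the Ghidra class itself; the demo names and plain String values are hypothetical, and DataType.CONFLICT_SUFFIX is assumed to be the literal ".conflict".

import java.util.*;

// Outer key: base name (".conflict..." stripped); inner map: actual name -> value.
public class ConflictIndexSketch {

	private static final String CONFLICT_SUFFIX = ".conflict"; // assumed value of DataType.CONFLICT_SUFFIX

	private final Map<String, Map<String, String>> byBaseName = new HashMap<>();

	static String getBaseName(String name) {
		int indexOf = name.indexOf(CONFLICT_SUFFIX);
		return indexOf <= 0 ? name : name.substring(0, indexOf);
	}

	static boolean isConflictName(String name) {
		return name.contains(CONFLICT_SUFFIX);
	}

	void add(String name, String value) {
		// only conflict-named entries are tracked, mirroring dataTypeAdded above
		if (isConflictName(name)) {
			byBaseName.computeIfAbsent(getBaseName(name), b -> new HashMap<>()).put(name, value);
		}
	}

	List<String> getByBaseName(String name) {
		Map<String, String> inner = byBaseName.get(getBaseName(name));
		return inner == null ? Collections.emptyList() : new ArrayList<>(inner.values());
	}

	public static void main(String[] args) {
		ConflictIndexSketch index = new ConflictIndexSketch();
		index.add("Foo.conflict", "first conflict");
		index.add("Foo.conflict1", "second conflict");
		System.out.println(index.getByBaseName("Foo")); // both conflict entries
	}
}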
@@ -141,7 +141,7 @@ class DataTypeComponentDB implements InternalDataTypeComponent {
}

@Override
public DataType getParent() {
public Composite getParent() {
return parent;
}
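The getParent() change above narrows the declared return type from DataType to Composite, which Java permits because an override may use a covariant return type. A tiny illustrative sketch with hypothetical names:

// Covariant return: an override may declare a narrower return type than the method it overrides.
interface Node {
	Node parent();
}

class TreeNode implements Node {
	private final TreeNode parent;

	TreeNode(TreeNode parent) {
		this.parent = parent;
	}

	@Override
	public TreeNode parent() { // narrower than Node, so callers no longer need a cast
		return parent;
	}
}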
@@ -774,7 +774,7 @@ abstract public class DataTypeManagerDB implements DataTypeManager {
private DataType resolveBitFieldDataType(BitFieldDataType bitFieldDataType,
DataTypeConflictHandler handler) {

// NOTE: When a bit-field is getting adding added it will get resolved more than once.
// NOTE: When a bit-field is getting added it will get resolved more than once.
// The first time we will ensure that the base data type, which may be a TypeDef, gets
// resolved. If the bit-offset is too large it will be set to 0
// with the expectation that it will get corrected during subsequent packing.

@@ -788,7 +788,8 @@ abstract public class DataTypeManagerDB implements DataTypeManager {
int storageSizeBits = 8 * storageSize;
if ((bitOffset + bitSize) > storageSizeBits) {
// should get recomputed during packing when used within aligned structure
bitOffset = getDataOrganization().isBigEndian() ? baseLengthBits - bitSize : 0;
int effectiveBitSize = Math.min(bitSize, baseLengthBits);
bitOffset = getDataOrganization().isBigEndian() ? baseLengthBits - effectiveBitSize : 0;
storageSize = baseLength;
}
try {

@@ -933,14 +934,12 @@ abstract public class DataTypeManagerDB implements DataTypeManager {
if (category == null) {
return null;
}
String namePrefix = dtName + DataType.CONFLICT_SUFFIX;
DataType[] dataTypes = category.getDataTypes();
for (DataType candidate : dataTypes) {
List<DataType> relatedByName = category.getDataTypesByBaseName(dtName);

for (DataType candidate : relatedByName) {
String candidateName = candidate.getName();
if (candidateName.startsWith(namePrefix)) {
if (!candidateName.equals(excludedName) && candidate.isEquivalent(dataType)) {
return candidate;
}
if (!candidateName.equals(excludedName) && candidate.isEquivalent(dataType)) {
return candidate;
}
}
return null;

@@ -3207,13 +3206,12 @@ abstract public class DataTypeManagerDB implements DataTypeManager {
lock.acquire();
try {
long[] ids = parentChildAdapter.getParentIds(childID);
// TODO: consider deduping ids using Set
List<DataType> dts = new ArrayList<>();
for (int i = 0; i < ids.length; i++) {
DataType dt = getDataType(ids[i]);
for (long id : ids) {
DataType dt = getDataType(id);
if (dt == null) {
// cleanup invalid records for missing parent
attemptRecordRemovalForParent(ids[i]);
attemptRecordRemovalForParent(id);
}
else {
dts.add(dt);
@@ -16,8 +16,7 @@
package ghidra.program.database.data;

import java.lang.ref.SoftReference;
import java.lang.reflect.Array;
import java.util.Map;
import java.util.*;

import ghidra.util.Lock;

@@ -39,15 +38,14 @@ public abstract class LazyLoadingCachingMap<K, V> {

private Lock lock;
private SoftReference<Map<K, V>> softRef;
private Class<V> valueClass;

protected LazyLoadingCachingMap(Lock lock, Class<V> valueClass) {
protected LazyLoadingCachingMap(Lock lock) {
this.lock = lock;
this.valueClass = valueClass;
}

/**
* This method will reload the map data from scratch.
* This method will reload the map data from scratch. Subclass may assume that the database
* lock has been acquired.
* @return a map containing all current key, value pairs.
*/
protected abstract Map<K, V> loadMap();

@@ -96,13 +94,13 @@ public abstract class LazyLoadingCachingMap<K, V> {
}
}

public V[] valuesToArray() {
/**
* Returns an unmodifiable view of the values in this map.
* @return an unmodifiable view of the values in this map.
*/
public Collection<V> values() {
Map<K, V> map = getOrLoadMap();
synchronized (this) {
@SuppressWarnings("unchecked")
V[] array = (V[]) Array.newInstance(valueClass, map.size());
return map.values().toArray(array);
}
return Collections.unmodifiableCollection(map.values());
}

private Map<K, V> getOrLoadMap() {

@@ -113,6 +111,15 @@ public abstract class LazyLoadingCachingMap<K, V> {
return map;
}
}

// We must get the database lock before calling loadMap(). Also, we can't get the
// database lock while having the synchronization lock for this class or a deadlock can
// occur, since the other methods may be called while the client has the db lock.
// Note: all other places where the map is being used or manipulated, it must be done
// while having the class's synchronization lock since the map itself is not thread safe.
// It should be safe here since it creates a new map and then in one operation it sets it
// as the map to be used elsewhere.

lock.acquire();
try {
map = getMap();

@@ -132,11 +139,12 @@ public abstract class LazyLoadingCachingMap<K, V> {
* "lock".
* @return the underlying map of key,value pairs or null if it is currently not loaded.
*/
private Map<K, V> getMap() {
protected Map<K, V> getMap() {
if (softRef == null) {
return null;
}
return softRef.get();
}

}
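The comments in this file spell out the lock ordering rule: take the database lock before loadMap(), and never while already holding the object's monitor. A simplified, self-contained sketch of that pattern follows; it is not the Ghidra class, and a plain ReentrantLock stands in for ghidra.util.Lock.

import java.lang.ref.SoftReference;
import java.util.*;
import java.util.concurrent.locks.ReentrantLock;

// Lazily loaded, softly referenced cache. The external (database) lock is taken before
// loadMap(), and never while holding this object's monitor, to avoid the deadlock the
// comments above describe.
public abstract class LazyCacheSketch<K, V> {

	private final ReentrantLock dbLock = new ReentrantLock(); // stand-in for ghidra.util.Lock
	private SoftReference<Map<K, V>> softRef;

	protected abstract Map<K, V> loadMap(); // called with dbLock held

	public V get(K key) {
		return getOrLoadMap().get(key);
	}

	public Collection<V> values() {
		Map<K, V> map = getOrLoadMap();
		synchronized (this) {
			return Collections.unmodifiableCollection(map.values());
		}
	}

	public synchronized void clear() {
		softRef = null;
	}

	private Map<K, V> getOrLoadMap() {
		Map<K, V> map = getMap();
		if (map != null) {
			return map;
		}
		dbLock.lock(); // acquire the db lock first; never while holding the monitor
		try {
			map = getMap();
			if (map == null) {
				map = loadMap();
				softRef = new SoftReference<>(map); // single assignment publishes the new map
			}
			return map;
		}
		finally {
			dbLock.unlock();
		}
	}

	private synchronized Map<K, V> getMap() {
		return softRef == null ? null : softRef.get();
	}
}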
@@ -25,6 +25,7 @@ import ghidra.program.model.data.*;
import ghidra.program.model.data.AlignedStructurePacker.StructurePackResult;
import ghidra.program.model.mem.MemBuffer;
import ghidra.util.Msg;
import ghidra.util.exception.AssertException;
import ghidra.util.exception.InvalidInputException;

/**

@@ -640,6 +641,13 @@ class StructureDB extends CompositeDB implements Structure {
}
}

/**
* Create copy of structure for target dtm (source archive information is discarded).
* WARNING! copying unaligned structures which contain bitfields can produce
* invalid results when switching endianess due to the differences in packing order.
* @param dtm target data type manager
* @return cloned structure
*/
@Override
public DataType copy(DataTypeManager dtm) {
StructureDataType struct =

@@ -649,6 +657,13 @@ class StructureDB extends CompositeDB implements Structure {
return struct;
}

/**
* Create cloned structure for target dtm preserving source archive information.
* WARNING! cloning unaligned structures which contain bitfields can produce
* invalid results when switching endianess due to the differences in packing order.
* @param dtm target data type manager
* @return cloned structure
*/
@Override
public DataType clone(DataTypeManager dtm) {
StructureDataType struct =

@@ -891,12 +906,28 @@ class StructureDB extends CompositeDB implements Structure {
@Override
public DataTypeComponent insertAtOffset(int offset, DataType dataType, int length, String name,
String comment) {

if (offset < 0) {
throw new IllegalArgumentException("Offset cannot be negative.");
}

if (dataType instanceof BitFieldDataType) {
BitFieldDataType bfDt = (BitFieldDataType) dataType;
if (length <= 0) {
length = dataType.getLength();
}
try {
return insertBitFieldAt(offset, length, bfDt.getBitOffset(), bfDt.getBaseDataType(),
bfDt.getDeclaredBitSize(), name, comment);
}
catch (InvalidDataTypeException e) {
throw new AssertException(e);
}
}

lock.acquire();
try {
checkDeleted();
if (offset < 0) {
throw new IllegalArgumentException("Offset cannot be negative.");
}
validateDataType(dataType);

dataType = resolve(dataType);

@@ -966,6 +997,7 @@ class StructureDB extends CompositeDB implements Structure {
if (ordinal < 0 || ordinal >= numComponents) {
throw new ArrayIndexOutOfBoundsException(ordinal);
}

validateDataType(dataType);

DataTypeComponent origDtc = getComponent(ordinal);

@@ -1009,9 +1041,7 @@ class StructureDB extends CompositeDB implements Structure {
throw new IllegalArgumentException(
"Offset " + offset + " is beyond end of structure (" + structLength + ").");
}
if (dataType instanceof BitFieldDataType) {
throw new IllegalArgumentException("Components may not be replaced with a bit-field");
}

lock.acquire();
try {
checkDeleted();

@@ -1085,19 +1115,14 @@ class StructureDB extends CompositeDB implements Structure {
componentAdapter.removeRecord(dtc.getKey());
}
components.clear();
numComponents = 0;
structLength = 0;

if (flexibleArrayComponent != null) {
flexibleArrayComponent.getDataType().removeParent(this);
componentAdapter.removeRecord(flexibleArrayComponent.getKey());
flexibleArrayComponent = null;
}
if (struct.isNotYetDefined()) {
numComponents = 0;
structLength = 0;
}
else {
structLength = struct.getLength();
numComponents = isInternallyAligned() ? 0 : structLength;
}

setAlignment(struct, false);

@@ -1154,14 +1179,17 @@ class StructureDB extends CompositeDB implements Structure {

private void doReplaceWithUnaligned(Structure struct) throws IOException {
// assumes components is clear and that alignment characteristics have been set.
if (struct.isNotYetDefined()) {
return;
}

// NOTE: unaligned bitfields should remain unchanged when
// transitioning endianess even though it makes little sense.
// Unaligned structures are not intended to be portable!
structLength = struct.getLength();
numComponents = structLength;

DataTypeComponent[] otherComponents = struct.getDefinedComponents();
for (int i = 0; i < otherComponents.length; i++) {
DataTypeComponent dtc = otherComponents[i];

DataType dt = resolve(dtc.getDataType());
checkAncestry(dt);
@@ -72,12 +72,12 @@ public class ProgramRegisterContextDB extends AbstractStoredProgramContext imple
}

if (openMode == DBConstants.UPGRADE && oldContextDataExists) {
// TODO: Make sure upgrade is working correctly before uncommenting
// try {
// OldProgramContextDB.removeOldContextData(dbHandle);
// } catch (IOException e) {
// errorHandler.dbError(e);
// }
try {
OldProgramContextDB.removeOldContextData(dbHandle);
}
catch (IOException e) {
errorHandler.dbError(e);
}
}
}

@@ -161,6 +161,12 @@ public class ProgramRegisterContextDB extends AbstractStoredProgramContext imple
}
}

/**
* Intialize context with default values defined by pspec and cspec.
* NOTE: cspec values take precedence
* @param lang processor language
* @param compilerSpec compiler specification
*/
public void initializeDefaultValues(Language lang, CompilerSpec compilerSpec) {
defaultRegisterValueMap.clear();
lang.applyContextSettings(this);

@@ -288,9 +294,31 @@ public class ProgramRegisterContextDB extends AbstractStoredProgramContext imple
}
}

/**
* Perform context upgrade due to a language change
* @param translator language translator required by major upgrades (may be null)
* @param newCompilerSpec new compiler specification
* @param programMemory program memory
* @param monitor task monitor
* @throws CancelledException thrown if monitor cancelled
*/
public void setLanguage(LanguageTranslator translator, CompilerSpec newCompilerSpec,
AddressSetView programMemory, TaskMonitor monitor) throws CancelledException {

if (translator == null) {
Language lang = program.getLanguage();
boolean clearContext = Boolean.valueOf(
lang.getProperty(GhidraLanguagePropertyKeys.RESET_CONTEXT_ON_UPGRADE));
if (clearContext) {
RegisterValueStore store = registerValueMap.get(baseContextRegister);
if (store != null) {
store.clearAll();
}
}
initializeDefaultValues(lang, newCompilerSpec);
return;
}

Language newLanguage = translator.getNewLanguage();

// Sort the registers by size so that largest come first.

@@ -309,8 +337,11 @@ public class ProgramRegisterContextDB extends AbstractStoredProgramContext imple
continue;
}

boolean clearContext = register.isProcessorContext() && Boolean.valueOf(
newLanguage.getProperty(GhidraLanguagePropertyKeys.RESET_CONTEXT_ON_UPGRADE));

// Update storage range map
if (!store.setLanguage(translator, monitor)) {
if (clearContext || !store.setLanguage(translator, monitor)) {
// Clear and remove old register value store
Msg.warn(this,
"WARNING! Discarding all context for register " + register.getName());
@@ -1150,12 +1150,12 @@ public class Disassembler implements DisassemblerConflictHandler {
throws InsufficientBytesException, UnknownInstructionException,
AddressOverflowException, NestedDelaySlotException {

List<PseudoInstruction> delaySlotList = parseDelaySlots(inst, blockMemBuffer, block);

if (followFlow) {
processInstructionFlows(inst, block);
}

List<PseudoInstruction> delaySlotList = parseDelaySlots(inst, blockMemBuffer, block);

block.addInstruction(inst);

if (delaySlotList != null) {
@@ -174,9 +174,6 @@ class DisassemblerQueue {
branchFlow = currentBranchQueue.first();
currentBranchQueue.remove(branchFlow);
}
if (processedBranchFlows.contains(branchFlow)) {
continue;
}
processedBranchFlows.add(branchFlow);

Address blockAddr = branchFlow.getDestinationAddress();
@@ -15,6 +15,8 @@
*/
package ghidra.program.model.data;

import java.util.List;

import ghidra.util.InvalidNameException;
import ghidra.util.exception.DuplicateNameException;
import ghidra.util.task.TaskMonitor;

@@ -48,6 +50,19 @@ public interface Category extends Comparable<Category> {
*/
public abstract DataType[] getDataTypes();

/**
* Get all data types in this category whose base name matches the base name of the given name.
* The base name of a name is the first part of the string up to where the first ".conflict"
* occurs. In other words, finds all data types whose name matches the given name once
* any conflict suffixes have been removed from both the given name and the data types
* that are being scanned.
* @param name the name for which to get conflict related data types in this category. Note: the
* name that is passed in will be normalized to its base name, so you may pass in names with .conflict
* appended as a convenience.
* @return a list of data types that have the same base name as the base name of the given name
*/
public abstract List<DataType> getDataTypesByBaseName(String name);

/**
* Adds the given datatype to this category.
* @param dt the datatype to add to this category.
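The new getDataTypesByBaseName javadoc above describes matching after normalizing both sides to their base name. A small self-contained illustration of that rule; the names are hypothetical and plain Strings stand in for DataType instances, whereas a real call would be category.getDataTypesByBaseName("Foo").

import java.util.*;
import java.util.stream.Collectors;

public class BaseNameMatchDemo {

	// Everything before the first ".conflict" is the base name.
	static String baseName(String name) {
		int i = name.indexOf(".conflict");
		return i <= 0 ? name : name.substring(0, i);
	}

	public static void main(String[] args) {
		List<String> inCategory = List.of("Foo", "Foo.conflict", "Foo.conflict1", "Bar");
		String query = "Foo.conflict"; // normalized to "Foo", so passing a conflict name also works
		List<String> matches = inCategory.stream()
				.filter(n -> baseName(n).equals(baseName(query)))
				.collect(Collectors.toList());
		System.out.println(matches); // [Foo, Foo.conflict, Foo.conflict1]
	}
}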
@@ -196,9 +196,8 @@ public interface Structure extends Composite {
public void deleteAtOffset(int offset);

/**
* Remove all components from this structure, effectively setting the
* length to zero.
*
* Remove all components from this structure (including flex-array),
* effectively setting the length to zero.
*/
public void deleteAll();
@@ -23,6 +23,7 @@ import ghidra.program.model.data.AlignedStructurePacker.StructurePackResult;
import ghidra.program.model.mem.MemBuffer;
import ghidra.util.Msg;
import ghidra.util.UniversalID;
import ghidra.util.exception.AssertException;
import ghidra.util.exception.InvalidInputException;

/**

@@ -297,9 +298,25 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur
@Override
public DataTypeComponentImpl insertAtOffset(int offset, DataType dataType, int length,
String componentName, String comment) {

if (offset < 0) {
throw new IllegalArgumentException("Offset cannot be negative.");
}

if (dataType instanceof BitFieldDataType) {
BitFieldDataType bfDt = (BitFieldDataType) dataType;
if (length <= 0) {
length = dataType.getLength();
}
try {
return insertBitFieldAt(offset, length, bfDt.getBitOffset(), bfDt.getBaseDataType(),
bfDt.getDeclaredBitSize(), componentName, comment);
}
catch (InvalidDataTypeException e) {
throw new AssertException(e);
}
}

validateDataType(dataType);

dataType = dataType.clone(dataMgr);

@@ -524,7 +541,7 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur
}

@Override
public DataTypeComponent insertBitFieldAt(int byteOffset, int byteWidth, int bitOffset,
public DataTypeComponentImpl insertBitFieldAt(int byteOffset, int byteWidth, int bitOffset,
DataType baseDataType, int bitSize, String componentName, String comment)
throws InvalidDataTypeException {

@@ -847,6 +864,13 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur
return available;
}

/**
* Create copy of structure for target dtm (source archive information is discarded).
* WARNING! copying unaligned structures which contain bitfields can produce
* invalid results when switching endianess due to the differences in packing order.
* @param dtm target data type manager
* @return cloned structure
*/
@Override
public DataType copy(DataTypeManager dtm) {
StructureDataType struct = new StructureDataType(categoryPath, getName(), getLength(), dtm);

@@ -855,6 +879,13 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur
return struct;
}

/**
* Create cloned structure for target dtm preserving source archive information.
* WARNING! cloning unaligned structures which contain bitfields can produce
* invalid results when switching endianess due to the differences in packing order.
* @param dtm target data type manager
* @return cloned structure
*/
@Override
public DataType clone(DataTypeManager dtm) {
if (dataMgr == dtm) {

@@ -907,15 +938,9 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur
int oldLength = structLength;

components.clear();
structLength = 0;
numComponents = 0;
flexibleArrayComponent = null;
if (struct.isNotYetDefined()) {
structLength = 0;
numComponents = 0;
}
else {
structLength = struct.getLength();
numComponents = isInternallyAligned() ? 0 : structLength;
}

setAlignment(struct);

@@ -950,14 +975,17 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur

private void doReplaceWithUnaligned(Structure struct) {
// assumes components is clear and that alignment characteristics have been set.
if (struct.isNotYetDefined()) {
return;
}

// NOTE: unaligned bitfields should remain unchanged when
// transitioning endianess even though it makes little sense.
// Unaligned structures are not intended to be portable!
structLength = struct.getLength();
numComponents = structLength;

DataTypeComponent[] otherComponents = struct.getDefinedComponents();
for (int i = 0; i < otherComponents.length; i++) {
DataTypeComponent dtc = otherComponents[i];

DataType dt = dtc.getDataType().clone(dataMgr);
checkAncestry(dt);

@@ -1126,9 +1154,6 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur
if (index < 0 || index >= numComponents) {
throw new ArrayIndexOutOfBoundsException(index);
}
if (dataType instanceof BitFieldDataType) {
throw new IllegalArgumentException("Components may not be replaced with a bit-field");
}

validateDataType(dataType);

@@ -1168,9 +1193,6 @@ public class StructureDataType extends CompositeDataTypeImpl implements Structur
throw new IllegalArgumentException(
"Offset " + offset + " is beyond end of structure (" + structLength + ").");
}
if (dataType instanceof BitFieldDataType) {
throw new IllegalArgumentException("Components may not be replaced with a bit-field");
}

validateDataType(dataType);
@@ -112,4 +112,11 @@ public final class GhidraLanguagePropertyKeys {
* following the call. Non-returning functions can be detected in many cases.
*/
public static final String ENABLE_NO_RETURN_ANALYSIS = "enableNoReturnAnalysis";

/**
* Property to indicate that all stored instruction context should be cleared
* during a language upgrade operation which requires redisassembly.
* NOTE: This is an experimental concept which may be removed in the future
*/
public static final String RESET_CONTEXT_ON_UPGRADE = "resetContextOnUpgrade";
}
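The new RESET_CONTEXT_ON_UPGRADE key is consumed in ProgramRegisterContextDB.setLanguage via Boolean.valueOf(lang.getProperty(...)). A small sketch of why a language that never defines the property keeps its stored context by default; the Map below stands in for a language's property set, only the key literal is taken from the constant above.

import java.util.Map;

public class ResetContextPropertyDemo {

	// Mirrors GhidraLanguagePropertyKeys.RESET_CONTEXT_ON_UPGRADE
	static final String RESET_CONTEXT_ON_UPGRADE = "resetContextOnUpgrade";

	public static void main(String[] args) {
		// Property not defined: the lookup yields null, and Boolean.valueOf(null) is false,
		// so stored context is kept.
		Map<String, String> props = Map.of();
		boolean clearContext = Boolean.valueOf(props.get(RESET_CONTEXT_ON_UPGRADE));
		System.out.println(clearContext); // false

		// A language that opts in to the experimental behavior.
		props = Map.of(RESET_CONTEXT_ON_UPGRADE, "true");
		clearContext = Boolean.valueOf(props.get(RESET_CONTEXT_ON_UPGRADE));
		System.out.println(clearContext); // true, context is cleared before defaults are re-applied
	}
}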
@@ -120,16 +120,22 @@ public class VariableOffset {
}

/**
* Get list of markup objects
* @return list of markup objects
* Returns the data type access portion of this variable offset as a string
* @return the text
*/
public List<Object> getObjects() {
public String getDataTypeDisplayText() {
List<Object> objects = getObjects(false);
LabelString labelString = (LabelString) objects.get(0);
return labelString.toString();
}

private List<Object> getObjects(boolean showScalarAdjustment) {

DataType dt = variable.getDataType();
StringBuffer name = new StringBuffer(variable.getName());

long scalarAdjustment = 0;
if (includeScalarAdjustment && (replacedElement instanceof Scalar)) {
if (showScalarAdjustment && (replacedElement instanceof Scalar)) {
Scalar s = (Scalar) replacedElement;
scalarAdjustment = variable.isStackVariable() ? s.getSignedValue() : s.getValue();
scalarAdjustment -= offset;

@@ -214,6 +220,14 @@ public class VariableOffset {
return list;
}

/**
* Get list of markup objects
* @return list of markup objects
*/
public List<Object> getObjects() {
return getObjects(includeScalarAdjustment);
}

public Variable getVariable() {
return variable;
}