Merge remote-tracking branch 'origin/GP-2-dragonmacher-spelling-fixes'

This commit is contained in:
Ryan Kurtz 2021-10-27 13:38:40 -04:00
commit 2ef4f26120
16 changed files with 312 additions and 301 deletions

View file

@ -47,7 +47,7 @@ public class DataTypeMergeManager implements MergeResolver {
// Each of the following is a choice or possible resolution when merging data types. // Each of the following is a choice or possible resolution when merging data types.
static final int CANCELED = -2; // user canceled the merge operation static final int CANCELED = -2; // user canceled the merge operation
static final int ASK_USER = -1;// prompt the user to choose resolution static final int ASK_USER = -1;// prompt the user to choose resolution
static final int OPTION_LATEST = 0; // Latest static final int OPTION_LATEST = 0; // Latest
static final int OPTION_MY = 1; // My change static final int OPTION_MY = 1; // My change
static final int OPTION_ORIGINAL = 2; // Original static final int OPTION_ORIGINAL = 2; // Original
@ -96,7 +96,7 @@ public class DataTypeMergeManager implements MergeResolver {
* @param myDomainObject the program requesting to be checked in. * @param myDomainObject the program requesting to be checked in.
* @param originalDomainObject the program that was checked out. * @param originalDomainObject the program that was checked out.
* @param latestDomainObject the latest checked-in version of the program. * @param latestDomainObject the latest checked-in version of the program.
* @param latestChanges the address set of changes between original and latest versioned program. * @param latestChanges the address set of changes between original and latest versioned program.
* @param myChanges the address set of changes between original and my modified program. * @param myChanges the address set of changes between original and my modified program.
*/ */
public DataTypeMergeManager(DomainObjectMergeManager mergeManager, public DataTypeMergeManager(DomainObjectMergeManager mergeManager,
@ -132,7 +132,7 @@ public class DataTypeMergeManager implements MergeResolver {
public void apply() { public void apply() {
if (catMergePanel != null && catMergePanel.isVisible()) { if (catMergePanel != null && catMergePanel.isVisible()) {
conflictOption = catMergePanel.getSelectedOption(); conflictOption = catMergePanel.getSelectedOption();
// If the "Use For All" check box is selected // If the "Use For All" check box is selected
// then save the option chosen for this conflict type. // then save the option chosen for this conflict type.
if (catMergePanel.getUseForAll()) { if (catMergePanel.getUseForAll()) {
categoryChoice = conflictOption; categoryChoice = conflictOption;
@ -140,7 +140,7 @@ public class DataTypeMergeManager implements MergeResolver {
} }
else if (dtMergePanel != null && dtMergePanel.isVisible()) { else if (dtMergePanel != null && dtMergePanel.isVisible()) {
conflictOption = dtMergePanel.getSelectedOption(); conflictOption = dtMergePanel.getSelectedOption();
// If the "Use For All" check box is selected // If the "Use For All" check box is selected
// then save the option chosen for this conflict type. // then save the option chosen for this conflict type.
if (dtMergePanel.getUseForAll()) { if (dtMergePanel.getUseForAll()) {
dataTypeChoice = conflictOption; dataTypeChoice = conflictOption;
@ -148,7 +148,7 @@ public class DataTypeMergeManager implements MergeResolver {
} }
else { else {
conflictOption = archiveMergePanel.getSelectedOption(); conflictOption = archiveMergePanel.getSelectedOption();
// If the "Use For All" check box is selected // If the "Use For All" check box is selected
// then save the option chosen for this conflict type. // then save the option chosen for this conflict type.
if (archiveMergePanel.getUseForAll()) { if (archiveMergePanel.getUseForAll()) {
sourceArchiveChoice = conflictOption; sourceArchiveChoice = conflictOption;
@ -259,7 +259,7 @@ public class DataTypeMergeManager implements MergeResolver {
/** /**
* For JUnit testing only, set the option for resolving a conflict. * For JUnit testing only, set the option for resolving a conflict.
* @param option forced conflict resolution option * @param option forced conflict resolution option
*/ */
void setConflictResolution(int option) { void setConflictResolution(int option) {
conflictOption = option; conflictOption = option;
@ -548,7 +548,7 @@ public class DataTypeMergeManager implements MergeResolver {
changeSourceArchive(id); changeSourceArchive(id);
} }
// Make sure the change time is updated (even if keeping the Latest version) // Make sure the change time is updated (even if keeping the Latest version)
// since a conflict was resolved for the data type. // since a conflict was resolved for the data type.
DataType resultDt = dtms[RESULT].getDataType(id); DataType resultDt = dtms[RESULT].getDataType(id);
if (resultDt != null) { if (resultDt != null) {
@ -687,7 +687,7 @@ public class DataTypeMergeManager implements MergeResolver {
} }
/** /**
* Update the data type name/category path in RESULT if it exists. * Update the data type name/category path in RESULT if it exists.
* If it does not exist, add it to RESULT. * If it does not exist, add it to RESULT.
* @param id id of data type * @param id id of data type
* @param dt data type to use as the source name and category path * @param dt data type to use as the source name and category path
@ -751,7 +751,7 @@ public class DataTypeMergeManager implements MergeResolver {
/** /**
* Set category path. If name conflict occurs within new category * Set category path. If name conflict occurs within new category
* the specified dt will remain within its current category * the specified dt will remain within its current category
 * @param dt datatype whoose category is to changed * @param dt datatype whose category is to be changed
* @param newPath new category path * @param newPath new category path
*/ */
private void setCategoryPath(DataType dt, CategoryPath newPath) { private void setCategoryPath(DataType dt, CategoryPath newPath) {
@ -842,7 +842,7 @@ public class DataTypeMergeManager implements MergeResolver {
* *
* @param dataTypeID the ID (key) of the data type to be added. * @param dataTypeID the ID (key) of the data type to be added.
* @param dataType the data type to be added. * @param dataType the data type to be added.
* @param resolvedDataTypes table which maps the dataTypeID to the resulting data type within * @param resolvedDataTypes table which maps the dataTypeID to the resulting data type within
* this data type manager. * this data type manager.
* @return the resulting data type in this data type manager. * @return the resulting data type in this data type manager.
*/ */
@ -888,7 +888,7 @@ public class DataTypeMergeManager implements MergeResolver {
} }
/** /**
* Get the resolved data type from the given table; * Get the resolved data type from the given table;
* If the data type has not been resolved yet, then use the one from * If the data type has not been resolved yet, then use the one from
* the results if the id was not added in MY program. * the results if the id was not added in MY program.
* @param id id of data type * @param id id of data type
@ -906,12 +906,12 @@ public class DataTypeMergeManager implements MergeResolver {
DataType resolvedDt = resolvedDataTypes.get(baseID); DataType resolvedDt = resolvedDataTypes.get(baseID);
if (resolvedDt == null) { if (resolvedDt == null) {
// Haven't resolved this yet. // Haven't resolved this yet.
// use dt from results // use dt from results
if (!myDtAddedList.contains(Long.valueOf(baseID))) { if (!myDtAddedList.contains(Long.valueOf(baseID))) {
resolvedDt = dtms[RESULT].getDataType(baseID); resolvedDt = dtms[RESULT].getDataType(baseID);
if (resolvedDt == null) { if (resolvedDt == null) {
if (origDtConflictList.contains(Long.valueOf(baseID))) { if (origDtConflictList.contains(Long.valueOf(baseID))) {
// was deleted, but add it back so we can create // was deleted, but add it back so we can create
// data types depending on it; will get resolved later // data types depending on it; will get resolved later
resolvedDt = addDataType(baseID, baseDt, resolvedDataTypes); resolvedDt = addDataType(baseID, baseDt, resolvedDataTypes);
} }
@ -1156,7 +1156,7 @@ public class DataTypeMergeManager implements MergeResolver {
else { else {
// must have been deleted in LATEST // must have been deleted in LATEST
// put an entry in the fixup list if this is a conflict. // put an entry in the fixup list if this is a conflict.
// NOTE: This may also be caused by a replaced datatype but // NOTE: This may also be caused by a replaced datatype but
 // we have no indication as to what the replacement was // we have no indication as to what the replacement was
deletedInLatest = true; deletedInLatest = true;
} }
@ -1583,7 +1583,7 @@ public class DataTypeMergeManager implements MergeResolver {
} }
/** /**
* Process categories that were moved in MY program, but are not * Process categories that were moved in MY program, but are not
* conflicts, i.e., not renamed, moved, or deleted in LATEST. * conflicts, i.e., not renamed, moved, or deleted in LATEST.
* @param id category ID * @param id category ID
*/ */
@ -1608,7 +1608,7 @@ public class DataTypeMergeManager implements MergeResolver {
} }
/** /**
* Process categories that were deleted in MY program, but are not * Process categories that were deleted in MY program, but are not
* conflicts, i.e., not renamed, moved, or deleted in LATEST. * conflicts, i.e., not renamed, moved, or deleted in LATEST.
* @param id category ID * @param id category ID
*/ */
@ -1617,7 +1617,7 @@ public class DataTypeMergeManager implements MergeResolver {
if (myCat == null) { if (myCat == null) {
Category resultCat = dtms[RESULT].getCategory(id); Category resultCat = dtms[RESULT].getCategory(id);
if (resultCat != null) { if (resultCat != null) {
// check added data types that have this category path as // check added data types that have this category path as
// the parent // the parent
if (!isParent(resultCat.getCategoryPath())) { if (!isParent(resultCat.getCategoryPath())) {
resultCat.getParent().removeCategory(resultCat.getName(), currentMonitor); resultCat.getParent().removeCategory(resultCat.getName(), currentMonitor);
@ -1662,7 +1662,7 @@ public class DataTypeMergeManager implements MergeResolver {
throw new AssertException("Got DuplicateNameException"); throw new AssertException("Got DuplicateNameException");
} }
catch (IllegalArgumentException e) { catch (IllegalArgumentException e) {
// cannot move category // cannot move category
return; return;
} }
} }
@ -2040,7 +2040,7 @@ public class DataTypeMergeManager implements MergeResolver {
private void deleteLatestCategory(Category latestCat) { private void deleteLatestCategory(Category latestCat) {
// delete the category from results program if the // delete the category from results program if the
// paths on the data types in LATEST are different // paths on the data types in LATEST are different
// from path on the data types in MY; // from path on the data types in MY;
DataType[] dts = latestCat.getDataTypes(); DataType[] dts = latestCat.getDataTypes();
boolean doDelete = true; boolean doDelete = true;
if (dts.length > 0) { if (dts.length > 0) {
@ -2204,11 +2204,11 @@ public class DataTypeMergeManager implements MergeResolver {
/** /**
* See if there is a data type in the result file that matches My data type based on * See if there is a data type in the result file that matches My data type based on
* name, path and contents. * name, path and contents.
* If there is a data type that is the same then return true. * If there is a data type that is the same then return true.
* @param myDtID the database ID (key) for My data type. * @param myDtID the database ID (key) for My data type.
* @param myDt My data type. * @param myDt My data type.
* @return true if the same named and equivalent data type is found in the result * @return true if the same named and equivalent data type is found in the result
* data type manager. * data type manager.
*/ */
private boolean equivalentDataTypeFound(long myDtID, DataType myDt) { private boolean equivalentDataTypeFound(long myDtID, DataType myDt) {
@ -2222,8 +2222,9 @@ public class DataTypeMergeManager implements MergeResolver {
UniversalID resultDtUniversalID = resultDt.getUniversalID(); UniversalID resultDtUniversalID = resultDt.getUniversalID();
UniversalID myDtUniversalID = myDt.getUniversalID(); UniversalID myDtUniversalID = myDt.getUniversalID();
// UniversalID can be null if data type is BuiltIn. // UniversalID can be null if data type is BuiltIn.
if (!resultSourceArchive.getSourceArchiveID().equals( if (!resultSourceArchive.getSourceArchiveID()
mySourceArchive.getSourceArchiveID()) || .equals(
mySourceArchive.getSourceArchiveID()) ||
!Objects.equals(resultDtUniversalID, myDtUniversalID)) { !Objects.equals(resultDtUniversalID, myDtUniversalID)) {
return false; return false;
} }
@ -2237,7 +2238,7 @@ public class DataTypeMergeManager implements MergeResolver {
private void cleanUpDataTypes() { private void cleanUpDataTypes() {
// clean up data types // clean up data types
List<Long> keys = new ArrayList<Long>(cleanupPlaceHolderList.keySet()); List<Long> keys = new ArrayList<>(cleanupPlaceHolderList.keySet());
for (long key : keys) { for (long key : keys) {
CleanUpInfo cleanUpInfo = cleanupPlaceHolderList.get(key); CleanUpInfo cleanUpInfo = cleanupPlaceHolderList.get(key);
cleanUpInfo.cleanUp(); cleanUpInfo.cleanUp();
@ -2506,7 +2507,7 @@ public class DataTypeMergeManager implements MergeResolver {
} }
/** /**
* Determines the number of contiguous undefined bytes in this structure starting * Determines the number of contiguous undefined bytes in this structure starting
* at the indicated component ordinal. * at the indicated component ordinal.
* @param struct the structure to check. * @param struct the structure to check.
* @param ordinal the ordinal of the component where checking for undefined bytes should begin. * @param ordinal the ordinal of the component where checking for undefined bytes should begin.
@ -2649,11 +2650,6 @@ public class DataTypeMergeManager implements MergeResolver {
} }
} }
/**
* @param compID
* @param dataTypeManager
* @return
*/
private DataType resolve(long id, DataTypeManager dtm, private DataType resolve(long id, DataTypeManager dtm,
Map<Long, DataType> resolvedDataTypes) { Map<Long, DataType> resolvedDataTypes) {
DataType dt = getResolvedComponent(id, resolvedDataTypes); DataType dt = getResolvedComponent(id, resolvedDataTypes);
@ -2943,9 +2939,9 @@ public class DataTypeMergeManager implements MergeResolver {
} }
/** /**
* Processes my data types that were added and determines whether each is actually a * Processes my data types that were added and determines whether each is actually a
* conflict, an added data type, or a changed data type relative to the Latest check in. * conflict, an added data type, or a changed data type relative to the Latest check in.
* @param myDtAdds * @param myDtAdds the data type IDs
*/ */
private void processAddIDs(long[] myDtAdds) { private void processAddIDs(long[] myDtAdds) {
myDtAddedList = new ArrayList<>(); myDtAddedList = new ArrayList<>();
@ -3219,7 +3215,7 @@ public class DataTypeMergeManager implements MergeResolver {
* or components were resolved. * or components were resolved.
* @param id id of data type needed to be fixed up * @param id id of data type needed to be fixed up
* @param compID id of either component or base type * @param compID id of either component or base type
* @param index offset into non-packed structure, or ordinal into union or packed * @param index offset into non-packed structure, or ordinal into union or packed
* structure; or parameter/return ordinal; for other data types index is not used (specify -1). * structure; or parameter/return ordinal; for other data types index is not used (specify -1).
* @param resolvedDataTypes hashtable used for resolving the data type * @param resolvedDataTypes hashtable used for resolving the data type
*/ */
@ -3329,7 +3325,7 @@ public class DataTypeMergeManager implements MergeResolver {
/** /**
* *
* @param index offset into non-packed structure, or ordinal into union or packed * @param index offset into non-packed structure, or ordinal into union or packed
* structure; for other data types, offset is not used (specify -1) * structure; for other data types, offset is not used (specify -1)
* @param resolvedDataTypes hashtable used for resolving the data type * @param resolvedDataTypes hashtable used for resolving the data type
*/ */

View file

@ -24,47 +24,47 @@ public class DatabaseUtils {
private DatabaseUtils() { private DatabaseUtils() {
} }
/** /**
* Reassign the long key assigned to a contiguous group of records within a table. * Reassign the long key assigned to a contiguous group of records within a table.
* A shift in the key value is computed as the difference of oldStart and newStart. * A shift in the key value is computed as the difference of oldStart and newStart.
* Existing records whoose keys lie within the new range will be removed prior to * Existing records whose keys lie within the new range will be removed prior to
* moving the target set of records. * moving the target set of records.
* @param table table within which records should be moved. * @param table table within which records should be moved.
* @param oldStart old key value for start of range * @param oldStart old key value for start of range
* @param newStart new key value for start of range * @param newStart new key value for start of range
* @param size determines the range of keys to be moved (oldStart to oldStart+size-1, inclusive) * @param size determines the range of keys to be moved (oldStart to oldStart+size-1, inclusive)
* @throws IOException * @throws IOException if there is an error moving the records
*/ */
public static void moveRecords(Table table, long oldStart, long newStart, long size) throws IOException{ public static void moveRecords(Table table, long oldStart, long newStart, long size)
throws IOException {
if (oldStart == newStart) { if (oldStart == newStart) {
return; return;
} }
if (size <= 0) { if (size <= 0) {
throw new IllegalArgumentException("size must be > 0"); throw new IllegalArgumentException("size must be > 0");
} }
if ((oldStart + size -1 < 0) || (newStart+size-1 <0)){ if ((oldStart + size - 1 < 0) || (newStart + size - 1 < 0)) {
throw new IllegalArgumentException("Illegal range: end range overflow"); throw new IllegalArgumentException("Illegal range: end range overflow");
} }
DBHandle tmp = new DBHandle(); DBHandle tmp = new DBHandle();
Table tmpTable = tmp.createTable("tmp", table.getSchema()); Table tmpTable = tmp.createTable("tmp", table.getSchema());
long txID = tmp.startTransaction(); long txID = tmp.startTransaction();
long keyDiff = newStart - oldStart; long keyDiff = newStart - oldStart;
RecordIterator it = table.iterator(oldStart, oldStart+size-1, oldStart); RecordIterator it = table.iterator(oldStart, oldStart + size - 1, oldStart);
while(it.hasNext()) { while (it.hasNext()) {
DBRecord rec = it.next(); DBRecord rec = it.next();
rec.setKey(rec.getKey()+keyDiff); rec.setKey(rec.getKey() + keyDiff);
tmpTable.putRecord(rec); tmpTable.putRecord(rec);
} }
table.deleteRecords(oldStart, oldStart+size-1); table.deleteRecords(oldStart, oldStart + size - 1);
table.deleteRecords(newStart, newStart+size-1); table.deleteRecords(newStart, newStart + size - 1);
it = tmpTable.iterator(newStart, newStart+size-1, newStart); it = tmpTable.iterator(newStart, newStart + size - 1, newStart);
while(it.hasNext()) { while (it.hasNext()) {
DBRecord rec = it.next(); DBRecord rec = it.next();
table.putRecord(rec); table.putRecord(rec);
} }

View file

@ -20,8 +20,8 @@ import java.util.ArrayList;
import java.util.NoSuchElementException; import java.util.NoSuchElementException;
/** /**
* <code>FieldIndexTable</code> provides a simplified index table whoose key is * <code>FieldIndexTable</code> provides a simplified index table whose key is
* a fixed or variable length {@link IndexField} which consists of a concatenation of * a fixed or variable length {@link IndexField} which consists of a concatenation of
* the index field value and associated primary table key. * the index field value and associated primary table key.
*/ */
public class FieldIndexTable extends IndexTable { public class FieldIndexTable extends IndexTable {
@ -40,8 +40,10 @@ public class FieldIndexTable extends IndexTable {
* @throws IOException thrown if IO error occurs * @throws IOException thrown if IO error occurs
*/ */
FieldIndexTable(Table primaryTable, int colIndex) throws IOException { FieldIndexTable(Table primaryTable, int colIndex) throws IOException {
this(primaryTable, primaryTable.getDBHandle().getMasterTable().createTableRecord( this(primaryTable, primaryTable.getDBHandle()
primaryTable.getName(), getIndexTableSchema(primaryTable, colIndex), colIndex)); .getMasterTable()
.createTableRecord(
primaryTable.getName(), getIndexTableSchema(primaryTable, colIndex), colIndex));
} }
/** /**
@ -49,7 +51,7 @@ public class FieldIndexTable extends IndexTable {
* its root ID specified within the tableRecord. * its root ID specified within the tableRecord.
* @param primaryTable primary table. * @param primaryTable primary table.
* @param indexTableRecord specifies the index parameters. * @param indexTableRecord specifies the index parameters.
* @throws IOException thrown if an IO error occurs * @throws IOException thrown if an IO error occurs
*/ */
FieldIndexTable(Table primaryTable, TableRecord indexTableRecord) throws IOException { FieldIndexTable(Table primaryTable, TableRecord indexTableRecord) throws IOException {
super(primaryTable, indexTableRecord); super(primaryTable, indexTableRecord);
@ -167,7 +169,7 @@ public class FieldIndexTable extends IndexTable {
} }
/** /**
* Construct an index field iterator. * Construct an index field iterator.
* @param minValue minimum index value or null if no minimum * @param minValue minimum index value or null if no minimum
* @param maxValue maximum index value or null if no maximum * @param maxValue maximum index value or null if no maximum
* @param before if true initial position is before minValue, else position * @param before if true initial position is before minValue, else position
@ -213,7 +215,7 @@ public class FieldIndexTable extends IndexTable {
} }
/** /**
* Construct an index field iterator. The iterator is positioned at index * Construct an index field iterator. The iterator is positioned at index
* value identified by startValue. * value identified by startValue.
* @param minValue minimum index value or null if no minimum * @param minValue minimum index value or null if no minimum
* @param maxValue maximum index value or null if no maximum * @param maxValue maximum index value or null if no maximum
@ -442,7 +444,7 @@ public class FieldIndexTable extends IndexTable {
} }
/** /**
* Construct a key iterator. The iterator is positioned immediately before * Construct a key iterator. The iterator is positioned immediately before
 * the key associated with the first occurrence of the startValue. * the key associated with the first occurrence of the startValue.
* @param startValue indexed field value. * @param startValue indexed field value.
* @param after if true the iterator is positioned immediately after * @param after if true the iterator is positioned immediately after
@ -456,13 +458,13 @@ public class FieldIndexTable extends IndexTable {
} }
/** /**
* Construct a key iterator. The iterator is positioned immediately before * Construct a key iterator. The iterator is positioned immediately before
* or after the key associated with the specified startValue/primaryKey. * or after the key associated with the specified startValue/primaryKey.
* @param minValue minimum index value or null if no minimum * @param minValue minimum index value or null if no minimum
* @param maxValue maximum index value or null if no maximum * @param maxValue maximum index value or null if no maximum
* @param startValue starting index value. * @param startValue starting index value.
* @param primaryKey starting primary key value (ignored if startValue is null). * @param primaryKey starting primary key value (ignored if startValue is null).
* @param after if true iterator is positioned immediately after * @param after if true iterator is positioned immediately after
* the startValue/primaryKey, * the startValue/primaryKey,
* otherwise immediately before. * otherwise immediately before.
* @throws IOException thrown if IO error occurs * @throws IOException thrown if IO error occurs
@ -499,11 +501,11 @@ public class FieldIndexTable extends IndexTable {
} }
/** /**
* If min or max index values was truncated, a comparison of the actual * If min or max index values was truncated, a comparison of the actual
* indexed field value (i.e., primary table value) is done with the min and/or max values. * indexed field value (i.e., primary table value) is done with the min and/or max values.
* @param f index field from index table iterator * @param f index field from index table iterator
* @return true if field value corresponding to f is outside the min/max range. * @return true if field value corresponding to f is outside the min/max range.
* It is assumed that the underlying table iterator will not return index values * It is assumed that the underlying table iterator will not return index values
* out of range which do not have the same truncated index value. * out of range which do not have the same truncated index value.
* @throws IOException thrown if IO error occurs * @throws IOException thrown if IO error occurs
*/ */

View file

@ -26,7 +26,7 @@ import ghidra.util.task.TaskMonitor;
* <code>LongKeyRecordNode</code> is an abstract implementation of a BTree leaf node * <code>LongKeyRecordNode</code> is an abstract implementation of a BTree leaf node
* which utilizes long key values and stores records. * which utilizes long key values and stores records.
* <p> * <p>
* This type of node has the following partial layout within a single DataBuffer * This type of node has the following partial layout within a single DataBuffer
* (field size in bytes): * (field size in bytes):
* <pre> * <pre>
* | NodeType(1) | KeyCount(4) | PrevLeafId(4) | NextLeafId(4) | ... * | NodeType(1) | KeyCount(4) | PrevLeafId(4) | NextLeafId(4) | ...
@ -412,7 +412,7 @@ abstract class LongKeyRecordNode extends LongKeyNode implements RecordNode {
/** /**
* Inserts the record at the given index if there is sufficient space in * Inserts the record at the given index if there is sufficient space in
* the buffer. * the buffer.
* @param index insertion index * @param index insertion index
* @param record record to be inserted * @param record record to be inserted
* @return true if the record was successfully inserted. * @return true if the record was successfully inserted.
@ -421,7 +421,7 @@ abstract class LongKeyRecordNode extends LongKeyNode implements RecordNode {
abstract boolean insertRecord(int index, DBRecord record) throws IOException; abstract boolean insertRecord(int index, DBRecord record) throws IOException;
/** /**
* Updates the record at the given index. * Updates the record at the given index.
* @param index record index * @param index record index
* @param record new record * @param record new record
* @return root node which may have changed. * @return root node which may have changed.
@ -448,7 +448,7 @@ abstract class LongKeyRecordNode extends LongKeyNode implements RecordNode {
abstract DBRecord getRecord(Schema schema, int index) throws IOException; abstract DBRecord getRecord(Schema schema, int index) throws IOException;
/** /**
* Get the first record whoose key is less than the specified key. * Get the first record whose key is less than the specified key.
* @param key record key * @param key record key
* @param schema record data schema * @param schema record data schema
* @return Record requested or null if record not found. * @return Record requested or null if record not found.
@ -470,7 +470,7 @@ abstract class LongKeyRecordNode extends LongKeyNode implements RecordNode {
} }
/** /**
* Get the first record whoose key is greater than the specified key. * Get the first record whose key is greater than the specified key.
* @param key record key * @param key record key
* @param schema record data schema * @param schema record data schema
* @return Record requested or null if record not found. * @return Record requested or null if record not found.
@ -492,7 +492,7 @@ abstract class LongKeyRecordNode extends LongKeyNode implements RecordNode {
} }
/** /**
* Get the first record whoose key is less than or equal to the specified * Get the first record whose key is less than or equal to the specified
* key. * key.
* @param key record key * @param key record key
* @param schema record data schema * @param schema record data schema
@ -512,7 +512,7 @@ abstract class LongKeyRecordNode extends LongKeyNode implements RecordNode {
} }
/** /**
* Get the first record whoose key is greater than or equal to the specified * Get the first record whose key is greater than or equal to the specified
* key. * key.
* @param key record key * @param key record key
* @param schema record data schema * @param schema record data schema

View file

@ -1,6 +1,5 @@
/* ### /* ###
* IP: GHIDRA * IP: GHIDRA
* REVIEWED: YES
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
@ -16,14 +15,14 @@
*/ */
package db.buffers; package db.buffers;
import ghidra.util.datastruct.IntIntHashtable;
import ghidra.util.exception.AssertException;
import ghidra.util.exception.NoValueException;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.util.*; import java.util.*;
import ghidra.util.datastruct.IntIntHashtable;
import ghidra.util.exception.AssertException;
import ghidra.util.exception.NoValueException;
/** /**
* <code>VersionFile</code> records buffer changes and parameters necessary to reconstruct an * <code>VersionFile</code> records buffer changes and parameters necessary to reconstruct an
* older version of a LocalBufferFile. * older version of a LocalBufferFile.
@ -31,7 +30,7 @@ import java.util.*;
class RecoveryFile { class RecoveryFile {
private static final int MAGIC_NUMBER = 0x38DE7654; private static final int MAGIC_NUMBER = 0x38DE7654;
private static final int VALID = 1; private static final int VALID = 1;
private static final int INVALID = 0; private static final int INVALID = 0;
@ -44,20 +43,21 @@ class RecoveryFile {
private static final String TIMESTAMP_HI_PARM = RECOVERY_PARM_PREFIX + "TimestampHi"; private static final String TIMESTAMP_HI_PARM = RECOVERY_PARM_PREFIX + "TimestampHi";
private static final String TIMESTAMP_LOW_PARM = RECOVERY_PARM_PREFIX + "TimestampLow"; private static final String TIMESTAMP_LOW_PARM = RECOVERY_PARM_PREFIX + "TimestampLow";
private static final String MAP_BUFFER_INDEX_PARM = RECOVERY_PARM_PREFIX + "MapIndex"; private static final String MAP_BUFFER_INDEX_PARM = RECOVERY_PARM_PREFIX + "MapIndex";
private static final String FREE_LIST_BUFFER_INDEX_PARM = RECOVERY_PARM_PREFIX + "FreeListIndex"; private static final String FREE_LIST_BUFFER_INDEX_PARM =
RECOVERY_PARM_PREFIX + "FreeListIndex";
private static final String FREE_LIST_SIZE_PARM = RECOVERY_PARM_PREFIX + "FreeListSize"; private static final String FREE_LIST_SIZE_PARM = RECOVERY_PARM_PREFIX + "FreeListSize";
private static final String INDEX_COUNT_PARM = RECOVERY_PARM_PREFIX + "BufferCount"; private static final String INDEX_COUNT_PARM = RECOVERY_PARM_PREFIX + "BufferCount";
// Exception messages // Exception messages
private static final String BAD_FREE_LIST = "Recovery file is corrupt - bad free list"; private static final String BAD_FREE_LIST = "Recovery file is corrupt - bad free list";
private static final String BAD_BUFFER_MAP = "Recovery file is corrupt - bad buffer map"; private static final String BAD_BUFFER_MAP = "Recovery file is corrupt - bad buffer map";
// Used by both the Buffer Map and Free Index List // Used by both the Buffer Map and Free Index List
private static final int NEXT_BUFFER_INDEX_OFFSET = 0; private static final int NEXT_BUFFER_INDEX_OFFSET = 0;
private static final int FIRST_ENTRY_OFFSET = 4; private static final int FIRST_ENTRY_OFFSET = 4;
private boolean readOnly; private boolean readOnly;
private boolean valid = false; private boolean valid = false;
private long timestamp; private long timestamp;
private boolean modified = false; private boolean modified = false;
@ -67,75 +67,77 @@ class RecoveryFile {
private IndexProvider vfIndexProvider; private IndexProvider vfIndexProvider;
private int freeListIndex = -1; private int freeListIndex = -1;
private int mapIndex = -1; private int mapIndex = -1;
private int[] freeIndexes; // sorted to facilitate binary search private int[] freeIndexes; // sorted to facilitate binary search
// maps buffer IDs to version file buffer indexes // maps buffer IDs to version file buffer indexes
private IntIntHashtable bufferIndexMap = new IntIntHashtable(); private IntIntHashtable bufferIndexMap = new IntIntHashtable();
/** /**
* Construct a new recovery file for update/output. * Construct a new recovery file for update/output.
* @param srcBf the original source buffer file to which this file applies. * @param srcBf the original source buffer file to which this file applies.
* @param rfile version buffer file to be updated/created * @param rfile version buffer file to be updated/created
* @throws IOException if vfile already exists or an IO error occurs * @param create true to create the file
* @throws IOException if the file already exists or an IO error occurs
*/ */
RecoveryFile(LocalBufferFile srcBf, File rfile, boolean create) throws IOException { RecoveryFile(LocalBufferFile srcBf, File rfile, boolean create) throws IOException {
readOnly = false; readOnly = false;
if (create) { if (create) {
indexCnt = srcBf.getIndexCount(); indexCnt = srcBf.getIndexCount();
recoveryFile = new LocalBufferFile(rfile, srcBf.getBufferSize()); recoveryFile = new LocalBufferFile(rfile, srcBf.getBufferSize());
// Save magic number for version file // Save magic number for version file
recoveryFile.setParameter(MAGIC_NUMBER_PARM, MAGIC_NUMBER); recoveryFile.setParameter(MAGIC_NUMBER_PARM, MAGIC_NUMBER);
// Mark as invalid // Mark as invalid
recoveryFile.setParameter(IS_VALID_PARM, INVALID); recoveryFile.setParameter(IS_VALID_PARM, INVALID);
// Save original and source file ID as user paramater values // Save original and source file ID as user parameter values
srcFileId = srcBf.getFileId(); srcFileId = srcBf.getFileId();
recoveryFile.setParameter(SRC_FILE_ID_HI_PARM, (int)(srcFileId >>> 32)); recoveryFile.setParameter(SRC_FILE_ID_HI_PARM, (int) (srcFileId >>> 32));
recoveryFile.setParameter(SRC_FILE_ID_LOW_PARM, (int)(srcFileId & 0xffffffffL)); recoveryFile.setParameter(SRC_FILE_ID_LOW_PARM, (int) (srcFileId & 0xffffffffL));
vfIndexProvider = new IndexProvider(); vfIndexProvider = new IndexProvider();
modified = true; modified = true;
} }
else { else {
recoveryFile = new LocalBufferFile(rfile, false); recoveryFile = new LocalBufferFile(rfile, false);
valid = (recoveryFile.getParameter(IS_VALID_PARM) == VALID); valid = (recoveryFile.getParameter(IS_VALID_PARM) == VALID);
if (!valid) { if (!valid) {
throw new IOException("Can not update invalid recovery file"); throw new IOException("Can not update invalid recovery file");
} }
parseFile(); parseFile();
if (srcFileId != srcBf.getFileId()) { if (srcFileId != srcBf.getFileId()) {
throw new IOException("Recovery file not associated with source file"); throw new IOException("Recovery file not associated with source file");
} }
vfIndexProvider = new IndexProvider(recoveryFile.getIndexCount(), recoveryFile.getFreeIndexes()); vfIndexProvider =
new IndexProvider(recoveryFile.getIndexCount(), recoveryFile.getFreeIndexes());
} }
} }
/** /**
* Construct a read-only recovery file * Construct a read-only recovery file
* @param srcBf the original source buffer file to which this file applies. * @param srcBf the original source buffer file to which this file applies.
* @param rfile version buffer file to be updated/created * @param rfile version buffer file to be updated/created
* @throws IOException * @throws IOException if the file already exists or an IO error occurs
* @throws IOException if vfile already exists or an IO error occurs
*/ */
RecoveryFile(LocalBufferFile srcBf, File rfile) throws IOException { RecoveryFile(LocalBufferFile srcBf, File rfile) throws IOException {
recoveryFile = new LocalBufferFile(rfile, true); recoveryFile = new LocalBufferFile(rfile, true);
readOnly = true; readOnly = true;
parseFile(); parseFile();
valid = (recoveryFile.getParameter(IS_VALID_PARM) == VALID && srcFileId == srcBf.getFileId()); valid =
(recoveryFile.getParameter(IS_VALID_PARM) == VALID && srcFileId == srcBf.getFileId());
} }
private void setModified() { private void setModified() {
if (valid) { if (valid) {
recoveryFile.setParameter(IS_VALID_PARM, INVALID); recoveryFile.setParameter(IS_VALID_PARM, INVALID);
@ -143,77 +145,77 @@ class RecoveryFile {
modified = true; modified = true;
} }
} }
File getFile() { File getFile() {
return recoveryFile.getFile(); return recoveryFile.getFile();
} }
boolean isValid() { boolean isValid() {
return valid; return valid;
} }
long getTimestamp() { long getTimestamp() {
return timestamp; return timestamp;
} }
/**
* Close the version file.
*/
void close() throws IOException { void close() throws IOException {
if (recoveryFile == null) if (recoveryFile == null) {
return; return;
}
if (!readOnly && modified && !recoveryFile.isReadOnly()) { if (!readOnly && modified && !recoveryFile.isReadOnly()) {
saveBufferMap(); saveBufferMap();
saveFreeIndexList(); saveFreeIndexList();
recoveryFile.setParameter(INDEX_COUNT_PARM, indexCnt); recoveryFile.setParameter(INDEX_COUNT_PARM, indexCnt);
recoveryFile.setFreeIndexes(vfIndexProvider.getFreeIndexes()); recoveryFile.setFreeIndexes(vfIndexProvider.getFreeIndexes());
long t = (new Date()).getTime(); long t = (new Date()).getTime();
recoveryFile.setParameter(TIMESTAMP_HI_PARM, (int)(t >>> 32)); recoveryFile.setParameter(TIMESTAMP_HI_PARM, (int) (t >>> 32));
recoveryFile.setParameter(TIMESTAMP_LOW_PARM, (int)(t & 0xffffffffL)); recoveryFile.setParameter(TIMESTAMP_LOW_PARM, (int) (t & 0xffffffffL));
recoveryFile.setParameter(IS_VALID_PARM, VALID); // mark as valid recoveryFile.setParameter(IS_VALID_PARM, VALID); // mark as valid
} }
recoveryFile.close(); recoveryFile.close();
recoveryFile = null; recoveryFile = null;
} }
private void parseFile() throws IOException { private void parseFile() throws IOException {
try { try {
if (MAGIC_NUMBER != recoveryFile.getParameter(MAGIC_NUMBER_PARM)) { if (MAGIC_NUMBER != recoveryFile.getParameter(MAGIC_NUMBER_PARM)) {
throw new IOException("Invalid recovery file"); throw new IOException("Invalid recovery file");
} }
try { try {
timestamp = ((long)recoveryFile.getParameter(TIMESTAMP_HI_PARM) << 32) | timestamp = ((long) recoveryFile.getParameter(TIMESTAMP_HI_PARM) << 32) |
(recoveryFile.getParameter(TIMESTAMP_LOW_PARM) & 0xffffffffL); (recoveryFile.getParameter(TIMESTAMP_LOW_PARM) & 0xffffffffL);
} catch (NoSuchElementException e) { }
catch (NoSuchElementException e) {
// Not as good - better than nothing // Not as good - better than nothing
timestamp = recoveryFile.getFile().lastModified(); timestamp = recoveryFile.getFile().lastModified();
} }
srcFileId = ((long)recoveryFile.getParameter(SRC_FILE_ID_HI_PARM) << 32) | srcFileId = ((long) recoveryFile.getParameter(SRC_FILE_ID_HI_PARM) << 32) |
(recoveryFile.getParameter(SRC_FILE_ID_LOW_PARM) & 0xffffffffL); (recoveryFile.getParameter(SRC_FILE_ID_LOW_PARM) & 0xffffffffL);
indexCnt = recoveryFile.getParameter(INDEX_COUNT_PARM); indexCnt = recoveryFile.getParameter(INDEX_COUNT_PARM);
readBufferMap(); readBufferMap();
readFreeIndexList(); readFreeIndexList();
} catch (NoSuchElementException e) { }
catch (NoSuchElementException e) {
throw new IOException("Corrupt recovery file"); throw new IOException("Corrupt recovery file");
} }
} }
private void saveBufferMap() throws IOException { private void saveBufferMap() throws IOException {
DataBuffer buf = new DataBuffer(recoveryFile.getBufferSize()); DataBuffer buf = new DataBuffer(recoveryFile.getBufferSize());
if (mapIndex < 0) { if (mapIndex < 0) {
mapIndex = vfIndexProvider.allocateIndex(); mapIndex = vfIndexProvider.allocateIndex();
buf.setId(mapIndex); buf.setId(mapIndex);
@ -226,24 +228,24 @@ class RecoveryFile {
int maxOffset = (recoveryFile.getBufferSize() - 8) & ~0x07; int maxOffset = (recoveryFile.getBufferSize() - 8) & ~0x07;
int offset = FIRST_ENTRY_OFFSET; int offset = FIRST_ENTRY_OFFSET;
// Save new map entries // Save new map entries
int thisIndex = mapIndex; int thisIndex = mapIndex;
int[] realIndexes = bufferIndexMap.getKeys(); int[] realIndexes = bufferIndexMap.getKeys();
for (int i = 0; i <= realIndexes.length; i++) { for (int i = 0; i <= realIndexes.length; i++) {
if (offset > maxOffset) { if (offset > maxOffset) {
boolean newBuf = false; boolean newBuf = false;
int nextIndex = buf.getInt(NEXT_BUFFER_INDEX_OFFSET); int nextIndex = buf.getInt(NEXT_BUFFER_INDEX_OFFSET);
if (nextIndex < 0) { if (nextIndex < 0) {
nextIndex = vfIndexProvider.allocateIndex(); nextIndex = vfIndexProvider.allocateIndex();
newBuf = true; newBuf = true;
} }
buf.putInt(NEXT_BUFFER_INDEX_OFFSET, nextIndex); buf.putInt(NEXT_BUFFER_INDEX_OFFSET, nextIndex);
recoveryFile.put(buf, thisIndex); recoveryFile.put(buf, thisIndex);
thisIndex = nextIndex; thisIndex = nextIndex;
if (newBuf) { if (newBuf) {
buf.setId(thisIndex); buf.setId(thisIndex);
@ -255,7 +257,7 @@ class RecoveryFile {
offset = FIRST_ENTRY_OFFSET; offset = FIRST_ENTRY_OFFSET;
} }
// Save map entry as single integer // Save map entry as single integer
if (i == realIndexes.length) { if (i == realIndexes.length) {
buf.putInt(offset, -1); buf.putInt(offset, -1);
@ -264,42 +266,43 @@ class RecoveryFile {
try { try {
offset = buf.putInt(offset, realIndexes[i]); offset = buf.putInt(offset, realIndexes[i]);
offset = buf.putInt(offset, bufferIndexMap.get(realIndexes[i])); offset = buf.putInt(offset, bufferIndexMap.get(realIndexes[i]));
} catch (NoValueException e) { }
catch (NoValueException e) {
throw new AssertException(); throw new AssertException();
} }
} }
} }
// Make sure last buffer is saved // Make sure last buffer is saved
recoveryFile.put(buf, thisIndex); recoveryFile.put(buf, thisIndex);
} }
private void readBufferMap() throws NoSuchElementException, IOException { private void readBufferMap() throws NoSuchElementException, IOException {
mapIndex = recoveryFile.getParameter(MAP_BUFFER_INDEX_PARM); mapIndex = recoveryFile.getParameter(MAP_BUFFER_INDEX_PARM);
int maxOffset = (recoveryFile.getBufferSize() - 8) & ~0x07; int maxOffset = (recoveryFile.getBufferSize() - 8) & ~0x07;
int thisIndex = mapIndex; int thisIndex = mapIndex;
DataBuffer mapBuffer = new DataBuffer(); DataBuffer mapBuffer = new DataBuffer();
recoveryFile.get(mapBuffer, thisIndex); recoveryFile.get(mapBuffer, thisIndex);
if (mapBuffer.isEmpty()) { if (mapBuffer.isEmpty()) {
throw new IOException(BAD_BUFFER_MAP); throw new IOException(BAD_BUFFER_MAP);
} }
int nextMapEntryOffset = FIRST_ENTRY_OFFSET; int nextMapEntryOffset = FIRST_ENTRY_OFFSET;
while (true) { while (true) {
if (nextMapEntryOffset > maxOffset) { if (nextMapEntryOffset > maxOffset) {
// Get next map buffer // Get next map buffer
thisIndex = mapBuffer.getInt(NEXT_BUFFER_INDEX_OFFSET); thisIndex = mapBuffer.getInt(NEXT_BUFFER_INDEX_OFFSET);
recoveryFile.get(mapBuffer, thisIndex); recoveryFile.get(mapBuffer, thisIndex);
if (mapBuffer.isEmpty()) { if (mapBuffer.isEmpty()) {
throw new IOException(BAD_BUFFER_MAP); throw new IOException(BAD_BUFFER_MAP);
} }
nextMapEntryOffset = FIRST_ENTRY_OFFSET; nextMapEntryOffset = FIRST_ENTRY_OFFSET;
} }
// Read map entry - end of list signified by -1 // Read map entry - end of list signified by -1
int realIndex = mapBuffer.getInt(nextMapEntryOffset); int realIndex = mapBuffer.getInt(nextMapEntryOffset);
if (realIndex < 0) { if (realIndex < 0) {
@ -311,9 +314,9 @@ class RecoveryFile {
bufferIndexMap.put(realIndex, recoveryIndex); bufferIndexMap.put(realIndex, recoveryIndex);
} }
} }
private void saveFreeIndexList() throws IOException { private void saveFreeIndexList() throws IOException {
DataBuffer buf = new DataBuffer(recoveryFile.getBufferSize()); DataBuffer buf = new DataBuffer(recoveryFile.getBufferSize());
if (freeListIndex < 0) { if (freeListIndex < 0) {
freeListIndex = vfIndexProvider.allocateIndex(); freeListIndex = vfIndexProvider.allocateIndex();
@ -328,23 +331,23 @@ class RecoveryFile {
int maxOffset = (recoveryFile.getBufferSize() - 4) & ~0x03; int maxOffset = (recoveryFile.getBufferSize() - 4) & ~0x03;
int offset = FIRST_ENTRY_OFFSET; int offset = FIRST_ENTRY_OFFSET;
// Save freeIndexes entries // Save freeIndexes entries
int thisIndex = freeListIndex; int thisIndex = freeListIndex;
for (int i = 0; i <= freeIndexes.length; i++) { for (int i = 0; i <= freeIndexes.length; i++) {
if (offset > maxOffset) { if (offset > maxOffset) {
boolean newBuf = false; boolean newBuf = false;
int nextIndex = buf.getInt(NEXT_BUFFER_INDEX_OFFSET); int nextIndex = buf.getInt(NEXT_BUFFER_INDEX_OFFSET);
if (nextIndex < 0) { if (nextIndex < 0) {
nextIndex = vfIndexProvider.allocateIndex(); nextIndex = vfIndexProvider.allocateIndex();
newBuf = true; newBuf = true;
} }
buf.putInt(NEXT_BUFFER_INDEX_OFFSET, nextIndex); buf.putInt(NEXT_BUFFER_INDEX_OFFSET, nextIndex);
recoveryFile.put(buf, thisIndex); recoveryFile.put(buf, thisIndex);
thisIndex = nextIndex; thisIndex = nextIndex;
if (newBuf) { if (newBuf) {
buf.setId(thisIndex); buf.setId(thisIndex);
@ -356,45 +359,45 @@ class RecoveryFile {
offset = FIRST_ENTRY_OFFSET; offset = FIRST_ENTRY_OFFSET;
} }
// Save list entry as single integer // Save list entry as single integer
int val = (i == freeIndexes.length ? -1 : freeIndexes[i]); int val = (i == freeIndexes.length ? -1 : freeIndexes[i]);
offset = buf.putInt(offset, val); offset = buf.putInt(offset, val);
} }
// Make sure last buffer is saved // Make sure last buffer is saved
recoveryFile.put(buf, thisIndex); recoveryFile.put(buf, thisIndex);
} }
private void readFreeIndexList() throws NoSuchElementException, IOException { private void readFreeIndexList() throws NoSuchElementException, IOException {
freeListIndex = recoveryFile.getParameter(FREE_LIST_BUFFER_INDEX_PARM); freeListIndex = recoveryFile.getParameter(FREE_LIST_BUFFER_INDEX_PARM);
int size = recoveryFile.getParameter(FREE_LIST_SIZE_PARM); int size = recoveryFile.getParameter(FREE_LIST_SIZE_PARM);
freeIndexes = new int[size]; freeIndexes = new int[size];
int maxOffset = (recoveryFile.getBufferSize() - 4) & ~0x03; int maxOffset = (recoveryFile.getBufferSize() - 4) & ~0x03;
int thisIndex = freeListIndex; int thisIndex = freeListIndex;
DataBuffer listBuffer = new DataBuffer(); DataBuffer listBuffer = new DataBuffer();
recoveryFile.get(listBuffer, thisIndex); recoveryFile.get(listBuffer, thisIndex);
if (listBuffer.isEmpty()) { if (listBuffer.isEmpty()) {
throw new IOException(BAD_FREE_LIST); throw new IOException(BAD_FREE_LIST);
} }
int offset = FIRST_ENTRY_OFFSET; int offset = FIRST_ENTRY_OFFSET;
int entryIx = 0; int entryIx = 0;
while (true) { while (true) {
if (offset > maxOffset) { if (offset > maxOffset) {
// Get next list buffer // Get next list buffer
thisIndex = listBuffer.getInt(NEXT_BUFFER_INDEX_OFFSET); thisIndex = listBuffer.getInt(NEXT_BUFFER_INDEX_OFFSET);
recoveryFile.get(listBuffer, thisIndex); recoveryFile.get(listBuffer, thisIndex);
if (listBuffer.isEmpty()) { if (listBuffer.isEmpty()) {
throw new IOException(BAD_FREE_LIST); throw new IOException(BAD_FREE_LIST);
} }
offset = FIRST_ENTRY_OFFSET; offset = FIRST_ENTRY_OFFSET;
} }
// Read entry - end of list signified by -1 // Read entry - end of list signified by -1
int origIndex = listBuffer.getInt(offset); int origIndex = listBuffer.getInt(offset);
if (origIndex < 0) { if (origIndex < 0) {
@ -411,21 +414,22 @@ class RecoveryFile {
} }
Arrays.sort(freeIndexes); Arrays.sort(freeIndexes);
} }
/** /**
* Set the current index count for the file * Set the current index count for the file
* @param newIndexCnt * @param newIndexCount the count
*/ */
void setIndexCount(int newIndexCnt) { void setIndexCount(int newIndexCount) {
setModified(); setModified();
for (int index = indexCnt; index < newIndexCnt; index++) { for (int index = indexCnt; index < newIndexCount; index++) {
removeBuffer(index); removeBuffer(index);
} }
indexCnt = newIndexCnt; indexCnt = newIndexCount;
} }
/** /**
* Returns the index count for the file * Returns the index count for the file
* @return the count
*/ */
int getIndexCount() { int getIndexCount() {
return indexCnt; return indexCnt;
@ -433,29 +437,28 @@ class RecoveryFile {
/** /**
* Set the free index list * Set the free index list
* @param freeIndexes * @param freeIndexes the indexes
*/ */
void setFreeIndexList(int[] freeIndexes) { void setFreeIndexList(int[] freeIndexes) {
setModified(); setModified();
this.freeIndexes = freeIndexes.clone(); this.freeIndexes = freeIndexes.clone();
Arrays.sort(this.freeIndexes); Arrays.sort(this.freeIndexes);
for (int i = 0; i < freeIndexes.length; i++) { for (int index : freeIndexes) {
removeBuffer(freeIndexes[i]); removeBuffer(index);
} }
} }
/** /**
* Returns the list of free indexes associated with the original * Returns the list of free indexes associated with the original buffer file.
* buffer file. * @return the indexes
*/ */
int[] getFreeIndexList() { int[] getFreeIndexList() {
return freeIndexes; return freeIndexes;
} }
/** /**
* Store buffer which has been modified in the target. * Store buffer which has been modified in the target.
* @param buf modified buffer * @param buf modified buffer
* @param id buffer ID
* @throws IOException if an IO error occurs * @throws IOException if an IO error occurs
*/ */
void putBuffer(DataBuffer buf) throws IOException { void putBuffer(DataBuffer buf) throws IOException {
@ -463,24 +466,25 @@ class RecoveryFile {
throw new IOException("Version file is closed"); throw new IOException("Version file is closed");
} }
if (readOnly) { if (readOnly) {
throw new IOException("Version file is read-only"); throw new IOException("Version file is read-only");
} }
setModified(); setModified();
int vfIndex; int vfIndex;
int id = buf.getId(); int id = buf.getId();
try { try {
vfIndex = bufferIndexMap.get(id); vfIndex = bufferIndexMap.get(id);
} catch (NoValueException e) { }
catch (NoValueException e) {
vfIndex = vfIndexProvider.allocateIndex(); vfIndex = vfIndexProvider.allocateIndex();
bufferIndexMap.put(id, vfIndex); bufferIndexMap.put(id, vfIndex);
} }
recoveryFile.put(buf, vfIndex); recoveryFile.put(buf, vfIndex);
} }
/** /**
* Remove a buffer previously stored to the snapshot * Remove a buffer previously stored to the snapshot
* by removing it from the map. It is OK to invoke * by removing it from the map. It is OK to invoke
* this method for an index whoose buffer was never * this method for an index whose buffer was never
* put into this file. * put into this file.
* @param id buffer ID * @param id buffer ID
*/ */
@ -489,12 +493,14 @@ class RecoveryFile {
try { try {
int vfIndex = bufferIndexMap.remove(id); int vfIndex = bufferIndexMap.remove(id);
vfIndexProvider.freeIndex(vfIndex); vfIndexProvider.freeIndex(vfIndex);
} catch (NoValueException e) { }
catch (NoValueException e) {
// ignore?
} }
} }
/** /**
* Get modified buffer associated with the specified storage index in the * Get modified buffer associated with the specified storage index in the
* original file. * original file.
* @param buf data buffer * @param buf data buffer
* @param id buffer ID * @param id buffer ID
@ -508,16 +514,18 @@ class RecoveryFile {
int vfIndex; int vfIndex;
try { try {
vfIndex = bufferIndexMap.get(id); vfIndex = bufferIndexMap.get(id);
} catch (NoValueException e) { }
catch (NoValueException e) {
return null; return null;
} }
recoveryFile.get(buf, vfIndex); recoveryFile.get(buf, vfIndex);
return buf; return buf;
} }
/** /**
* Returns list of buffer indexes stored within this file. * Returns list of buffer indexes stored within this file.
* These indexes reflect those buffers which have been modified and stored. * These indexes reflect those buffers which have been modified and stored.
* @return the indexes
*/ */
int[] getBufferIndexes() { int[] getBufferIndexes() {
return bufferIndexMap.getKeys(); return bufferIndexMap.getKeys();
@ -525,36 +533,38 @@ class RecoveryFile {
/** /**
* Returns file ID for original source buffer file which may be produced with this version file. * Returns file ID for original source buffer file which may be produced with this version file.
* @return the id
*/ */
long getSourceFileID() { long getSourceFileID() {
return srcFileId; return srcFileId;
} }
/** /**
* Returns a list of parameters defined within the original beffer file. * Returns a list of parameters defined within the original buffer file.
* @throws IOException * @return the names
* @throws IOException if the recovery file is null
*/ */
String[] getUserParameterNames() throws IOException { String[] getUserParameterNames() throws IOException {
if (recoveryFile == null) { if (recoveryFile == null) {
throw new IOException("Version file is closed"); throw new IOException("Version file is closed");
} }
String[] allNames = recoveryFile.getParameterNames(); String[] allNames = recoveryFile.getParameterNames();
ArrayList<String> list = new ArrayList<String>(); ArrayList<String> list = new ArrayList<>();
for (int i = 0; i < allNames.length; i++) { for (String name : allNames) {
if (!allNames[i].startsWith(RECOVERY_PARM_PREFIX)) { if (!name.startsWith(RECOVERY_PARM_PREFIX)) {
list.add(allNames[i]); list.add(name);
} }
} }
String[] names = new String[list.size()]; String[] names = new String[list.size()];
list.toArray(names); list.toArray(names);
return names; return names;
} }
/** /**
* Get a parameter value associated with the original buffer file. * Get a parameter value associated with the original buffer file.
* @param name parameter name * @param name parameter name
* @return parameter value * @return parameter value
* @throws IOException * @throws IOException if the recovery file is null
*/ */
int getParameter(String name) throws IOException { int getParameter(String name) throws IOException {
if (recoveryFile == null) { if (recoveryFile == null) {
@ -562,42 +572,42 @@ class RecoveryFile {
} }
return recoveryFile.getParameter(name); return recoveryFile.getParameter(name);
} }
/** /**
* Clear all user parameters * Clear all user parameters
*/ */
void clearParameters() { void clearParameters() {
setModified(); setModified();
// Remember recovery parameters // Remember recovery parameters
String[] allNames = recoveryFile.getParameterNames(); String[] allNames = recoveryFile.getParameterNames();
Hashtable<String,Integer> recoveryProps = new Hashtable<String,Integer>(); Hashtable<String, Integer> recoveryProps = new Hashtable<>();
for (int i = 0; i < allNames.length; i++) { for (String name : allNames) {
if (allNames[i].startsWith(RECOVERY_PARM_PREFIX)) { if (name.startsWith(RECOVERY_PARM_PREFIX)) {
recoveryProps.put(allNames[i], new Integer(recoveryFile.getParameter(allNames[i]))); recoveryProps.put(name, recoveryFile.getParameter(name));
} }
} }
// Clear all parameters // Clear all parameters
recoveryFile.clearParameters(); recoveryFile.clearParameters();
// Restore recovery parameters // Restore recovery parameters
Iterator<String> iter = recoveryProps.keySet().iterator(); Iterator<String> iter = recoveryProps.keySet().iterator();
while (iter.hasNext()) { while (iter.hasNext()) {
String name = iter.next(); String name = iter.next();
recoveryFile.setParameter( recoveryFile.setParameter(
name, recoveryProps.get(name).intValue()); name, recoveryProps.get(name).intValue());
} }
} }
/** /**
* Set user parameter * Set user parameter
* @param name * @param name the name
* @param value * @param value the value
*/ */
void setParameter(String name, int value) { void setParameter(String name, int value) {
setModified(); setModified();
recoveryFile.setParameter(name, value); recoveryFile.setParameter(name, value);
} }
} }

View file

@ -287,8 +287,8 @@ public class VersionedDatabase extends Database {
/** /**
* Open a specific version of this database for non-update use. * Open a specific version of this database for non-update use.
* @param version database version or LATEST_VERSION for current version * @param version database version or LATEST_VERSION for current version
* @param minChangeDataVer the minimum database version whoose change data * @param minChangeDataVer the minimum database version whose change data
* should be associated with the returned buffer file. A value of -1 indicates that * should be associated with the returned buffer file. A value of -1 indicates that
* change data is not required. * change data is not required.
* @return buffer file for non-update use. * @return buffer file for non-update use.
* @throws IOException * @throws IOException
@ -455,7 +455,7 @@ public class VersionedDatabase extends Database {
} }
/** /**
* <code>VerDBBufferFileManager</code> provides buffer file management * <code>VerDBBufferFileManager</code> provides buffer file management
* for this versioned database instead of the DBBufferFileManager. * for this versioned database instead of the DBBufferFileManager.
*/ */
private class VerDBBufferFileManager implements BufferFileManager { private class VerDBBufferFileManager implements BufferFileManager {
@ -484,7 +484,7 @@ public class VersionedDatabase extends Database {
return new File(dbDir, return new File(dbDir,
CHANGE_FILE_PREFIX + version + LocalBufferFile.BUFFER_FILE_EXTENSION); CHANGE_FILE_PREFIX + version + LocalBufferFile.BUFFER_FILE_EXTENSION);
} }
@Override @Override
public File getChangeMapFile() { public File getChangeMapFile() {
return null; return null;

View file

@ -68,8 +68,8 @@ abstract class CompositeDBAdapter {
} }
/** /**
* Gets an adapter for working with the composite data type database table. * Gets an adapter for working with the composite data type database table.
* The composite table is used to store structures and unions. The adapter is based * The composite table is used to store structures and unions. The adapter is based
* on the version of the database associated with the specified database handle and the openMode. * on the version of the database associated with the specified database handle and the openMode.
* @param handle handle to the database to be accessed. * @param handle handle to the database to be accessed.
* @param openMode the mode this adapter is to be opened for (CREATE, UPDATE, READ_ONLY, UPGRADE). * @param openMode the mode this adapter is to be opened for (CREATE, UPDATE, READ_ONLY, UPGRADE).
@ -187,10 +187,10 @@ abstract class CompositeDBAdapter {
* @param sourceArchiveID the ID for the source archive where this data type originated. * @param sourceArchiveID the ID for the source archive where this data type originated.
* @param sourceDataTypeID the ID of the associated data type in the source archive. * @param sourceDataTypeID the ID of the associated data type in the source archive.
* @param lastChangeTime the time this data type was last changed. * @param lastChangeTime the time this data type was last changed.
* @param packValue {@link CompositeInternal#NO_PACKING}, {@link CompositeInternal#DEFAULT_PACKING} * @param packValue {@link CompositeInternal#NO_PACKING}, {@link CompositeInternal#DEFAULT_PACKING}
* or the explicit pack value currently in use by this data type (positive value). * or the explicit pack value currently in use by this data type (positive value).
* @param minAlignment {@link CompositeInternal#DEFAULT_ALIGNMENT}, {@link CompositeInternal#MACHINE_ALIGNMENT} * @param minAlignment {@link CompositeInternal#DEFAULT_ALIGNMENT}, {@link CompositeInternal#MACHINE_ALIGNMENT}
* or the minimum alignment value currently in use by this data type (positive value). * or the minimum alignment value currently in use by this data type (positive value).
* @return the database record for this data type. * @return the database record for this data type.
* @throws IOException if the database can't be accessed. * @throws IOException if the database can't be accessed.
*/ */
@ -216,7 +216,7 @@ abstract class CompositeDBAdapter {
/** /**
* Updates the composite data type table with the provided record. * Updates the composite data type table with the provided record.
* @param record the new record * @param record the new record
* @param setLastChangeTime true means change the last change time in the record to the * @param setLastChangeTime true means change the last change time in the record to the
* current time before putting the record in the database. * current time before putting the record in the database.
* @throws IOException if the database can't be accessed. * @throws IOException if the database can't be accessed.
*/ */
@ -240,7 +240,7 @@ abstract class CompositeDBAdapter {
/** /**
* Gets all the composite data types that are contained in the category that has the indicated ID. * Gets all the composite data types that are contained in the category that has the indicated ID.
* @param categoryID the category whose composite data types are wanted. * @param categoryID the category whose composite data types are wanted.
* @return an array of IDs as LongField values within Field array for the * @return an array of IDs as LongField values within Field array for the
* composite data types in the category. * composite data types in the category.
* @throws IOException if the database can't be accessed. * @throws IOException if the database can't be accessed.
*/ */
@ -256,7 +256,7 @@ abstract class CompositeDBAdapter {
abstract Field[] getRecordIdsForSourceArchive(long archiveID) throws IOException; abstract Field[] getRecordIdsForSourceArchive(long archiveID) throws IOException;
/** /**
* Get composite record whoose sourceID and datatypeID match the specified Universal IDs. * Get composite record whose sourceID and datatypeID match the specified Universal IDs.
* @param sourceID universal source archive ID * @param sourceID universal source archive ID
* @param datatypeID universal datatype ID * @param datatypeID universal datatype ID
* @return composite record found or null * @return composite record found or null

View file

@ -41,7 +41,7 @@ abstract class EnumDBAdapter {
static final int ENUM_LAST_CHANGE_TIME_COL = EnumDBAdapterV1.V1_ENUM_LAST_CHANGE_TIME_COL; static final int ENUM_LAST_CHANGE_TIME_COL = EnumDBAdapterV1.V1_ENUM_LAST_CHANGE_TIME_COL;
/** /**
* Gets an adapter for working with the enumeration data type database table. The adapter is based * Gets an adapter for working with the enumeration data type database table. The adapter is based
* on the version of the database associated with the specified database handle and the openMode. * on the version of the database associated with the specified database handle and the openMode.
* @param handle handle to the database to be accessed. * @param handle handle to the database to be accessed.
* @param openMode the mode this adapter is to be opened for (CREATE, UPDATE, READ_ONLY, UPGRADE). * @param openMode the mode this adapter is to be opened for (CREATE, UPDATE, READ_ONLY, UPGRADE).
@ -159,16 +159,17 @@ abstract class EnumDBAdapter {
/** /**
* Updates the enumeration data type table with the provided record. * Updates the enumeration data type table with the provided record.
* @param record the new record * @param record the new record
* @param setLastChangedTime true means change the last change time in the record to the * @param setLastChangeTime true means change the last change time in the record to the
* current time before putting the record in the database. * current time before putting the record in the database.
* @throws IOException if the database can't be accessed. * @throws IOException if the database can't be accessed.
*/ */
abstract void updateRecord(DBRecord record, boolean setLastChangeTime) throws IOException; abstract void updateRecord(DBRecord record, boolean setLastChangeTime) throws IOException;
/** /**
* Remove the record for the given enumeration ID, and remove all of its * Remove the record for the given enumeration ID, and remove all of its associated value
* associated value records. * records.
* @param enumID ID of enumerated data type to delete * @param enumID ID of enumerated data type to delete
* @return true if successful
* @throws IOException if there was a problem accessing the database * @throws IOException if there was a problem accessing the database
*/ */
abstract boolean removeRecord(long enumID) throws IOException; abstract boolean removeRecord(long enumID) throws IOException;
@ -198,7 +199,7 @@ abstract class EnumDBAdapter {
abstract Field[] getRecordIdsForSourceArchive(long archiveID) throws IOException; abstract Field[] getRecordIdsForSourceArchive(long archiveID) throws IOException;
/** /**
* Get enum record whoose sourceID and datatypeID match the specified Universal IDs. * Get enum record whose sourceID and datatypeID match the specified Universal IDs.
* @param sourceID universal source archive ID * @param sourceID universal source archive ID
* @param datatypeID universal datatype ID * @param datatypeID universal datatype ID
* @return enum record found or null * @return enum record found or null

View file

@ -56,7 +56,7 @@ abstract class FunctionDefinitionDBAdapter {
static final int GENERIC_CALLING_CONVENTION_FLAG_SHIFT = 1; static final int GENERIC_CALLING_CONVENTION_FLAG_SHIFT = 1;
/** /**
* Gets an adapter for working with the function definition data type database table. The adapter is based * Gets an adapter for working with the function definition data type database table. The adapter is based
* on the version of the database associated with the specified database handle and the openMode. * on the version of the database associated with the specified database handle and the openMode.
* @param handle handle to the database to be accessed. * @param handle handle to the database to be accessed.
* @param openMode the mode this adapter is to be opened for (CREATE, UPDATE, READ_ONLY, UPGRADE). * @param openMode the mode this adapter is to be opened for (CREATE, UPDATE, READ_ONLY, UPGRADE).
@ -187,7 +187,7 @@ abstract class FunctionDefinitionDBAdapter {
/** /**
* Updates the function definition data type table with the provided record. * Updates the function definition data type table with the provided record.
* @param record the new record * @param record the new record
* @param setLastChangedTime true means change the last change time in the record to the * @param setLastChangeTime true means change the last change time in the record to the
* current time before putting the record in the database. * current time before putting the record in the database.
* @throws IOException if the database can't be accessed. * @throws IOException if the database can't be accessed.
*/ */
@ -218,7 +218,7 @@ abstract class FunctionDefinitionDBAdapter {
abstract Field[] getRecordIdsForSourceArchive(long archiveID) throws IOException; abstract Field[] getRecordIdsForSourceArchive(long archiveID) throws IOException;
/** /**
* Get function definition record whoose sourceID and datatypeID match the specified Universal IDs. * Get function definition record whose sourceID and datatypeID match the specified Universal IDs.
* @param sourceID universal source archive ID * @param sourceID universal source archive ID
* @param datatypeID universal datatype ID * @param datatypeID universal datatype ID
* @return function definition record found or null * @return function definition record found or null

View file

@ -43,7 +43,7 @@ abstract class TypedefDBAdapter {
TypedefDBAdapterV1.V1_TYPEDEF_LAST_CHANGE_TIME_COL; TypedefDBAdapterV1.V1_TYPEDEF_LAST_CHANGE_TIME_COL;
/** /**
* Gets an adapter for working with the Typedef data type database table. The adapter is based * Gets an adapter for working with the Typedef data type database table. The adapter is based
* on the version of the database associated with the specified database handle and the openMode. * on the version of the database associated with the specified database handle and the openMode.
* @param handle handle to the database to be accessed. * @param handle handle to the database to be accessed.
* @param openMode the mode this adapter is to be opened for (CREATE, UPDATE, READ_ONLY, UPGRADE). * @param openMode the mode this adapter is to be opened for (CREATE, UPDATE, READ_ONLY, UPGRADE).
@ -156,7 +156,7 @@ abstract class TypedefDBAdapter {
/** /**
* Updates the type definition data type table with the provided record. * Updates the type definition data type table with the provided record.
* @param record the new record * @param record the new record
* @param setLastChangedTime true means change the last change time in the record to the * @param setLastChangeTime true means change the last change time in the record to the
* current time before putting the record in the database. * current time before putting the record in the database.
* @throws IOException if the database can't be accessed. * @throws IOException if the database can't be accessed.
*/ */
@ -187,7 +187,7 @@ abstract class TypedefDBAdapter {
abstract Field[] getRecordIdsForSourceArchive(long archiveID) throws IOException; abstract Field[] getRecordIdsForSourceArchive(long archiveID) throws IOException;
/** /**
* Get typedef record whoose sourceID and datatypeID match the specified Universal IDs. * Get typedef record whose sourceID and datatypeID match the specified Universal IDs.
* @param sourceID universal source archive ID * @param sourceID universal source archive ID
* @param datatypeID universal datatype ID * @param datatypeID universal datatype ID
* @return typedef record found or null * @return typedef record found or null

View file

@ -1,6 +1,5 @@
/* ### /* ###
* IP: GHIDRA * IP: GHIDRA
* REVIEWED: YES
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
@ -16,13 +15,13 @@
*/ */
package ghidra.program.database.oldfunction; package ghidra.program.database.oldfunction;
import db.DBHandle;
import ghidra.program.database.map.AddressMap; import ghidra.program.database.map.AddressMap;
import ghidra.program.database.util.SharedRangeMapDB; import ghidra.program.database.util.SharedRangeMapDB;
import ghidra.program.model.address.AddressSet; import ghidra.program.model.address.AddressSet;
import ghidra.program.model.address.AddressSetView; import ghidra.program.model.address.AddressSetView;
import ghidra.util.datastruct.IndexRange; import ghidra.util.datastruct.IndexRange;
import ghidra.util.datastruct.IndexRangeIterator; import ghidra.util.datastruct.IndexRangeIterator;
import db.DBHandle;
/** /**
* *
@ -52,7 +51,8 @@ class OldFunctionMapDB {
/** /**
* Get the address set which makes up a function. * Get the address set which makes up a function.
* @param functionKey * @param functionKey the function key
* @return the addresses
*/ */
synchronized AddressSetView getBody(long functionKey) { synchronized AddressSetView getBody(long functionKey) {
AddressSet body = new AddressSet(); AddressSet body = new AddressSet();
@ -81,7 +81,7 @@ class OldFunctionMapDB {
// } // }
// //
// /** // /**
// * Get all function keys whoose body contains the specified address. // * Get all function keys whose body contains the specified address.
// * @param addr // * @param addr
// * @return a LongField function key iterator. // * @return a LongField function key iterator.
// */ // */
@ -89,9 +89,9 @@ class OldFunctionMapDB {
// long index = addrMap.getKey(addr, false); // long index = addrMap.getKey(addr, false);
// return rangeMap.getValueIterator(index, index); // return rangeMap.getValueIterator(index, index);
// } // }
// //
// /** // /**
// * Get all function keys whoose body overlaps the specified address set. // * Get all function keys whose body overlaps the specified address set.
// * @param set // * @param set
// * @return a LongField function key iterator. // * @return a LongField function key iterator.
// */ // */

View file

@ -29,7 +29,7 @@ public interface CompositeInternal extends Composite {
static final String DEFAULT_PACKING_NAME = ""; static final String DEFAULT_PACKING_NAME = "";
/** /**
* The stored packing value which corresponds to a composite that will automatically pack * The stored packing value which corresponds to a composite that will automatically pack
* based upon the alignment requirements of its components. A positive pack value will * based upon the alignment requirements of its components. A positive pack value will
* also pack in a similar fashion but will use the pack value as a maximum alignment * also pack in a similar fashion but will use the pack value as a maximum alignment
* for each component. * for each component.
@ -38,7 +38,7 @@ public interface CompositeInternal extends Composite {
public final static int DEFAULT_PACKING = 0; public final static int DEFAULT_PACKING = 0;
/** /**
* The stored packing value which corresponds to a composite whoose packing has been disabled. * The stored packing value which corresponds to a composite whose packing has been disabled.
* In the case of structures this will permit explicit component placement by * In the case of structures this will permit explicit component placement by
* offset within the structure and undefined filler components will be used. * offset within the structure and undefined filler components will be used.
* This is the initial state of all newly instantiated structures. * This is the initial state of all newly instantiated structures.
@ -47,32 +47,32 @@ public interface CompositeInternal extends Composite {
public final static int NO_PACKING = -1; public final static int NO_PACKING = -1;
/** /**
* The stored minimum alignment value which indicates the default alignment * The stored minimum alignment value which indicates the default alignment
* should be used based upon the packing and component alignment requirements. * should be used based upon the packing and component alignment requirements.
* See {@link #getStoredMinimumAlignment}. * See {@link #getStoredMinimumAlignment}.
*/ */
public final static int DEFAULT_ALIGNMENT = 0; public final static int DEFAULT_ALIGNMENT = 0;
/** /**
* The stored minimum alignment value which indicates the machine alignment * The stored minimum alignment value which indicates the machine alignment
* should be used as the minimum alignment (as defined by the current * should be used as the minimum alignment (as defined by the current
* {@link DataOrganization#getMachineAlignment()}). * {@link DataOrganization#getMachineAlignment()}).
* See {@link #getStoredMinimumAlignment()}. * See {@link #getStoredMinimumAlignment()}.
*/ */
public final static int MACHINE_ALIGNMENT = -1; public final static int MACHINE_ALIGNMENT = -1;
/** /**
* Gets the current packing value (typically a power of 2). Other special values * Gets the current packing value (typically a power of 2). Other special values
* which may be returned include {@value #DEFAULT_PACKING} and {@value #NO_PACKING}. * which may be returned include {@value #DEFAULT_PACKING} and {@value #NO_PACKING}.
* @return the current positive packing value, {@value #DEFAULT_PACKING} or {@value #NO_PACKING}. * @return the current positive packing value, {@value #DEFAULT_PACKING} or {@value #NO_PACKING}.
*/ */
public int getStoredPackingValue(); public int getStoredPackingValue();
/** /**
* Sets the current packing behavior (positive value, usually a power of 2). If a positive * Sets the current packing behavior (positive value, usually a power of 2). If a positive
* value is specified the use of packing will be enabled if it was previously disabled * value is specified the use of packing will be enabled if it was previously disabled
* (see {@link #setPackingEnabled(boolean)}. A positive value will set the maximum * (see {@link #setPackingEnabled(boolean)}. A positive value will set the maximum
* alignment for this composite and each component within a structure * alignment for this composite and each component within a structure
* (e.g., a value of 1 will eliminate any padding). * (e.g., a value of 1 will eliminate any padding).
* <br> * <br>
* Special packing values which may be specified include: * Special packing values which may be specified include:
@ -87,9 +87,9 @@ public interface CompositeInternal extends Composite {
// public void setStoredPackingValue(int packingValue); // public void setStoredPackingValue(int packingValue);
/** /**
* Get the minimum alignment setting for this Composite which contributes * Get the minimum alignment setting for this Composite which contributes
* to the actual computed alignment value (see {@link #getAlignment()}. * to the actual computed alignment value (see {@link #getAlignment()}.
* @return the minimum alignment setting for this Composite or a reserved value to indicate * @return the minimum alignment setting for this Composite or a reserved value to indicate
* either {@link #DEFAULT_ALIGNMENT} or {@link #MACHINE_ALIGNMENT}. * either {@link #DEFAULT_ALIGNMENT} or {@link #MACHINE_ALIGNMENT}.
*/ */
public int getStoredMinimumAlignment(); public int getStoredMinimumAlignment();

View file

@ -69,14 +69,14 @@ public interface DataTypeManager {
* Returns a unique name not currently used by any other dataType or category * Returns a unique name not currently used by any other dataType or category
* with the same baseName * with the same baseName
* *
* @param path the path of the name * @param path the path of the name
* @param baseName the base name to be made unique * @param baseName the base name to be made unique
* @return a unique name starting with baseName * @return a unique name starting with baseName
*/ */
public String getUniqueName(CategoryPath path, String baseName); public String getUniqueName(CategoryPath path, String baseName);
/** /**
* Returns a dataType that is "in" (ie suitable implementation) this * Returns a dataType that is "in" (ie suitable implementation) this
* Manager, creating a new one if necessary. Also the returned dataType * Manager, creating a new one if necessary. Also the returned dataType
* will be in a category in this dataTypeManager that is equivalent to the * will be in a category in this dataTypeManager that is equivalent to the
* category of the passed in dataType. * category of the passed in dataType.
@ -98,11 +98,11 @@ public interface DataTypeManager {
public DataType addDataType(DataType dataType, DataTypeConflictHandler handler); public DataType addDataType(DataType dataType, DataTypeConflictHandler handler);
/** /**
* Sequentially adds a collection of datatypes to this data manager. * Sequentially adds a collection of datatypes to this data manager.
* This method provides the added benefit of equivalence caching * This method provides the added benefit of equivalence caching
* for improved performance. * for improved performance.
* <br> * <br>
* WARNING: This is an experimental method whoose use may cause the GUI and * WARNING: This is an experimental method whose use may cause the GUI and
* task monitor to become unresponsive due to extended hold times on the manager lock. * task monitor to become unresponsive due to extended hold times on the manager lock.
* @param dataTypes collection of datatypes * @param dataTypes collection of datatypes
* @param handler conflict handler * @param handler conflict handler
@ -165,7 +165,7 @@ public interface DataTypeManager {
* @param updateCategoryPath if true, the replacementDt will have its categoryPath changed * @param updateCategoryPath if true, the replacementDt will have its categoryPath changed
* to the exitingDt's path. * to the exitingDt's path.
* @return the resolved replacement dataType. * @return the resolved replacement dataType.
* @throws DataTypeDependencyException if the replacement datatype depends on * @throws DataTypeDependencyException if the replacement datatype depends on
* the existing dataType; * the existing dataType;
*/ */
public DataType replaceDataType(DataType existingDt, DataType replacementDt, public DataType replaceDataType(DataType existingDt, DataType replacementDt,
@ -179,7 +179,7 @@ public interface DataTypeManager {
* name of a category in the same category as the datatype. For example, if you call * name of a category in the same category as the datatype. For example, if you call
* getDataType("/a/b/c"), and "b/c" is the name of your datatype, it will find it unless * getDataType("/a/b/c"), and "b/c" is the name of your datatype, it will find it unless
* there is also a category "b" under category "a". A better solution is to use * there is also a category "b" under category "a". A better solution is to use
* the {@link #getDataType(DataTypePath)} method because the DataTypePath keeps the * the {@link #getDataType(DataTypePath)} method because the DataTypePath keeps the
* category and datatype name separate. * category and datatype name separate.
* *
* @param dataTypePath path * @param dataTypePath path
@ -204,10 +204,10 @@ public interface DataTypeManager {
public DataType getDataType(DataTypePath dataTypePath); public DataType getDataType(DataTypePath dataTypePath);
/** /**
* Returns the dataTypeId for the given dataType. If the dataType is not * Returns the dataTypeId for the given dataType. If the dataType is not
* currently in the dataTypeManger, it will be added * currently in the dataTypeManger, it will be added
* *
* @param dt the data type * @param dt the data type
* @return the ID of the resolved type * @return the ID of the resolved type
*/ */
public long getResolvedID(DataType dt); public long getResolvedID(DataType dt);
@ -222,15 +222,15 @@ public interface DataTypeManager {
public long getID(DataType dt); public long getID(DataType dt);
/** /**
* Returns the dataType associated with the given dataTypeId or null if the dataTypeId is * Returns the dataType associated with the given dataTypeId or null if the dataTypeId is
* not valid * not valid
* *
* @param dataTypeID the ID * @param dataTypeID the ID
* @return the type * @return the type
*/ */
public DataType getDataType(long dataTypeID); public DataType getDataType(long dataTypeID);
/** /**
* Returns the Category with the given id * Returns the Category with the given id
* *
* @param categoryID id of the desired category * @param categoryID id of the desired category
@ -249,7 +249,7 @@ public interface DataTypeManager {
/** /**
* Notification when data type is changed. * Notification when data type is changed.
* @param dataType data type that is changed * @param dataType data type that is changed
* @param isAutoChange true if change was an automatic change in response to * @param isAutoChange true if change was an automatic change in response to
* another datatype's change (e.g., size, alignment). * another datatype's change (e.g., size, alignment).
*/ */
public void dataTypeChanged(DataType dataType, boolean isAutoChange); public void dataTypeChanged(DataType dataType, boolean isAutoChange);
@ -291,7 +291,7 @@ public interface DataTypeManager {
/** /**
* Return true if the given dataType exists in this data type manager * Return true if the given dataType exists in this data type manager
* *
* @param dataType the type * @param dataType the type
* @return true if the type is in this manager * @return true if the type is in this manager
*/ */
public boolean contains(DataType dataType); public boolean contains(DataType dataType);
@ -367,12 +367,12 @@ public interface DataTypeManager {
/** /**
* Returns a pointer of the given size to the given datatype. * Returns a pointer of the given size to the given datatype.
* Note: It is preferred to use default sized pointers when possible (i.e., size=-1, * Note: It is preferred to use default sized pointers when possible (i.e., size=-1,
* see {@link #getPointer(DataType)}) instead of explicitly specifying the size value. * see {@link #getPointer(DataType)}) instead of explicitly specifying the size value.
* *
* @param datatype the pointed to data type * @param datatype the pointed to data type
* @param size the size of the pointer to be created or -1 for a default sized pointer * @param size the size of the pointer to be created or -1 for a default sized pointer
* @return the pointer * @return the pointer
*/ */
public Pointer getPointer(DataType datatype, int size); public Pointer getPointer(DataType datatype, int size);
@ -476,7 +476,7 @@ public interface DataTypeManager {
public void associateDataTypeWithArchive(DataType datatype, SourceArchive archive); public void associateDataTypeWithArchive(DataType datatype, SourceArchive archive);
/** /**
* If the indicated data type is associated with a source archive, this will remove the * If the indicated data type is associated with a source archive, this will remove the
* association and the data type will become local to this data type manager. * association and the data type will become local to this data type manager.
* @param datatype the data type to be disassociated from a source archive. * @param datatype the data type to be disassociated from a source archive.
*/ */
@ -501,8 +501,8 @@ public interface DataTypeManager {
public boolean updateSourceArchiveName(UniversalID sourceID, String name); public boolean updateSourceArchiveName(UniversalID sourceID, String name);
/** /**
* Get the data organization associated with this data type manager. Note that the * Get the data organization associated with this data type manager. Note that the
* DataOrganization settings may not be changed dynamically. * DataOrganization settings may not be changed dynamically.
* @return data organization (will never be null) * @return data organization (will never be null)
*/ */
public DataOrganization getDataOrganization(); public DataOrganization getDataOrganization();
@ -523,14 +523,14 @@ public interface DataTypeManager {
/** /**
* Returns or creates a persisted version of the given source archive * Returns or creates a persisted version of the given source archive
* @param sourceArchive the archive * @param sourceArchive the archive
* @return the archive * @return the archive
*/ */
public SourceArchive resolveSourceArchive(SourceArchive sourceArchive); public SourceArchive resolveSourceArchive(SourceArchive sourceArchive);
/** /**
* Returns the data types within this data type manager that contain the specified data type. * Returns the data types within this data type manager that contain the specified data type.
* @param dataType the data type * @param dataType the data type
* @return a set of data types that contain the specified data type. * @return a set of data types that contain the specified data type.
*/ */
public Set<DataType> getDataTypesContaining(DataType dataType); public Set<DataType> getDataTypesContaining(DataType dataType);

View file

@ -194,7 +194,7 @@ public class VariableUtilities {
/** /**
* Perform variable storage checks using the specified datatype. * Perform variable storage checks using the specified datatype.
* @param storage variable storage whoose size must match the specified data type size * @param storage variable storage whose size must match the specified data type size
* @param dataType a datatype checked using {@link #checkDataType(DataType, boolean, int, Program)} * @param dataType a datatype checked using {@link #checkDataType(DataType, boolean, int, Program)}
* @param allowSizeMismatch if true size mismatch will be ignore * @param allowSizeMismatch if true size mismatch will be ignore
* @throws InvalidInputException * @throws InvalidInputException
@ -258,7 +258,7 @@ public class VariableUtilities {
* <ul> * <ul>
* <li>Function definition datatype</li> * <li>Function definition datatype</li>
* <li>An unsized/zero-element array</li> * <li>An unsized/zero-element array</li>
* </ul> * </ul>
* @param dataType datatype to be checked * @param dataType datatype to be checked
* @param voidOK true if checking return datatype and void is allow, else false. * @param voidOK true if checking return datatype and void is allow, else false.
* @param defaultSize Undefined datatype size to be used if specified datatype is null. A value less than 1 * @param defaultSize Undefined datatype size to be used if specified datatype is null. A value less than 1
@ -312,7 +312,7 @@ public class VariableUtilities {
} }
if (dataType.getLength() <= 0) { if (dataType.getLength() <= 0) {
// Unexpected condition - only dynamic types are expected to have negative length and // Unexpected condition - only dynamic types are expected to have negative length and
// none should report 0 has a length. // none should report 0 has a length.
throw new IllegalArgumentException("Unsupported data type length (" + throw new IllegalArgumentException("Unsupported data type length (" +
dataType.getLength() + "): " + dataType.getName()); dataType.getLength() + "): " + dataType.getName());
@ -326,7 +326,7 @@ public class VariableUtilities {
* <ul> * <ul>
* <li>Function definition datatype</li> * <li>Function definition datatype</li>
* <li>An unsized/zero-element array</li> * <li>An unsized/zero-element array</li>
* </ul> * </ul>
* @param dataType datatype to be checked * @param dataType datatype to be checked
* @param voidOK true if checking return datatype and void is allow, else false. * @param voidOK true if checking return datatype and void is allow, else false.
* @param defaultSize Undefined datatype size to be used if specified datatype is null. A value less than 1 * @param defaultSize Undefined datatype size to be used if specified datatype is null. A value less than 1
@ -346,7 +346,7 @@ public class VariableUtilities {
* <ul> * <ul>
* <li>Function definition datatype</li> * <li>Function definition datatype</li>
* <li>An unsized/zero-element array</li> * <li>An unsized/zero-element array</li>
* </ul> * </ul>
* @param dataType datatype to be checked. If null is specified the DEFAULT datatype will be * @param dataType datatype to be checked. If null is specified the DEFAULT datatype will be
* returned. * returned.
* @param voidOK true if checking return datatype and void is allow, else false. * @param voidOK true if checking return datatype and void is allow, else false.
@ -474,7 +474,7 @@ public class VariableUtilities {
" bytes: " + curStorage.toString()); " bytes: " + curStorage.toString());
} }
} }
vnAddr = newReg.getAddress(); vnAddr = newReg.getAddress();
if (bigEndian) { if (bigEndian) {
vnAddr = vnAddr.add(newReg.getMinimumByteSize() - size); vnAddr = vnAddr.add(newReg.getMinimumByteSize() - size);
@ -746,7 +746,7 @@ public class VariableUtilities {
} }
/** /**
* Create an empty placeholder class structure whose category is derived from * Create an empty placeholder class structure whose category is derived from
* the function's class namespace. NOTE: The structure will not be added to the data * the function's class namespace. NOTE: The structure will not be added to the data
* type manager. * type manager.
* @param classNamespace class namespace * @param classNamespace class namespace
@ -768,14 +768,14 @@ public class VariableUtilities {
/** /**
* Find the structure data type which corresponds to the specified class namespace * Find the structure data type which corresponds to the specified class namespace
* within the specified data type manager. * within the specified data type manager.
* The preferred structure will utilize a namespace-based category path, however, * The preferred structure will utilize a namespace-based category path, however,
* the match criteria can be fuzzy and relies primarily on the class name. * the match criteria can be fuzzy and relies primarily on the class name.
* While a new empty structure may be returned, it will not be added to the program's data type * While a new empty structure may be returned, it will not be added to the program's data type
* manager. * manager.
* @param classNamespace class namespace * @param classNamespace class namespace
* @param dataTypeManager data type manager which should be searched and whose * @param dataTypeManager data type manager which should be searched and whose
* data organization should be used. * data organization should be used.
* @return new or existing structure whose name matches the specified class namespace * @return new or existing structure whose name matches the specified class namespace
*/ */
public static Structure findOrCreateClassStruct(GhidraClass classNamespace, public static Structure findOrCreateClassStruct(GhidraClass classNamespace,
@ -789,8 +789,8 @@ public class VariableUtilities {
/** /**
* Find the structure data type which corresponds to the specified function's class namespace * Find the structure data type which corresponds to the specified function's class namespace
* within the function's program. One will be instantiated if not found. * within the function's program. One will be instantiated if not found.
* The preferred structure will utilize a namespace-based category path, however, * The preferred structure will utilize a namespace-based category path, however,
* the match criteria can be fuzzy and relies primarily on the class name. * the match criteria can be fuzzy and relies primarily on the class name.
* @param function function's whose class namespace is the basis for the structure * @param function function's whose class namespace is the basis for the structure
* @return new or existing structure whose name matches the function's class namespace or * @return new or existing structure whose name matches the function's class namespace or
@ -807,8 +807,8 @@ public class VariableUtilities {
/** /**
* Find the structure data type which corresponds to the specified class namespace * Find the structure data type which corresponds to the specified class namespace
* within the specified data type manager. . * within the specified data type manager. .
* The preferred structure will utilize a namespace-based category path, however, * The preferred structure will utilize a namespace-based category path, however,
* the match criteria can be fuzzy and relies primarily on the class name. * the match criteria can be fuzzy and relies primarily on the class name.
* @param classNamespace class namespace * @param classNamespace class namespace
* @param dataTypeManager data type manager which should be searched. * @param dataTypeManager data type manager which should be searched.
@ -823,8 +823,8 @@ public class VariableUtilities {
/** /**
* Find the structure data type which corresponds to the specified function's class namespace * Find the structure data type which corresponds to the specified function's class namespace
* within the function's program. * within the function's program.
* The preferred structure will utilize a namespace-based category path, however, * The preferred structure will utilize a namespace-based category path, however,
* the match criteria can be fuzzy and relies primarily on the class name. * the match criteria can be fuzzy and relies primarily on the class name.
* @param func the function. * @param func the function.
* @return existing structure whose name matches the specified function's class namespace * @return existing structure whose name matches the specified function's class namespace

View file

@ -1,6 +1,5 @@
/* ### /* ###
* IP: GHIDRA * IP: GHIDRA
* REVIEWED: YES
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
@ -19,18 +18,20 @@ package ghidra.program.model.symbol;
import ghidra.program.model.address.Address; import ghidra.program.model.address.Address;
/** /**
* <code>OffsetReference</code> is a memory reference whoose "to" address is * <code>OffsetReference</code> is a memory reference whose "to" address is
* computed from a base address plus an offset. * computed from a base address plus an offset.
*/ */
public interface OffsetReference extends Reference { public interface OffsetReference extends Reference {
/** /**
* Returns the offset. * Returns the offset.
* @return the offset
*/ */
public long getOffset(); public long getOffset();
/** /**
* Returns the base address. * Returns the base address.
* @return the address
*/ */
public Address getBaseAddress(); public Address getBaseAddress();

View file

@ -1,6 +1,5 @@
/* ### /* ###
* IP: GHIDRA * IP: GHIDRA
* REVIEWED: YES
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
@ -17,18 +16,20 @@
package ghidra.program.model.symbol; package ghidra.program.model.symbol;
/** /**
* <code>ShiftedReference</code> is a memory reference whoose "to" address is * <code>ShiftedReference</code> is a memory reference whose "to" address is
* computed from a base value left shifted by a shift amount. * computed from a base value left shifted by a shift amount.
*/ */
public interface ShiftedReference extends Reference { public interface ShiftedReference extends Reference {
/** /**
* Returns the left shift amount. * Returns the left shift amount.
* @return the shift
*/ */
public int getShift(); public int getShift();
/** /**
* Returns the base value. * Returns the base value.
* @return the value
*/ */
public long getValue(); public long getValue();
} }