Mirror of https://github.com/NationalSecurityAgency/ghidra.git (synced 2025-10-03 09:49:23 +02:00)

Commit ee25a7d0cc: Merge remote-tracking branch 'origin/patch'
34 changed files with 396 additions and 422 deletions
@@ -22,6 +22,7 @@
package ghidra.app.plugin.core.misc;

import java.awt.Color;
import java.io.IOException;

import javax.swing.ImageIcon;

@@ -43,8 +44,7 @@ import ghidra.framework.plugintool.util.PluginStatus;
import ghidra.program.model.address.*;
import ghidra.program.model.listing.Program;
import ghidra.program.model.listing.ProgramChangeSet;
import ghidra.util.HelpLocation;
import ghidra.util.SystemUtilities;
import ghidra.util.*;
import ghidra.util.exception.CancelledException;
import ghidra.util.task.*;
import ghidra.util.worker.Job;

@@ -439,6 +439,10 @@ public class MyProgramChangesDisplayPlugin extends ProgramPlugin implements Doma
try {
changes = (ProgramChangeSet) domainFile.getChangesByOthersSinceCheckout();
}
catch (IOException e) {
Msg.warn(this, "Unable to determine program change set: " + e.getMessage());
return;
}
catch (Exception e) {
ClientUtil.handleException(tool.getProject().getRepository(), e, "Get Change Set",
false, tool.getToolFrame());
@@ -807,7 +807,7 @@ public class DBHandle {
* Load existing tables from database.
* @throws IOException thrown if IO error occurs.
*/
private void loadTables() {
private void loadTables() throws IOException {

tables = new Hashtable<>();
TableRecord[] tableRecords = masterTable.getTableRecords();
@@ -1,6 +1,5 @@
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -16,8 +15,6 @@
*/
package db;

import ghidra.util.exception.AssertException;

import java.io.IOException;

/**

@@ -308,10 +305,11 @@ public abstract class Field implements Comparable<Field> {

/**
* Get the field associated with the specified type value.
* @param fieldType
* @param fieldType encoded Field type
* @return Field
* @throws UnsupportedFieldException if unsupported fieldType specified
*/
static Field getField(byte fieldType) {
static Field getField(byte fieldType) throws UnsupportedFieldException {
if ((fieldType & INDEX_TYPE_FLAG) == 0) {
switch (fieldType & BASE_TYPE_MASK) {
case LONG_TYPE:

@@ -333,7 +331,13 @@ public abstract class Field implements Comparable<Field> {
else {
return IndexField.getIndexField(fieldType);
}
throw new AssertException();
throw new UnsupportedFieldException(fieldType);
}

public static class UnsupportedFieldException extends IOException {
UnsupportedFieldException(byte fieldType) {
super("Unsupported DB field type: 0x" + Integer.toHexString(fieldType & 0xff));
}
}

}
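Note (not part of the commit): the change above is the core of this patch. Field.getField(byte) used to end with throw new AssertException() for an unrecognized type byte; it now throws a checked UnsupportedFieldException that extends IOException and is declared on the method, so callers are forced to deal with databases written by a newer Ghidra. Below is a self-contained sketch of the same pattern; the class FieldTypeDecoder, its constants and the decode method are invented for illustration and are not Ghidra code.

    import java.io.IOException;

    // Sketch of the "unsupported type code -> checked exception" pattern.
    class FieldTypeDecoder {

        static final byte TYPE_LONG = 0;
        static final byte TYPE_STRING = 1;

        static class UnsupportedFieldException extends IOException {
            UnsupportedFieldException(byte fieldType) {
                super("Unsupported DB field type: 0x" + Integer.toHexString(fieldType & 0xff));
            }
        }

        static String decode(byte fieldType) throws UnsupportedFieldException {
            switch (fieldType) {
                case TYPE_LONG:
                    return "LongField";
                case TYPE_STRING:
                    return "StringField";
                default:
                    // Previously this situation surfaced as an assertion-style failure;
                    // a checked exception lets callers report a version problem instead.
                    throw new UnsupportedFieldException(fieldType);
            }
        }
    }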
@@ -31,10 +31,9 @@ public class FieldIndexTable extends IndexTable {
/**
* Construct a new secondary index which is based upon a specific field within the
* primary table specified by name.
* @param db database handle
* @param primaryTable primary table.
* @param colIndex identifies the indexed column within the primary table.
* @throws IOException
* @throws IOException thrown if an IO error occurs
*/
FieldIndexTable(Table primaryTable, int colIndex) throws IOException {
this(primaryTable, primaryTable.getDBHandle().getMasterTable().createTableRecord(

@@ -44,11 +43,11 @@ public class FieldIndexTable extends IndexTable {
/**
* Construct a new or existing secondary index. An existing index must have
* its' root ID specified within the tableRecord.
* @param db database handle
* @param bufferMgr database buffer manager
* @param primaryTable primary table.
* @param indexTableRecord specifies the index parameters.
* @throws IOException thrown if an IO error occurs
*/
FieldIndexTable(Table primaryTable, TableRecord indexTableRecord) {
FieldIndexTable(Table primaryTable, TableRecord indexTableRecord) throws IOException {
super(primaryTable, indexTableRecord);
this.indexSchema = indexTable.getSchema();
this.indexColumn = indexTableRecord.getIndexedColumn();
@@ -1,6 +1,5 @@
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -28,48 +27,45 @@ import java.util.NoSuchElementException;
*/
class FixedIndexTable extends IndexTable {

private static final Class<?>[] fieldClasses = {
BinaryField.class, // index data
private static final Class<?>[] fieldClasses = { BinaryField.class, // index data
};

private static final String[] fieldNames = {
"IndexBuffer"
};
private static final String[] fieldNames = { "IndexBuffer" };

private static Schema indexSchema = new Schema(0, "IndexKey", fieldClasses, fieldNames);

/**
* Construct a new secondary index which is based upon a field within the
* primary table specified by name.
* @param db database handle
* @param primaryTable primary table.
* @param colIndex identifies the indexed column within the primary table.
* @throws IOException
* @throws IOException thrown if an IO error occurs
*/
FixedIndexTable(Table primaryTable, int colIndex) throws IOException {
this(primaryTable,
primaryTable.getDBHandle().getMasterTable().createTableRecord(primaryTable.getName(), indexSchema, colIndex));
this(primaryTable, primaryTable.getDBHandle().getMasterTable().createTableRecord(
primaryTable.getName(), indexSchema, colIndex));
}

/**
* Construct a new or existing secondary index. An existing index must have
* its' root ID specified within the tableRecord.
* @param db database handle
* @param bufferMgr database buffer manager
* @param primaryTable primary table.
* @param indexTableRecord specifies the index parameters.
* @throws IOException thrown if an IO error occurs
*/
FixedIndexTable(Table primaryTable, TableRecord indexTableRecord) {
FixedIndexTable(Table primaryTable, TableRecord indexTableRecord) throws IOException {
super(primaryTable, indexTableRecord);
}

/**
* Find all primary keys which correspond to the specified indexed field
* value.
* @param field the field value to search for.
* @param indexValue the field value to search for.
* @return list of primary keys
* @throws IOException thrown if an IO error occurs
*/
@Override
long[] findPrimaryKeys(Field indexValue) throws IOException {
long[] findPrimaryKeys(Field indexValue) throws IOException {
if (!indexValue.getClass().equals(fieldType.getClass()))
throw new IllegalArgumentException("Incorrect indexed field type");
Record indexRecord = indexTable.getRecord(indexValue.getLongValue());

@@ -82,11 +78,11 @@ class FixedIndexTable extends IndexTable {
/**
* Get the number of primary keys which correspond to the specified indexed field
* value.
* @param field the field value to search for.
* @param indexValue the field value to search for.
* @return key count
*/
@Override
int getKeyCount(Field indexValue) throws IOException {
int getKeyCount(Field indexValue) throws IOException {
if (!indexValue.getClass().equals(fieldType.getClass()))
throw new IllegalArgumentException("Incorrect indexed field type");
Record indexRecord = indexTable.getRecord(indexValue.getLongValue());

@@ -100,7 +96,7 @@ class FixedIndexTable extends IndexTable {
* @see ghidra.framework.store.db.IndexTable#addEntry(ghidra.framework.store.db.Record)
*/
@Override
void addEntry(Record record) throws IOException {
void addEntry(Record record) throws IOException {
Field indexField = record.getField(colIndex);
long secondaryKey = indexField.getLongValue();
Record indexRecord = indexTable.getRecord(secondaryKey);

@@ -117,7 +113,7 @@ class FixedIndexTable extends IndexTable {
* @see ghidra.framework.store.db.IndexTable#deleteEntry(ghidra.framework.store.db.Record)
*/
@Override
void deleteEntry(Record record) throws IOException {
void deleteEntry(Record record) throws IOException {
Field indexField = record.getField(colIndex);
long secondaryKey = indexField.getLongValue();
Record indexRecord = indexTable.getRecord(secondaryKey);

@@ -150,7 +146,7 @@ class FixedIndexTable extends IndexTable {
* @see ghidra.framework.store.db.IndexTable#indexIterator()
*/
@Override
DBFieldIterator indexIterator() throws IOException {
DBFieldIterator indexIterator() throws IOException {
return new IndexLongIterator();
}

@@ -158,7 +154,8 @@ class FixedIndexTable extends IndexTable {
* @see ghidra.framework.store.db.IndexTable#indexIterator(ghidra.framework.store.db.Field, ghidra.framework.store.db.Field, boolean)
*/
@Override
DBFieldIterator indexIterator(Field minField, Field maxField, boolean atMin) throws IOException {
DBFieldIterator indexIterator(Field minField, Field maxField, boolean atMin)
throws IOException {
long min = minField != null ? minField.getLongValue() : Long.MIN_VALUE;
long max = maxField != null ? maxField.getLongValue() : Long.MAX_VALUE;
return new IndexLongIterator(min, max, atMin);

@@ -168,7 +165,8 @@ class FixedIndexTable extends IndexTable {
* @see db.IndexTable#indexIterator(db.Field, db.Field, db.Field, boolean)
*/
@Override
DBFieldIterator indexIterator(Field minField, Field maxField, Field startField, boolean before) throws IOException {
DBFieldIterator indexIterator(Field minField, Field maxField, Field startField, boolean before)
throws IOException {
if (startField == null) {
throw new IllegalArgumentException("starting index value required");
}

@@ -201,7 +199,7 @@ class FixedIndexTable extends IndexTable {
* @param startValue minimum index value or null if no minimum
* @param endValue maximum index value or null if no maximum
* @param atStart if true initial position is before startValue, else position
* is after endValue
* is after endValue
* @throws IOException
*/
IndexLongIterator(long minValue, long maxValue, boolean atMin) throws IOException {

@@ -223,7 +221,8 @@ class FixedIndexTable extends IndexTable {
* @param longValue
* @param before
*/
public IndexLongIterator(long minValue, long maxValue, long start, boolean before) throws IOException {
public IndexLongIterator(long minValue, long maxValue, long start, boolean before)
throws IOException {

indexIterator = indexTable.longKeyIterator(minValue, maxValue, start);

@@ -235,6 +234,7 @@ class FixedIndexTable extends IndexTable {
}
}

@Override
public boolean hasNext() throws IOException {
if (hasNext)
return true;

@@ -244,12 +244,14 @@ class FixedIndexTable extends IndexTable {
keyField.setLongValue(key);
hasNext = true;
hasPrev = false;
} catch (NoSuchElementException e) {
}
catch (NoSuchElementException e) {
return false;
}
return true;
}

@Override
public boolean hasPrevious() throws IOException {
if (hasPrev)
return true;

@@ -259,12 +261,14 @@ class FixedIndexTable extends IndexTable {
keyField.setLongValue(key);
hasNext = false;
hasPrev = true;
} catch (NoSuchElementException e) {
}
catch (NoSuchElementException e) {
return false;
}
return true;
}

@Override
public Field next() throws IOException {
if (hasNext || hasNext()) {
hasNext = false;

@@ -275,6 +279,7 @@ class FixedIndexTable extends IndexTable {
return null;
}

@Override
public Field previous() throws IOException {
if (hasPrev || hasPrevious()) {
hasNext = true;

@@ -290,6 +295,7 @@ class FixedIndexTable extends IndexTable {
* index value (lastKey).
* @see db.DBFieldIterator#delete()
*/
@Override
public boolean delete() throws IOException {
if (lastKey == null)
return false;
@@ -68,7 +68,7 @@ abstract class IndexTable {
* @param indexTableRecord specifies the index parameters.
* @throws IOException thrown if IO error occurs
*/
IndexTable(Table primaryTable, TableRecord indexTableRecord) {
IndexTable(Table primaryTable, TableRecord indexTableRecord) throws IOException {
if (!primaryTable.useLongKeys())
throw new AssertException("Only long-key tables may be indexed");
this.db = primaryTable.getDBHandle();

@@ -87,7 +87,7 @@ abstract class IndexTable {
* @return IndexTable index table
* @throws IOException thrown if IO error occurs
*/
static IndexTable getIndexTable(DBHandle db, TableRecord indexTableRecord) {
static IndexTable getIndexTable(DBHandle db, TableRecord indexTableRecord) throws IOException {
String name = indexTableRecord.getName();
Table primaryTable = db.getTable(name);
if (primaryTable == null)
@@ -1,6 +1,5 @@
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -16,11 +15,12 @@
*/
package db;

import ghidra.util.exception.AssertException;

import java.util.ArrayList;
import java.util.StringTokenizer;

import db.Field.UnsupportedFieldException;
import ghidra.util.exception.AssertException;

/**
* Class for definining the columns in a Ghidra Database table.
*/

@@ -93,8 +93,10 @@ public class Schema {
* @param fieldTypes
* @param packedFieldNames packed list of field names separated by ';'.
* The first field name corresponds to the key name.
* @throws UnsupportedFieldException if unsupported fieldType specified
*/
Schema(int version, byte keyFieldType, byte[] fieldTypes, String packedFieldNames) {
Schema(int version, byte keyFieldType, byte[] fieldTypes, String packedFieldNames)
throws UnsupportedFieldException {
this.version = version;
this.keyType = Field.getField(keyFieldType);
parseNames(packedFieldNames);

@@ -257,8 +259,8 @@ public class Schema {
*/
public Record createRecord(Field key) {
if (!getKeyFieldClass().equals(key.getClass())) {
throw new IllegalArgumentException("expected key field type of " +
keyType.getClass().getSimpleName());
throw new IllegalArgumentException(
"expected key field type of " + keyType.getClass().getSimpleName());
}
Field[] fieldValues = new Field[fieldClasses.length];
for (int i = 0; i < fieldClasses.length; i++) {
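Note (not part of the commit): per the javadoc above, the Schema constructor receives the field names as one packed string separated by ';' with the key name first, and it now lets UnsupportedFieldException from Field.getField propagate rather than failing later with an assertion. Below is a rough, self-contained sketch of parsing such a packed name string; the PackedNames class is invented for illustration and is not Ghidra's parser.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.StringTokenizer;

    // Illustrative only: splits "KeyName;Field1;Field2" into a key name and field names.
    class PackedNames {
        final String keyName;
        final String[] fieldNames;

        PackedNames(String packedFieldNames) {
            StringTokenizer st = new StringTokenizer(packedFieldNames, ";");
            keyName = st.nextToken(); // first entry is the key name
            List<String> names = new ArrayList<>();
            while (st.hasMoreTokens()) {
                names.add(st.nextToken());
            }
            fieldNames = names.toArray(new String[0]);
        }
    }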
@@ -18,6 +18,7 @@ package db;
import java.io.IOException;
import java.util.*;

import db.Field.UnsupportedFieldException;
import ghidra.util.Msg;
import ghidra.util.datastruct.IntObjectHashtable;
import ghidra.util.exception.*;

@@ -56,8 +57,9 @@ public class Table {
* Construct a new or existing Table.
* @param db database handle
* @param tableRecord master table record for this table.
* @throws UnsupportedFieldException if unsupported schema field encountered
*/
Table(DBHandle db, TableRecord tableRecord) {
Table(DBHandle db, TableRecord tableRecord) throws UnsupportedFieldException {
this.db = db;
this.tableRecord = tableRecord;
@@ -1,6 +1,5 @@
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.

@@ -16,6 +15,8 @@
*/
package db;

import db.Field.UnsupportedFieldException;

/**
* <code>TableRecord</code> manages information about a table. Each TableRecord
* corresponds to a stored record within the master table.

@@ -32,29 +33,19 @@ class TableRecord implements Comparable<TableRecord> {
private static final int MAX_KEY_COLUMN = 7;
private static final int RECORD_COUNT_COLUMN = 8;

private static Class<?>[] fieldClasses = {
StringField.class, // name of table
IntField.class, // Schema version
IntField.class, // Root buffer ID (first buffer)
ByteField.class, // Key field type
BinaryField.class, // Schema field types
StringField.class, // Schema key/field names
IntField.class, // indexing column (-1 = primary)
LongField.class, // max primary key value ever used
IntField.class // number of records
private static Class<?>[] fieldClasses = { StringField.class, // name of table
IntField.class, // Schema version
IntField.class, // Root buffer ID (first buffer)
ByteField.class, // Key field type
BinaryField.class, // Schema field types
StringField.class, // Schema key/field names
IntField.class, // indexing column (-1 = primary)
LongField.class, // max primary key value ever used
IntField.class // number of records
};

private static String[] tableRecordFieldNames = {
"TableName",
"SchemaVersion",
"RootBufferId",
"KeyType",
"FieldTypes",
"FieldNames",
"IndexColumn",
"MaxKey",
"RecordCount"
};
private static String[] tableRecordFieldNames = { "TableName", "SchemaVersion", "RootBufferId",
"KeyType", "FieldTypes", "FieldNames", "IndexColumn", "MaxKey", "RecordCount" };

private static Schema schema = new Schema(0, "TableNum", fieldClasses, tableRecordFieldNames);

@@ -158,12 +149,11 @@ class TableRecord implements Comparable<TableRecord> {
/**
* Get the table schema
* @return table schema
* @throws UnsupportedFieldException if unsupported schema field encountered
*/
Schema getSchema() {
return new Schema(record.getIntValue(VERSION_COLUMN),
record.getByteValue(KEY_TYPE_COLUMN),
record.getBinaryData(FIELD_TYPES_COLUMN),
record.getString(FIELD_NAMES_COLUMN));
Schema getSchema() throws UnsupportedFieldException {
return new Schema(record.getIntValue(VERSION_COLUMN), record.getByteValue(KEY_TYPE_COLUMN),
record.getBinaryData(FIELD_TYPES_COLUMN), record.getString(FIELD_NAMES_COLUMN));
}

/**

@@ -239,6 +229,7 @@ class TableRecord implements Comparable<TableRecord> {
* key of another table record (obj).
* @see java.lang.Comparable#compareTo(java.lang.Object)
*/
@Override
public int compareTo(TableRecord otherRecord) {
long myKey = record.getKey();
long otherKey = otherRecord.record.getKey();
@@ -36,10 +36,9 @@ class VarIndexTable extends IndexTable {
/**
* Construct a new secondary index which is based upon a field within the
* primary table specified by name.
* @param db database handle
* @param primaryTable primary table.
* @param colIndex identifies the indexed column within the primary table.
* @throws IOException
* @throws IOException thrown if an IO error occurs
*/
VarIndexTable(Table primaryTable, int colIndex) throws IOException {
this(primaryTable,

@@ -52,11 +51,11 @@ class VarIndexTable extends IndexTable {
/**
* Construct a new or existing secondary index. An existing index must have
* its' root ID specified within the tableRecord.
* @param db database handle
* @param bufferMgr database buffer manager
* @param primaryTable primary table.
* @param indexTableRecord specifies the index parameters.
* @throws IOException thrown if an IO error occurs
*/
VarIndexTable(Table primaryTable, TableRecord indexTableRecord) {
VarIndexTable(Table primaryTable, TableRecord indexTableRecord) throws IOException {
super(primaryTable, indexTableRecord);
this.indexSchema = indexTable.getSchema();
}

@@ -64,8 +63,9 @@ class VarIndexTable extends IndexTable {
/**
* Find all primary keys which correspond to the specified indexed field
* value.
* @param field the field value to search for.
* @param indexValue the field value to search for.
* @return list of primary keys
* @throws IOException thrown if an IO error occurs
*/
@Override
long[] findPrimaryKeys(Field indexValue) throws IOException {

@@ -81,8 +81,9 @@ class VarIndexTable extends IndexTable {
/**
* Get the number of primary keys which correspond to the specified indexed field
* value.
* @param field the field value to search for.
* @param indexValue the field value to search for.
* @return key count
* @throws IOException thrown if an IO error occurs
*/
@Override
int getKeyCount(Field indexValue) throws IOException {
@@ -47,8 +47,9 @@ class VarKeyInteriorNode extends VarKeyNode {
* Construct an existing variable-length-key interior node.
* @param nodeMgr table node manager instance
* @param buf node buffer
* @throws IOException if IO error occurs
*/
VarKeyInteriorNode(NodeMgr nodeMgr, DataBuffer buf) {
VarKeyInteriorNode(NodeMgr nodeMgr, DataBuffer buf) throws IOException {
super(nodeMgr, buf);
}
@@ -46,8 +46,9 @@ abstract class VarKeyNode implements BTreeNode {
* Construct an existing variable-length-key node.
* @param nodeMgr table node manager instance
* @param buf node buffer
* @throws IOException if IO error occurs
*/
VarKeyNode(NodeMgr nodeMgr, DataBuffer buf) {
VarKeyNode(NodeMgr nodeMgr, DataBuffer buf) throws IOException {
this.nodeMgr = nodeMgr;
this.buffer = buf;
keyType = Field.getField(buf.getByte(KEY_TYPE_OFFSET));
@@ -55,8 +55,9 @@ class VarKeyRecordNode extends VarKeyNode {
* Construct an existing variable-length-key record leaf node.
* @param nodeMgr table node manager instance
* @param buf node buffer
* @throws IOException thrown if IO error occurs
*/
VarKeyRecordNode(NodeMgr nodeMgr, DataBuffer buf) {
VarKeyRecordNode(NodeMgr nodeMgr, DataBuffer buf) throws IOException {
super(nodeMgr, buf);
}
@@ -61,8 +61,8 @@ public interface ContentHandler extends ExtensionPoint {
* @throws CancelledException if the user cancels
*/
long createFile(FileSystem fs, FileSystem userfs, String path, String name,
DomainObject domainObject, TaskMonitor monitor) throws IOException,
InvalidNameException, CancelledException;
DomainObject domainObject, TaskMonitor monitor)
throws IOException, InvalidNameException, CancelledException;

/**
* Open a folder item for immutable use. If any changes are attempted on the

@@ -83,8 +83,8 @@ public interface ContentHandler extends ExtensionPoint {
* difference which could not be handled.
*/
DomainObjectAdapter getImmutableObject(FolderItem item, Object consumer, int version,
int minChangeVersion, TaskMonitor monitor) throws IOException, CancelledException,
VersionException;
int minChangeVersion, TaskMonitor monitor)
throws IOException, CancelledException, VersionException;

/**
* Open a folder item for read-only use. While changes are permitted on the

@@ -104,8 +104,8 @@ public interface ContentHandler extends ExtensionPoint {
* difference which could not be handled.
*/
DomainObjectAdapter getReadOnlyObject(FolderItem item, int version, boolean okToUpgrade,
Object consumer, TaskMonitor monitor) throws IOException, VersionException,
CancelledException;
Object consumer, TaskMonitor monitor)
throws IOException, VersionException, CancelledException;

/**
* Open a folder item for update. Changes made to the returned object may be

@@ -138,7 +138,8 @@ public interface ContentHandler extends ExtensionPoint {
* @param newerVersion the newer version number
* @return the set of changes that were made
* @throws VersionException if a database version change prevents reading of data.
* @throws IOException if a folder item access error occurs
* @throws IOException if a folder item access error occurs or change set was
* produced by newer version of software and can not be read
*/
ChangeSet getChangeSet(FolderItem versionedFolderItem, int olderVersion, int newerVersion)
throws VersionException, IOException;
@@ -23,6 +23,7 @@ import java.util.Map;
import javax.swing.Icon;

import db.DBHandle;
import db.Field;
import db.buffers.*;
import ghidra.framework.client.ClientUtil;
import ghidra.framework.client.NotConnectedException;

@@ -1748,6 +1749,9 @@ public class GhidraFileData {
catch (FileNotFoundException e) {
// file has been deleted, just return an empty map.
}
catch (Field.UnsupportedFieldException e) {
// file created with newer version of Ghidra
}
catch (IOException e) {
Msg.error(this, "Read meta-data error", e);
}
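Note (not part of the commit): the new catch block above depends on catch ordering. Field.UnsupportedFieldException extends IOException, so it has to be caught before the generic IOException handler, otherwise it would be logged as a read error instead of being quietly treated as "file created with a newer version of Ghidra". A small self-contained illustration of that ordering rule, using stand-in exception types rather than the Ghidra classes:

    import java.io.IOException;

    class CatchOrderDemo {

        static class NewerVersionException extends IOException {
        }

        static String readMetadata(boolean newerFormat) {
            try {
                if (newerFormat) {
                    throw new NewerVersionException();
                }
                throw new IOException("disk error");
            }
            catch (NewerVersionException e) {
                // Must come first: the subclass would otherwise be swallowed
                // by the IOException handler below.
                return "created with newer version; ignore";
            }
            catch (IOException e) {
                return "real read error: " + e.getMessage();
            }
        }

        public static void main(String[] args) {
            System.out.println(readMetadata(true));
            System.out.println(readMetadata(false));
        }
    }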
@@ -107,7 +107,8 @@ public interface DomainFile extends Comparable<DomainFile> {
* Returns changes made to versioned file by others since checkout was performed.
* @return change set or null
* @throws VersionException latest version was created with a newer version of software
* @throws IOException
* @throws IOException if a folder item access error occurs or change set was
* produced by newer version of software and can not be read
*/
public ChangeSet getChangesByOthersSinceCheckout() throws VersionException, IOException;

@@ -352,8 +353,8 @@ public interface DomainFile extends Comparable<DomainFile> {
* @throws IOException thrown if an IO or access error occurs.
* @throws CancelledException if task monitor cancelled operation.
*/
public boolean checkout(boolean exclusive, TaskMonitor monitor) throws IOException,
CancelledException;
public boolean checkout(boolean exclusive, TaskMonitor monitor)
throws IOException, CancelledException;

/**
* Performs check in to associated repository. File must be checked-out

@@ -379,8 +380,8 @@ public interface DomainFile extends Comparable<DomainFile> {
* If okToUpgrade was false, check exception to see if it can be upgraded
* @throws CancelledException if task monitor cancelled operation
*/
public void merge(boolean okToUpgrade, TaskMonitor monitor) throws IOException,
VersionException, CancelledException;
public void merge(boolean okToUpgrade, TaskMonitor monitor)
throws IOException, VersionException, CancelledException;

/**
* Undo "checked-out" file. The original repository file is restored.

@@ -456,8 +457,8 @@ public interface DomainFile extends Comparable<DomainFile> {
* @throws IOException thrown if an IO or access error occurs.
* @throws CancelledException if task monitor cancelled operation.
*/
DomainFile copyTo(DomainFolder newParent, TaskMonitor monitor) throws IOException,
CancelledException;
DomainFile copyTo(DomainFolder newParent, TaskMonitor monitor)
throws IOException, CancelledException;

/**
* Copy a specific version of this file to the specified destFolder.
@@ -1,27 +1,27 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<launchConfiguration type="org.eclipse.jdt.launching.localJavaApplication">
<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS">
<listEntry value="/Framework Utility/src/main/java/ghidra/GhidraLauncher.java"/>
</listAttribute>
<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES">
<listEntry value="1"/>
</listAttribute>
<listAttribute key="org.eclipse.debug.ui.favoriteGroups">
<listEntry value="org.eclipse.debug.ui.launchGroup.debug"/>
<listEntry value="org.eclipse.debug.ui.launchGroup.run"/>
</listAttribute>
<booleanAttribute key="org.eclipse.jdt.launching.ATTR_USE_CLASSPATH_ONLY_JAR" value="false"/>
<booleanAttribute key="org.eclipse.jdt.launching.ATTR_USE_START_ON_FIRST_THREAD" value="true"/>
<listAttribute key="org.eclipse.jdt.launching.CLASSPATH">
<listEntry value="<?xml version="1.0" encoding="UTF-8" standalone="no"?> <runtimeClasspathEntry path="5" projectName="Framework Utility" type="1"/> "/>
</listAttribute>
<booleanAttribute key="org.eclipse.jdt.launching.DEFAULT_CLASSPATH" value="false"/>
<stringAttribute key="org.eclipse.jdt.launching.JRE_CONTAINER" value="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11"/>
<stringAttribute key="org.eclipse.jdt.launching.MAIN_TYPE" value="ghidra.GhidraLauncher"/>
<listAttribute key="org.eclipse.jdt.launching.MODULEPATH">
<listEntry value="<?xml version="1.0" encoding="UTF-8" standalone="no"?> <runtimeClasspathEntry containerPath="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11" path="4" type="4"/> "/>
</listAttribute>
<stringAttribute key="org.eclipse.jdt.launching.PROGRAM_ARGUMENTS" value="ghidra.pcodeCPort.slgh_compile.SleighCompileLauncher -i "${project_loc}/build/data/sleighArgs.txt" -a "${project_loc}/data/languages""/>
<stringAttribute key="org.eclipse.jdt.launching.PROJECT_ATTR" value="Framework Utility"/>
<stringAttribute key="org.eclipse.jdt.launching.VM_ARGUMENTS" value="-Djava.awt.headless=true -Xmx1048M -XX:+IgnoreUnrecognizedVMOptions -Djava.system.class.loader=ghidra.GhidraClassLoader -Dfile.encoding=UTF-8 -Duser.country=US -Duser.language=en -Duser.variant -Xdock:name="Sleigh" -Dvisualvm.display.name=Sleigh"/>
<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS">
<listEntry value="/Framework Utility/src/main/java/ghidra/GhidraLauncher.java"/>
</listAttribute>
<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES">
<listEntry value="1"/>
</listAttribute>
<listAttribute key="org.eclipse.debug.ui.favoriteGroups">
<listEntry value="org.eclipse.debug.ui.launchGroup.debug"/>
<listEntry value="org.eclipse.debug.ui.launchGroup.run"/>
</listAttribute>
<booleanAttribute key="org.eclipse.jdt.launching.ATTR_USE_CLASSPATH_ONLY_JAR" value="false"/>
<booleanAttribute key="org.eclipse.jdt.launching.ATTR_USE_START_ON_FIRST_THREAD" value="true"/>
<listAttribute key="org.eclipse.jdt.launching.CLASSPATH">
<listEntry value="<?xml version="1.0" encoding="UTF-8" standalone="no"?> <runtimeClasspathEntry path="5" projectName="Framework Utility" type="1"/> "/>
</listAttribute>
<booleanAttribute key="org.eclipse.jdt.launching.DEFAULT_CLASSPATH" value="false"/>
<stringAttribute key="org.eclipse.jdt.launching.JRE_CONTAINER" value="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11"/>
<stringAttribute key="org.eclipse.jdt.launching.MAIN_TYPE" value="ghidra.GhidraLauncher"/>
<listAttribute key="org.eclipse.jdt.launching.MODULEPATH">
<listEntry value="<?xml version="1.0" encoding="UTF-8" standalone="no"?> <runtimeClasspathEntry containerPath="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11" path="4" type="4"/> "/>
</listAttribute>
<stringAttribute key="org.eclipse.jdt.launching.PROGRAM_ARGUMENTS" value="ghidra.pcodeCPort.slgh_compile.SleighCompileLauncher -DBaseDir="${project_loc}/../../../../" -i "${project_loc}/build/tmp/sleighArgs.txt" -a "${project_loc}/data/languages""/>
<stringAttribute key="org.eclipse.jdt.launching.PROJECT_ATTR" value="Framework Utility"/>
<stringAttribute key="org.eclipse.jdt.launching.VM_ARGUMENTS" value="-Djava.awt.headless=true -Xmx1048M -XX:+IgnoreUnrecognizedVMOptions -Djava.system.class.loader=ghidra.GhidraClassLoader -Dfile.encoding=UTF-8 -Duser.country=US -Duser.language=en -Duser.variant -Xdock:name="Sleigh" -Dvisualvm.display.name=Sleigh"/>
</launchConfiguration>
@@ -422,7 +422,7 @@ public class SleighLanguage implements Language {
String languageName = specName + ".slaspec";
ResourceFile languageFile = new ResourceFile(slaFile.getParentFile(), languageName);

// see gradleScripts/processorUtils.gradle for sleighArgs.txt generation
// see gradle/processorUtils.gradle for sleighArgs.txt generation
ResourceFile sleighArgsFile = null;
ResourceFile languageModule = Application.getModuleContainingResourceFile(languageFile);
if (languageModule != null) {

@@ -430,21 +430,23 @@ public class SleighLanguage implements Language {
sleighArgsFile = new ResourceFile(languageModule, "data/sleighArgs.txt");
}
else {
sleighArgsFile = new ResourceFile(languageModule, "build/data/sleighArgs.txt");
sleighArgsFile = new ResourceFile(languageModule, "build/tmp/sleighArgs.txt");
}
}

Map<String, String> defineMap;
String[] args;
if (sleighArgsFile != null && sleighArgsFile.isFile()) {
args = new String[] { "-i", sleighArgsFile.getAbsolutePath(),
String baseDir = Application.getInstallationDirectory().getAbsolutePath().replace(
File.separatorChar, '/');
if (!baseDir.endsWith("/")) {
baseDir += "/";
}
args = new String[] { "-DBaseDir=" + baseDir, "-i", sleighArgsFile.getAbsolutePath(),
languageFile.getAbsolutePath(), description.getSlaFile().getAbsolutePath() };
defineMap = new HashMap<>();
}
else {
args = new String[] { languageFile.getAbsolutePath(),
description.getSlaFile().getAbsolutePath() };
defineMap = ModuleDefinitionsMap.getModuleMap();
}

try {

@@ -454,7 +456,7 @@ public class SleighLanguage implements Language {
buf.append(" ");
}
Msg.debug(this, "Sleigh compile: " + buf);
int returnCode = SleighCompileLauncher.runMain(args, defineMap);
int returnCode = SleighCompileLauncher.runMain(args);
if (returnCode != 0) {
throw new SleighException("Errors compiling " + languageFile.getAbsolutePath() +
" -- please check log messages for details");
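Note (not part of the commit): the new code above builds a BaseDir preprocessor define from the Ghidra installation directory, normalizing the path to forward slashes and ensuring a trailing '/', then passes "-DBaseDir=" + baseDir as the first compiler argument. A self-contained sketch of just that normalization step; the class and method names below are invented for illustration.

    import java.io.File;

    class BaseDirArg {

        // Normalize an installation path to forward slashes with a trailing '/'.
        static String toBaseDirDefine(String installDir) {
            String baseDir = installDir.replace(File.separatorChar, '/');
            if (!baseDir.endsWith("/")) {
                baseDir += "/";
            }
            return "-DBaseDir=" + baseDir;
        }

        public static void main(String[] args) {
            // On Windows (File.separatorChar == '\\') a path like C:\ghidra becomes C:/ghidra/.
            System.out.println(toBaseDirDefine(new File("").getAbsolutePath()));
        }
    }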
@@ -1706,6 +1706,6 @@ public class SleighCompile extends SleighBase {
* @throws RecognitionException for parsing errors
*/
public static void main(String[] args) throws JDOMException, IOException, RecognitionException {
System.exit(SleighCompileLauncher.runMain(args, new HashMap<String, String>()));
System.exit(SleighCompileLauncher.runMain(args));
}
}
@@ -71,24 +71,24 @@ public class SleighCompileLauncher implements GhidraLaunchable {
ApplicationConfiguration configuration = new ApplicationConfiguration();
Application.initializeApplication(layout, configuration);

System.exit(runMain(args, new HashMap<String, String>()));
System.exit(runMain(args));
}

/**
* Execute the Sleigh compiler process
*
* @param args sleigh compiler command line arguments
* @param preprocs additional preprocessor macro
* @return exit code (TODO: exit codes are not well defined)
* @throws JDOMException
* @throws IOException
* @throws RecognitionException
*/
public static int runMain(String[] args, Map<String, String> preprocs)
public static int runMain(String[] args)
throws JDOMException, IOException, RecognitionException {
int retval;
String filein = null;
String fileout = null;
Map<String, String> preprocs = new HashMap<>();

SleighCompile.yydebug = false;
boolean allMode = false;

@@ -218,8 +218,8 @@ public class SleighCompileLauncher implements GhidraLaunchable {
System.out.println("Compiling " + input + ":");
SleighCompile compiler = new SleighCompile();
initCompiler(compiler, preprocs, unnecessaryPcodeWarning, lenientConflict,
allCollisionWarning,
allNopWarning, deadTempWarning, unusedFieldWarning, enforceLocalKeyWord);
allCollisionWarning, allNopWarning, deadTempWarning, unusedFieldWarning,
enforceLocalKeyWord);

String outname = input.getName().replace(".slaspec", ".sla");
File output = new File(input.getParent(), outname);

@@ -247,8 +247,8 @@ public class SleighCompileLauncher implements GhidraLaunchable {
// single file compile
SleighCompile compiler = new SleighCompile();
initCompiler(compiler, preprocs, unnecessaryPcodeWarning, lenientConflict,
allCollisionWarning, allNopWarning,
deadTempWarning, unusedFieldWarning, enforceLocalKeyWord);
allCollisionWarning, allNopWarning, deadTempWarning, unusedFieldWarning,
enforceLocalKeyWord);
if (i == args.length) {
Msg.error(SleighCompile.class, "Missing input file name");
return 1;

@@ -418,6 +418,7 @@ public class SleighCompileLauncher implements GhidraLaunchable {
return 4;
}
catch (PreprocessorException e) {
Msg.error(SleighCompile.class, e.getMessage());
Msg.error(SleighCompile.class, "Errors during preprocessing, halting compilation");
return 5;
}
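Note (not part of the commit): runMain no longer accepts a Map of preprocessor macros; the map is now created inside the method, which suggests that callers (SleighLanguage, SleighCompile.main and this launcher) supply defines only through -D style command-line arguments such as the -DBaseDir argument seen earlier. The actual option parsing in SleighCompileLauncher is not shown in this diff, so the following is only a hedged, self-contained sketch of turning -Dname=value arguments into a map.

    import java.util.HashMap;
    import java.util.Map;

    class DefineArgs {

        // Collect -Dname=value arguments into a preprocessor macro map (sketch only).
        static Map<String, String> parseDefines(String[] args) {
            Map<String, String> preprocs = new HashMap<>();
            for (String arg : args) {
                if (arg.startsWith("-D")) {
                    int eq = arg.indexOf('=');
                    if (eq > 2) {
                        preprocs.put(arg.substring(2, eq), arg.substring(eq + 1));
                    }
                }
            }
            return preprocs;
        }

        public static void main(String[] args) {
            System.out.println(parseDefines(new String[] { "-DBaseDir=/opt/ghidra/", "-i", "sleighArgs.txt" }));
        }
    }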
@@ -74,9 +74,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
addedSourceArchiveIds = new HashSet<Long>();
}

/**
* @see ghidra.program.model.listing.DataTypeChangeSet#dataTypeChanged(long)
*/
@Override
public synchronized void dataTypeChanged(long id) {
if (!inTransaction) {

@@ -88,9 +85,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
}
}

/**
* @see ghidra.program.model.listing.DataTypeChangeSet#dataTypeAdded(long)
*/
@Override
public synchronized void dataTypeAdded(long id) {
if (!inTransaction) {

@@ -99,25 +93,16 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
tmpAddedDataTypeIds.add(new Long(id));
}

/**
* @see ghidra.program.model.listing.DataTypeChangeSet#getDataTypeChanges()
*/
@Override
public synchronized long[] getDataTypeChanges() {
return getLongs(changedDataTypeIds);
}

/**
* @see ghidra.program.model.listing.DataTypeChangeSet#getDataTypeAdditions()
*/
@Override
public synchronized long[] getDataTypeAdditions() {
return getLongs(addedDataTypeIds);
}

/**
* @see ghidra.program.model.listing.DataTypeChangeSet#categoryChanged(long)
*/
@Override
public synchronized void categoryChanged(long id) {
if (!inTransaction) {

@@ -129,9 +114,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
}
}

/**
* @see ghidra.program.model.listing.DataTypeChangeSet#categoryAdded(long)
*/
@Override
public synchronized void categoryAdded(long id) {
if (!inTransaction) {

@@ -140,25 +122,16 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
tmpAddedCategoryIds.add(new Long(id));
}

/**
* @see ghidra.program.model.listing.DataTypeChangeSet#getCategoryChanges()
*/
@Override
public synchronized long[] getCategoryChanges() {
return getLongs(changedCategoryIds);
}

/**
* @see ghidra.program.model.listing.DataTypeChangeSet#getCategoryAdditions()
*/
@Override
public synchronized long[] getCategoryAdditions() {
return getLongs(addedCategoryIds);
}

/* (non-Javadoc)
* @see ghidra.program.model.listing.DataTypeChangeSet#archiveAdded(long)
*/
@Override
public void sourceArchiveAdded(long id) {
if (!inTransaction) {

@@ -167,9 +140,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
tmpAddedSourceArchiveIds.add(new Long(id));
}

/* (non-Javadoc)
* @see ghidra.program.model.listing.DataTypeChangeSet#archiveChanged(long)
*/
@Override
public void sourceArchiveChanged(long id) {
if (!inTransaction) {

@@ -181,17 +151,11 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
}
}

/* (non-Javadoc)
* @see ghidra.program.model.listing.DataTypeChangeSet#getArchiveAdditions()
*/
@Override
public long[] getSourceArchiveAdditions() {
return getLongs(addedSourceArchiveIds);
}

/* (non-Javadoc)
* @see ghidra.program.model.listing.DataTypeChangeSet#getArchiveChanges()
*/
@Override
public long[] getSourceArchiveChanges() {
return getLongs(changedSourceArchiveIds);

@@ -213,9 +177,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
clearUndo();
}

/**
* @see ghidra.framework.data.DomainObjectDBChangeSet#startTransaction()
*/
@Override
public synchronized void startTransaction() {
redoList.clear();

@@ -229,9 +190,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
tmpAddedSourceArchiveIds = new HashSet<Long>();
}

/**
* @see ghidra.framework.data.DomainObjectDBChangeSet#endTransaction(boolean)
*/
@Override
public synchronized void endTransaction(boolean commit) {
if (!inTransaction) {

@@ -268,9 +226,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje

}

/**
* @see ghidra.framework.data.DomainObjectDBChangeSet#undo()
*/
@Override
public synchronized void undo() {
MyChangeDiff diff = undoList.removeLast();

@@ -283,9 +238,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
redoList.addLast(diff);
}

/**
* @see ghidra.framework.data.DomainObjectDBChangeSet#redo()
*/
@Override
public synchronized void redo() {
MyChangeDiff diff = redoList.removeLast();

@@ -298,26 +250,17 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
undoList.addLast(diff);
}

/**
* @see ghidra.framework.model.ChangeSet#clearUndo()
*/
@Override
public synchronized void clearUndo() {
undoList.clear();
redoList.clear();
}

/**
* @see ghidra.framework.data.DomainObjectDBChangeSet#setMaxUndos(int)
*/
@Override
public synchronized void setMaxUndos(int numUndos) {
this.numUndos = numUndos;
}

/**
* @see ghidra.framework.model.ChangeSet#read(ghidra.framework.store.db.DBHandle)
*/
@Override
public synchronized void read(DBHandle dbh) throws IOException {

@@ -343,6 +286,9 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
private void readIdRecords(DBHandle dbh, String tableName, Set<Long> ids) throws IOException {
Table table = dbh.getTable(tableName);
if (table != null) {
if (table.getSchema().getVersion() != 0) {
throw new IOException("Change data produced with newer version of Ghidra");
}
RecordIterator it = table.iterator();
while (it.hasNext()) {
Record rec = it.next();

@@ -351,9 +297,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
}
}

/**
* @see ghidra.framework.model.ChangeSet#write(ghidra.framework.store.db.DBHandle)
*/
@Override
public synchronized void write(DBHandle dbh, boolean isRecoverySave) throws IOException {
@@ -20,8 +20,7 @@ import java.io.IOException;
import javax.swing.Icon;
import javax.swing.ImageIcon;

import db.DBConstants;
import db.DBHandle;
import db.*;
import db.buffers.BufferFile;
import db.buffers.ManagedBufferFile;
import ghidra.framework.data.*;

@@ -80,6 +79,9 @@ public class ProgramContentHandler extends DBContentHandler {
success = true;
return program;
}
catch (Field.UnsupportedFieldException e) {
throw new VersionException(false);
}
catch (VersionException e) {
throw e;
}

@@ -136,6 +138,9 @@ public class ProgramContentHandler extends DBContentHandler {
success = true;
return program;
}
catch (Field.UnsupportedFieldException e) {
throw new VersionException(false);
}
catch (VersionException e) {
throw e;
}

@@ -199,6 +204,9 @@ public class ProgramContentHandler extends DBContentHandler {
success = true;
return program;
}
catch (Field.UnsupportedFieldException e) {
throw new VersionException(false);
}
catch (VersionException e) {
throw e;
}
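Note (not part of the commit): the three new catch blocks above all translate the low-level Field.UnsupportedFieldException into new VersionException(false), that is, a version mismatch that cannot be resolved by upgrading because the file was written by a newer Ghidra. A self-contained sketch of that catch-and-translate pattern; both exception classes below are simplified stand-ins, not ghidra.util.exception.VersionException or db.Field.UnsupportedFieldException.

    import java.io.IOException;

    class TranslateDemo {

        static class UnsupportedFieldException extends IOException {
        }

        // 'upgradeable' mirrors the boolean passed to new VersionException(false) in the hunk above.
        static class VersionException extends Exception {
            final boolean upgradeable;
            VersionException(boolean upgradeable) {
                this.upgradeable = upgradeable;
            }
        }

        static Object open(boolean newerFormat) throws VersionException, IOException {
            try {
                if (newerFormat) {
                    throw new UnsupportedFieldException();
                }
                return new Object(); // stands in for the opened domain object
            }
            catch (UnsupportedFieldException e) {
                // Report a non-upgradeable version problem rather than a plain I/O failure.
                throw new VersionException(false);
            }
        }
    }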
@ -126,9 +126,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
|
|||
addedTagIds = new HashSet<Long>();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#getAddressSet()
|
||||
*/
|
||||
@Override
|
||||
public synchronized AddressSetView getAddressSet() {
|
||||
SynchronizedAddressSetCollection addressSetCollection =
|
||||
|
@ -149,9 +146,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
|
|||
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#add(ghidra.program.model.address.AddressSetView)
|
||||
*/
|
||||
@Override
|
||||
public synchronized void add(AddressSetView addrSet) {
|
||||
if (!inTransaction) {
|
||||
|
@ -160,9 +154,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
|
|||
tmpAddrs.add(addrSet);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#addRange(ghidra.program.model.address.Address, ghidra.program.model.address.Address)
|
||||
*/
|
||||
@Override
|
||||
public synchronized void addRange(Address addr1, Address addr2) {
|
||||
if (!inTransaction) {
|
||||
|
@ -173,9 +164,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#addRegisterRange(ghidra.program.model.address.Address, ghidra.program.model.address.Address)
|
||||
*/
|
||||
@Override
|
||||
public synchronized void addRegisterRange(Address addr1, Address addr2) {
|
||||
if (!inTransaction) {
|
||||
|
@ -184,9 +172,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
|
|||
tmpRegAddrs.addRange(addr1, addr2);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#getRegisterAddressSet()
|
||||
*/
|
||||
@Override
|
||||
public synchronized AddressSetView getRegisterAddressSet() {
|
||||
SynchronizedAddressSetCollection addressSetCollection =
|
||||
|
@ -195,9 +180,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
|
|||
return addressSetCollection.getCombinedAddressSet();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#dataTypeChanged(long)
|
||||
*/
|
||||
@Override
|
||||
public synchronized void dataTypeChanged(long id) {
|
||||
if (!inTransaction) {
|
||||
|
@ -209,9 +191,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#dataTypeAdded(long)
|
||||
*/
|
||||
@Override
|
||||
public synchronized void dataTypeAdded(long id) {
|
||||
if (!inTransaction) {
|
||||
|
@ -220,25 +199,16 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
|
|||
tmpAddedDataTypeIds.add(new Long(id));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#getDataTypeChanges()
|
||||
*/
|
||||
@Override
|
||||
public synchronized long[] getDataTypeChanges() {
|
||||
return getLongs(changedDataTypeIds);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#getDataTypeAdditions()
|
||||
*/
|
||||
@Override
|
||||
public synchronized long[] getDataTypeAdditions() {
|
||||
return getLongs(addedDataTypeIds);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#categoryChanged(long)
|
||||
*/
|
||||
@Override
|
||||
public synchronized void categoryChanged(long id) {
|
||||
if (!inTransaction) {
|
||||
|
@ -250,9 +220,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#categoryAdded(long)
|
||||
*/
|
||||
@Override
|
||||
public synchronized void categoryAdded(long id) {
|
||||
if (!inTransaction) {
|
||||
|
@ -261,25 +228,16 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
|
|||
tmpAddedCategoryIds.add(new Long(id));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#getCategoryChanges()
|
||||
*/
|
||||
@Override
|
||||
public synchronized long[] getCategoryChanges() {
|
||||
return getLongs(changedCategoryIds);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#getCategoryAdditions()
|
||||
*/
|
||||
@Override
|
||||
public synchronized long[] getCategoryAdditions() {
|
||||
return getLongs(addedCategoryIds);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#programTreeChanged(long)
|
||||
*/
|
||||
@Override
|
||||
public synchronized void programTreeChanged(long id) {
|
||||
if (!inTransaction) {
|
||||
|
@ -291,9 +249,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#programTreeAdded(long)
|
||||
*/
|
||||
@Override
|
||||
public synchronized void programTreeAdded(long id) {
|
||||
if (!inTransaction) {
|
||||
|
@ -302,25 +257,16 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
|
|||
tmpAddedProgramTreeIds.add(new Long(id));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see ghidra.program.model.listing.ProgramChangeSet#getProgramTreeChanges()
|
||||
*/
|
||||
@Override
|
||||
public synchronized long[] getProgramTreeChanges() {
return getLongs(changedProgramTreeIds);
}

/**
 * @see ghidra.program.model.listing.ProgramChangeSet#getProgramTreeAdditions()
 */
@Override
public synchronized long[] getProgramTreeAdditions() {
return getLongs(addedProgramTreeIds);
}

/**
 * @see ghidra.program.model.listing.ProgramChangeSet#symbolChanged(long)
 */
@Override
public synchronized void symbolChanged(long id) {
if (!inTransaction) {

@ -332,9 +278,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
}
}

/**
 * @see ghidra.program.model.listing.ProgramChangeSet#symbolAdded(long)
 */
@Override
public synchronized void symbolAdded(long id) {
if (!inTransaction) {

@ -343,25 +286,16 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
tmpAddedSymbolIds.add(new Long(id));
}

/**
 * @see ghidra.program.model.listing.ProgramChangeSet#getSymbolChanges()
 */
@Override
public synchronized long[] getSymbolChanges() {
return getLongs(changedSymbolIds);
}

/**
 * @see ghidra.program.model.listing.ProgramChangeSet#getSymbolAdditions()
 */
@Override
public synchronized long[] getSymbolAdditions() {
return getLongs(addedSymbolIds);
}

/**
 * @see ghidra.program.model.listing.ProgramChangeSet#tagChanged(long)
 */
@Override
public synchronized void tagChanged(long id) {
if (!inTransaction) {

@ -373,9 +307,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
}
}

/**
 * @see ghidra.program.model.listing.ProgramChangeSet#tagCreated(long)
 */
@Override
public synchronized void tagCreated(long id) {
if (!inTransaction) {

@ -384,25 +315,16 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
tmpAddedTagIds.add(new Long(id));
}

/**
 * @see ghidra.program.model.listing.ProgramChangeSet#getTagChanges()
 */
@Override
public synchronized long[] getTagChanges() {
return getLongs(changedTagIds);
}

/**
 * @see ghidra.program.model.listing.ProgramChangeSet#getTagCreations()
 */
@Override
public synchronized long[] getTagCreations() {
return getLongs(addedTagIds);
}

/* (non-Javadoc)
 * @see ghidra.program.model.listing.DataTypeChangeSet#archiveAdded(long)
 */
@Override
public void sourceArchiveAdded(long id) {
if (!inTransaction) {

@ -411,9 +333,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
tmpAddedSourceArchiveIds.add(new Long(id));
}

/* (non-Javadoc)
 * @see ghidra.program.model.listing.DataTypeChangeSet#archiveChanged(long)
 */
@Override
public void sourceArchiveChanged(long id) {
if (!inTransaction) {

@ -425,25 +344,16 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
}
}

/* (non-Javadoc)
 * @see ghidra.program.model.listing.DataTypeChangeSet#getArchiveAdditions()
 */
@Override
public long[] getSourceArchiveAdditions() {
return getLongs(addedSourceArchiveIds);
}

/* (non-Javadoc)
 * @see ghidra.program.model.listing.DataTypeChangeSet#getArchiveChanges()
 */
@Override
public long[] getSourceArchiveChanges() {
return getLongs(changedSourceArchiveIds);
}

/**
 * @see ghidra.framework.data.DomainObjectDBChangeSet#clear()
 */
@Override
public synchronized void clearUndo(boolean isCheckedOut) {
if (inTransaction) {

@ -470,9 +380,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
clearUndo();
}

/**
 * @see ghidra.framework.data.DomainObjectDBChangeSet#startTransaction()
 */
@Override
public synchronized void startTransaction() {
inTransaction = true;

@ -493,9 +400,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
tmpAddedTagIds = new HashSet<Long>();
}

/**
 * @see ghidra.framework.data.DomainObjectDBChangeSet#endTransaction(boolean)
 */
@Override
public synchronized void endTransaction(boolean commit) {
if (!inTransaction) {

@ -556,9 +460,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
tmpAddedTagIds = null;
}

/**
 * @see ghidra.framework.data.DomainObjectDBChangeSet#undo()
 */
@Override
public synchronized void undo() {
ChangeDiff diff = undoList.removeLast();

@ -579,9 +480,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
redoList.addLast(diff);
}

/**
 * @see ghidra.framework.data.DomainObjectDBChangeSet#redo()
 */
@Override
public synchronized void redo() {
ChangeDiff diff = redoList.removeLast();

@ -602,26 +500,17 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
undoList.addLast(diff);
}

/**
 * @see ghidra.framework.model.ChangeSet#clearUndo()
 */
@Override
public synchronized void clearUndo() {
undoList.clear();
redoList.clear();
}

/**
 * @see ghidra.framework.data.DomainObjectDBChangeSet#setMaxUndos(int)
 */
@Override
public synchronized void setMaxUndos(int numUndos) {
this.numUndos = numUndos;
}
/**
 * @see ghidra.framework.model.ChangeSet#read(ghidra.framework.store.db.DBHandle)
 */
@Override
public synchronized void read(DBHandle dbh) throws IOException {

@ -648,6 +537,9 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
private void readIdRecords(DBHandle dbh, String tableName, Set<Long> ids) throws IOException {
Table table = dbh.getTable(tableName);
if (table != null) {
if (table.getSchema().getVersion() != 0) {
throw new IOException("Change data produced with newer version of Ghidra");
}
RecordIterator it = table.iterator();
while (it.hasNext()) {
Record rec = it.next();

@ -660,6 +552,9 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
throws IOException {
Table table = dbh.getTable(tableName);
if (table != null) {
if (table.getSchema().getVersion() != 0) {
throw new IOException("Change data produced with newer version of Ghidra");
}
RecordIterator it = table.iterator();
while (it.hasNext()) {
Record rec = it.next();

@ -673,9 +568,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
}
}

/**
 * @see ghidra.framework.model.ChangeSet#write(ghidra.framework.store.db.DBHandle)
 */
@Override
public synchronized void write(DBHandle dbh, boolean isRecoverySave) throws IOException {

@ -16,7 +16,7 @@
 */
package ghidra.pcodeCPort.slgh_compile.regression;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.*;

import java.io.*;
import java.util.*;

@ -167,11 +165,9 @@ public class SleighCompileRegressionTest extends AbstractGenericTest {

private int runActualCompiler(File inputFile, File actualFile)
throws JDOMException, IOException, RecognitionException {
return SleighCompileLauncher.runMain(
new String[] { "-DMIPS=../../../../../../ghidra/Ghidra/Processors/MIPS",
"-D8051=../../../../../../ghidra/Ghidra/Processors/8051",
inputFile.getAbsolutePath(), actualFile.getAbsolutePath() },
new HashMap<String, String>());
return SleighCompileLauncher.runMain(new String[] { "-DBaseDir=../../../../../../",
"-DMIPS=ghidra/Ghidra/Processors/MIPS", "-D8051=ghidra/Ghidra/Processors/8051",
inputFile.getAbsolutePath(), actualFile.getAbsolutePath() });
}

private static final Pattern SPACEMATCH = Pattern.compile("^\\s*<print piece=\" \"/>\\s*$");
@ -7,9 +7,9 @@ apply plugin: 'eclipse'

eclipse.project.name = 'Processors AARCH64'

sleighCompile {
args '-l'
}
sleighCompileOptions = [
'-l'
]

dependencies {
compile project(':Base')

@ -10,6 +10,6 @@ dependencies {
compile project(':Base')
}

sleighCompile {
// args '-l'
}
sleighCompileOptions = [
// '-l'
]

@ -7,9 +7,9 @@ apply plugin: 'eclipse'
eclipse.project.name = 'Processors Dalvik'


sleighCompile {
args '-l'
}
sleighCompileOptions = [
'-l'
]

dependencies {
compile project(':Base')

@ -3,6 +3,6 @@ apply from: "$rootProject.projectDir/gradle/processorProject.gradle"
apply plugin: 'eclipse'
eclipse.project.name = 'Processors HCS08'

sleighCompile {
args '-l'
}
sleighCompileOptions = [
'-l'
]

@ -5,9 +5,9 @@ apply plugin: 'eclipse'

eclipse.project.name = 'Processors HCS12'

sleighCompile {
args '-l'
}
sleighCompileOptions = [
'-l'
]

dependencies {
compile project(':Base')

@ -3,7 +3,6 @@ apply from: "$rootProject.projectDir/gradle/processorProject.gradle"
apply plugin: 'eclipse'
eclipse.project.name = 'Processors MCS96'


sleighCompile {
args '-l'
}
sleighCompileOptions = [
'-l'
]

@ -10,6 +10,6 @@ dependencies {
compile project(':Base')
}

sleighCompile {
args '-l'
}
sleighCompileOptions = [
'-l'
]

@ -1,14 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>

<!--
+ Compile sleigh languages within this source language module.
+ Compile sleigh languages within this source language module via Eclipse or
+ a command shell.
+
+ * Eclipse: right-click on this file and choose menu item "Run As->Ant Build"
+
+ * From command line (requires ant install)
+ - cd to data directory containing this file
+ - run ant
+
+ Sleigh compiler options are read from build/data/sleighArgs.txt which is
+ updated by "gradle prepdev" based upon specification within module's build.gradle.
+
+ Eclipse: right-click on this file and choose menu item "Run As->Ant Build"
+ From command line (requires ant install)
+ - cd to data directory containing this file
+ - run ant
-->

<project name="privateBuildDeveloper" default="sleigh-compile">

@ -46,8 +49,9 @@
fork="true"
failonerror="true">
<jvmarg value="-Xmx2048M"/>
<arg value="-DBaseDir=../../../../../../" /> <!-- repos root directory -->
<arg value="-i"/>
<arg value="../build/data/sleighArgs.txt"/>
<arg value="../build/tmp/sleighArgs.txt"/>
<arg value="-a"/>
<arg value="./languages"/>
</java>
@ -1,13 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>

<!--
+ Compile sleigh languages within this distribution language module.
+ Sleigh compiler options are read from the sleighArgs.txt file.
+ Compile sleigh languages within this distribution language module via Eclipse or
+ a command shell.
+
+ Eclipse: right-click on this file and choose menu item "Run As->Ant Build"
+ From command line (requires ant install)
+ - cd to data directory containing this file
+ - run ant
+ * Eclipse: right-click on this file and choose menu item "Run As->Ant Build"
+
+ * From command line (requires ant install)
+ - cd to data directory containing this file
+ - run ant
+
+ Sleigh compiler options are read from sleighArgs.txt.
-->

<project name="privateBuildDistribution" default="sleigh-compile">

@ -39,6 +42,7 @@
fork="true"
failonerror="true">
<jvmarg value="-Xmx2048M"/>
<arg value="-DBaseDir=../../../../../" /> <!-- Ghidra install directory -->
<arg value="-i"/>
<arg value="sleighArgs.txt"/>
<arg value="-a"/>
@ -7,8 +7,6 @@
apply from: "$rootProject.projectDir/gradle/nativeProject.gradle"
*****************************************************************************************/


/*****************************************************************************************
 *
 * Create a configuration so the a dependency can be declared on the the software modeling

@ -26,21 +24,71 @@ dependencies {

/*****************************************************************************************
 *
 * Task to write sleigh compiler args to build/data/sleighArgs.txt for use with sleigh
 * external sleigh compiler.
 * Sleigh compile options to be written to sleighArgs.txt in support of the following
 * use cases:
 * - Ant build using data/build.xml (development and distribution)
 * - Eclipse Sleigh launcher (development only)
 * - Ghidra runtime language rebuild (SleighLanguage.reloadLanguage; development and distribution)
 * - Distribution build (sleighCompile task; development layout)
 *
 * This list may be added to or replaced by a specific processor project/module.
 *
 * Example: MIPS processor module dependency within a slaspec specified as:
 *
 * @include "$(BaseDir)$(MIPS)/data/language/maips.sinc
 *
 * with the corresponding MIPS definition specified within the sleighCompileOptions
 * list specified within the module's build.gradle file:
 *
 * sleighCompileOptions.add "-DMIPS=%%MIPS%%"
 * -or-
 * sleighCompileOptions = [
 * "-l",
 * "-DMIPS=%%MIPS%%"
 * ]
 *
 *****************************************************************************************/
ext.sleighCompileOptions = [ ]
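
For illustration only, a processor module's build.gradle under this scheme might look like the sketch below; the module name 'Processors Example' is hypothetical, and the options simply combine the '-l' flag used by the processor modules above with the %%MIPS%% substitution described in the comment:

    apply from: "$rootProject.projectDir/gradle/processorProject.gradle"
    apply plugin: 'eclipse'
    eclipse.project.name = 'Processors Example'   // hypothetical module name

    // Written to sleighArgs.txt by the saveSleighArgs task; %%MIPS%% is resolved to the
    // MIPS module path by resolveSleighArg (defined later in this file).
    sleighCompileOptions = [
        '-l',
        '-DMIPS=%%MIPS%%'
    ]

    dependencies {
        compile project(':Base')
    }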

/*****************************************************************************************
 *
 * Check for invalid sleighCompileOptions
 *
 *****************************************************************************************/
def checkSleighCompileOptions() {
sleighCompileOptions.each { a ->
def arg = a.trim()
assert !(arg.startsWith("-a") || arg.startsWith("-i")) : "Invalid sleighCompileOption: ${arg}"
}
}

/*****************************************************************************************
 *
 * Task to write sleigh compiler args for use with sleigh compiler.
 * Due to the possible presence of module dependency paths two different sleighArgs.txt
 * files are produced: one for development layout (build/tmp/sleighArgs.txt) and
 * one for distribution layout ([build/]data/sleighArgs.txt). When invoking the
 * Sleigh compiler and using a sleighArgs.txt file the appropriate 'BaseDir' property
 * must be specified. Withing a distribution install 'BaseDir' must specifiy the
 * path to the install directory while in a development layout 'BaseDir' must specify
 * the repos root directory which contains the 'ghidra' repo directory.
 *
 *****************************************************************************************/
task saveSleighArgs {
def sleighArgsFile = file("build/data/sleighArgs.txt")
outputs.files sleighArgsFile
def sleighArgsDevFile = file("build/tmp/sleighArgs.txt")
outputs.files sleighArgsFile, sleighArgsDevFile
outputs.upToDateWhen { false }
doLast {
checkSleighCompileOptions()
sleighArgsFile.withWriter { out->
project.sleighCompile.args.each { a->
// don't save -a option
if (!"-a".equals(a)) {
out.println a
}
sleighCompileOptions.each { a->
out.println resolveSleighArg(a, false)
}
}
sleighArgsDevFile.withWriter { out->
sleighCompileOptions.each { a->
out.println resolveSleighArg(a, true)
}
}
}
@ -50,6 +98,7 @@ rootProject.prepDev.dependsOn(saveSleighArgs)
apply plugin: 'base'
clean {
delete file("build/data/sleighArgs.txt")
delete file("build/tmp/sleighArgs.txt")
}

/*****************************************************************************************

@ -94,6 +143,7 @@ rootProject.assembleDistribution {
 *
 *****************************************************************************************/
task sleighCompile (type: JavaExec) {
dependsOn saveSleighArgs
group = rootProject.GHIDRA_GROUP
description " Compiles all the sleigh languages. [gradle/processorProject.gradle]\n"

@ -104,7 +154,12 @@ task sleighCompile (type: JavaExec) {
// Delay adding the directory argument until the first part of the execution phase, so
// that any extra args added by a project override will be added to the arg list before
// these arguments.
// NOTE: projects should no longer add arguments to this task and should instead
// add such args to the sleighCompileOptions list.
doFirst {
args "-i"
args "./build/tmp/sleighArgs.txt"
args "-DBaseDir=${getProjectReposRootPath()}"
args '-a'
args './data/languages'
}

@ -115,11 +170,9 @@ task sleighCompile (type: JavaExec) {
// The task that copies the common files to the distribution folder must depend on
// the sleigh tasks before executing.
rootProject.assembleDistribution.dependsOn sleighCompile
rootProject.assembleDistribution.dependsOn saveSleighArgs

// Add in this projects sleighCompile to the allSleighCompile task
rootProject.allSleighCompile.dependsOn sleighCompile
rootProject.allSleighCompile.dependsOn saveSleighArgs

/*****************************************************************************************
 *
@ -154,3 +207,55 @@ sleighCompile.outputs.files (taskOutputs)

// define the sleigh compile inputs to saveSleighArgs to limit task creation to language modules
saveSleighArgs.inputs.files (taskInputs)

/*****************************************************************************************
 *
 * Gets the absolute repos root directory path with a trailing File separator.
 * This path may be used for specifying 'BaseDir' to the sleigh compiler within a
 * development layout.
 *
 *****************************************************************************************/
def getProjectReposRootPath() {
return rootProject.projectDir.getParent() + File.separator
}

/*****************************************************************************************
 *
 * Filter a sleigh compiler argument replacing any project/module reference of the form
 * %%MODULE%% witha that MODULE's relative path. If useDevPath is true the path will
 * include the containing repo directory (e.g., ghidra/Ghidra/...), otherwise the
 * path should start at the application root 'Ghidra/'. Only a single replacement per
 * arg is supported.
 *
 * This mechanism relies on the relative depth of a language module project within a
 * repository directory hierarchy. In general language module projects must reside
 * within the directory Ghidra/Processors.
 *
 *****************************************************************************************/
def resolveSleighArg(String arg, boolean useDevPath) {
arg = arg.trim()
int index = arg.indexOf("%%")
if (index < 0) {
return arg
}
String newArg = arg.substring(0, index)
String tail = arg.substring(index+2)
index = tail.indexOf("%%")
assert index > 0 : "Badly formed sleigh path-replacment option: ${arg}"
String moduleName = tail.substring(0, index)
tail = tail.substring(index+2)
def moduleProject = project(":${moduleName}")
def modulePath
if (useDevPath) {
// first path element is the containing repo directory
modulePath = moduleProject.projectDir.absolutePath
modulePath = modulePath.substring(getProjectReposRootPath().length())
}
else {
// first path element is the Ghidra directory
modulePath = getZipPath(moduleProject)
}
newArg += modulePath
newArg += tail
return newArg
}
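
As a rough sketch of the intended behavior of resolveSleighArg, assuming the MIPS module resides at Ghidra/Processors/MIPS inside the ghidra repo (consistent with the regression-test arguments above), the two layouts would resolve as follows:

    // Development layout: path begins with the containing repo directory and is paired
    // with -DBaseDir=<repos root>, as written to build/tmp/sleighArgs.txt.
    assert resolveSleighArg('-DMIPS=%%MIPS%%', true) == '-DMIPS=ghidra/Ghidra/Processors/MIPS'

    // Distribution layout: path begins at the application root 'Ghidra/' and is paired
    // with -DBaseDir=<install dir>, as written to [build/]data/sleighArgs.txt.
    assert resolveSleighArg('-DMIPS=%%MIPS%%', false) == '-DMIPS=Ghidra/Processors/MIPS'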