Mirror of https://github.com/NationalSecurityAgency/ghidra.git (synced 2025-10-03 09:49:23 +02:00)

Commit ee25a7d0cc: Merge remote-tracking branch 'origin/patch'
34 changed files with 396 additions and 422 deletions

@@ -22,6 +22,7 @@
 package ghidra.app.plugin.core.misc;
 
 import java.awt.Color;
+import java.io.IOException;
 
 import javax.swing.ImageIcon;
 
@@ -43,8 +44,7 @@ import ghidra.framework.plugintool.util.PluginStatus;
 import ghidra.program.model.address.*;
 import ghidra.program.model.listing.Program;
 import ghidra.program.model.listing.ProgramChangeSet;
-import ghidra.util.HelpLocation;
-import ghidra.util.SystemUtilities;
+import ghidra.util.*;
 import ghidra.util.exception.CancelledException;
 import ghidra.util.task.*;
 import ghidra.util.worker.Job;
@@ -439,6 +439,10 @@ public class MyProgramChangesDisplayPlugin extends ProgramPlugin implements Doma
             try {
                 changes = (ProgramChangeSet) domainFile.getChangesByOthersSinceCheckout();
             }
+            catch (IOException e) {
+                Msg.warn(this, "Unable to determine program change set: " + e.getMessage());
+                return;
+            }
             catch (Exception e) {
                 ClientUtil.handleException(tool.getProject().getRepository(), e, "Get Change Set",
                     false, tool.getToolFrame());
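
Note: for readability, the patched block above reads as follows once the hunk is applied (reconstruction from the diff, indentation approximate):

    try {
        changes = (ProgramChangeSet) domainFile.getChangesByOthersSinceCheckout();
    }
    catch (IOException e) {
        Msg.warn(this, "Unable to determine program change set: " + e.getMessage());
        return;
    }
    catch (Exception e) {
        ClientUtil.handleException(tool.getProject().getRepository(), e, "Get Change Set",
            false, tool.getToolFrame());
    }
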
@@ -807,7 +807,7 @@ public class DBHandle {
     * Load existing tables from database.
     * @throws IOException thrown if IO error occurs.
     */
-    private void loadTables() {
+    private void loadTables() throws IOException {
 
        tables = new Hashtable<>();
        TableRecord[] tableRecords = masterTable.getTableRecords();
@@ -1,6 +1,5 @@
 /* ###
  * IP: GHIDRA
- * REVIEWED: YES
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -16,8 +15,6 @@
  */
 package db;
 
-import ghidra.util.exception.AssertException;
-
 import java.io.IOException;
 
 /**
@@ -308,10 +305,11 @@ public abstract class Field implements Comparable<Field> {
 
    /**
     * Get the field associated with the specified type value.
-    * @param fieldType
+    * @param fieldType encoded Field type
     * @return Field
+    * @throws UnsupportedFieldException if unsupported fieldType specified
     */
-   static Field getField(byte fieldType) {
+   static Field getField(byte fieldType) throws UnsupportedFieldException {
        if ((fieldType & INDEX_TYPE_FLAG) == 0) {
            switch (fieldType & BASE_TYPE_MASK) {
                case LONG_TYPE:
@@ -333,7 +331,13 @@ public abstract class Field implements Comparable<Field> {
        else {
            return IndexField.getIndexField(fieldType);
        }
-       throw new AssertException();
+       throw new UnsupportedFieldException(fieldType);
+   }
+
+   public static class UnsupportedFieldException extends IOException {
+       UnsupportedFieldException(byte fieldType) {
+           super("Unsupported DB field type: 0x" + Integer.toHexString(fieldType & 0xff));
+       }
    }
 
 }
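
Note (illustrative, not part of the commit): Field.getField now reports an unknown type byte with the checked UnsupportedFieldException instead of an unchecked AssertException, so code that decodes persisted schemas is expected to handle it explicitly. A minimal caller sketch, assuming fieldTypeByte was read from a stored schema record:

    try {
        Field field = Field.getField(fieldTypeByte);
        // use field to decode the stored column value
    }
    catch (Field.UnsupportedFieldException e) {
        // the database was written by a newer Ghidra version; treat as a version mismatch
    }
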
@@ -31,10 +31,9 @@ public class FieldIndexTable extends IndexTable {
    /**
     * Construct a new secondary index which is based upon a specific field within the
     * primary table specified by name.
-    * @param db database handle
     * @param primaryTable primary table.
     * @param colIndex identifies the indexed column within the primary table.
-    * @throws IOException
+    * @throws IOException thrown if an IO error occurs
     */
    FieldIndexTable(Table primaryTable, int colIndex) throws IOException {
        this(primaryTable, primaryTable.getDBHandle().getMasterTable().createTableRecord(
@@ -44,11 +43,11 @@ public class FieldIndexTable extends IndexTable {
    /**
     * Construct a new or existing secondary index. An existing index must have
     * its' root ID specified within the tableRecord.
-    * @param db database handle
-    * @param bufferMgr database buffer manager
+    * @param primaryTable primary table.
     * @param indexTableRecord specifies the index parameters.
+    * @throws IOException thrown if an IO error occurs
     */
-   FieldIndexTable(Table primaryTable, TableRecord indexTableRecord) {
+   FieldIndexTable(Table primaryTable, TableRecord indexTableRecord) throws IOException {
        super(primaryTable, indexTableRecord);
        this.indexSchema = indexTable.getSchema();
        this.indexColumn = indexTableRecord.getIndexedColumn();
@@ -1,6 +1,5 @@
 /* ###
  * IP: GHIDRA
- * REVIEWED: YES
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -28,45 +27,42 @@ import java.util.NoSuchElementException;
  */
 class FixedIndexTable extends IndexTable {
 
-   private static final Class<?>[] fieldClasses = {
-       BinaryField.class, // index data
+   private static final Class<?>[] fieldClasses = { BinaryField.class, // index data
    };
 
-   private static final String[] fieldNames = {
-       "IndexBuffer"
-   };
+   private static final String[] fieldNames = { "IndexBuffer" };
 
    private static Schema indexSchema = new Schema(0, "IndexKey", fieldClasses, fieldNames);
 
    /**
     * Construct a new secondary index which is based upon a field within the
     * primary table specified by name.
-    * @param db database handle
     * @param primaryTable primary table.
     * @param colIndex identifies the indexed column within the primary table.
-    * @throws IOException
+    * @throws IOException thrown if an IO error occurs
     */
    FixedIndexTable(Table primaryTable, int colIndex) throws IOException {
-       this(primaryTable,
-           primaryTable.getDBHandle().getMasterTable().createTableRecord(primaryTable.getName(), indexSchema, colIndex));
+       this(primaryTable, primaryTable.getDBHandle().getMasterTable().createTableRecord(
+           primaryTable.getName(), indexSchema, colIndex));
    }
 
    /**
     * Construct a new or existing secondary index. An existing index must have
     * its' root ID specified within the tableRecord.
-    * @param db database handle
-    * @param bufferMgr database buffer manager
+    * @param primaryTable primary table.
     * @param indexTableRecord specifies the index parameters.
+    * @throws IOException thrown if an IO error occurs
     */
-   FixedIndexTable(Table primaryTable, TableRecord indexTableRecord) {
+   FixedIndexTable(Table primaryTable, TableRecord indexTableRecord) throws IOException {
        super(primaryTable, indexTableRecord);
    }
 
    /**
     * Find all primary keys which correspond to the specified indexed field
     * value.
-    * @param field the field value to search for.
+    * @param indexValue the field value to search for.
     * @return list of primary keys
+    * @throws IOException thrown if an IO error occurs
     */
    @Override
    long[] findPrimaryKeys(Field indexValue) throws IOException {
@@ -82,7 +78,7 @@ class FixedIndexTable extends IndexTable {
    /**
     * Get the number of primary keys which correspond to the specified indexed field
     * value.
-    * @param field the field value to search for.
+    * @param indexValue the field value to search for.
     * @return key count
     */
    @Override
@@ -158,7 +154,8 @@ class FixedIndexTable extends IndexTable {
     * @see ghidra.framework.store.db.IndexTable#indexIterator(ghidra.framework.store.db.Field, ghidra.framework.store.db.Field, boolean)
     */
    @Override
-   DBFieldIterator indexIterator(Field minField, Field maxField, boolean atMin) throws IOException {
+   DBFieldIterator indexIterator(Field minField, Field maxField, boolean atMin)
+           throws IOException {
        long min = minField != null ? minField.getLongValue() : Long.MIN_VALUE;
        long max = maxField != null ? maxField.getLongValue() : Long.MAX_VALUE;
        return new IndexLongIterator(min, max, atMin);
@@ -168,7 +165,8 @@ class FixedIndexTable extends IndexTable {
     * @see db.IndexTable#indexIterator(db.Field, db.Field, db.Field, boolean)
     */
    @Override
-   DBFieldIterator indexIterator(Field minField, Field maxField, Field startField, boolean before) throws IOException {
+   DBFieldIterator indexIterator(Field minField, Field maxField, Field startField, boolean before)
+           throws IOException {
        if (startField == null) {
            throw new IllegalArgumentException("starting index value required");
        }
@@ -223,7 +221,8 @@ class FixedIndexTable extends IndexTable {
         * @param longValue
         * @param before
         */
-       public IndexLongIterator(long minValue, long maxValue, long start, boolean before) throws IOException {
+       public IndexLongIterator(long minValue, long maxValue, long start, boolean before)
+               throws IOException {
 
            indexIterator = indexTable.longKeyIterator(minValue, maxValue, start);
 
@@ -235,6 +234,7 @@ class FixedIndexTable extends IndexTable {
            }
        }
 
+       @Override
        public boolean hasNext() throws IOException {
            if (hasNext)
                return true;
@@ -244,12 +244,14 @@ class FixedIndexTable extends IndexTable {
                keyField.setLongValue(key);
                hasNext = true;
                hasPrev = false;
-           } catch (NoSuchElementException e) {
+           }
+           catch (NoSuchElementException e) {
                return false;
            }
            return true;
        }
 
+       @Override
        public boolean hasPrevious() throws IOException {
            if (hasPrev)
                return true;
@@ -259,12 +261,14 @@ class FixedIndexTable extends IndexTable {
                keyField.setLongValue(key);
                hasNext = false;
                hasPrev = true;
-           } catch (NoSuchElementException e) {
+           }
+           catch (NoSuchElementException e) {
                return false;
            }
            return true;
        }
 
+       @Override
        public Field next() throws IOException {
            if (hasNext || hasNext()) {
                hasNext = false;
@@ -275,6 +279,7 @@ class FixedIndexTable extends IndexTable {
            return null;
        }
 
+       @Override
        public Field previous() throws IOException {
            if (hasPrev || hasPrevious()) {
                hasNext = true;
@@ -290,6 +295,7 @@ class FixedIndexTable extends IndexTable {
         * index value (lastKey).
         * @see db.DBFieldIterator#delete()
         */
+       @Override
        public boolean delete() throws IOException {
            if (lastKey == null)
                return false;
@@ -68,7 +68,7 @@ abstract class IndexTable {
     * @param indexTableRecord specifies the index parameters.
     * @throws IOException thrown if IO error occurs
     */
-   IndexTable(Table primaryTable, TableRecord indexTableRecord) {
+   IndexTable(Table primaryTable, TableRecord indexTableRecord) throws IOException {
        if (!primaryTable.useLongKeys())
            throw new AssertException("Only long-key tables may be indexed");
        this.db = primaryTable.getDBHandle();
@@ -87,7 +87,7 @@ abstract class IndexTable {
     * @return IndexTable index table
     * @throws IOException thrown if IO error occurs
     */
-   static IndexTable getIndexTable(DBHandle db, TableRecord indexTableRecord) {
+   static IndexTable getIndexTable(DBHandle db, TableRecord indexTableRecord) throws IOException {
        String name = indexTableRecord.getName();
        Table primaryTable = db.getTable(name);
        if (primaryTable == null)
@@ -1,6 +1,5 @@
 /* ###
  * IP: GHIDRA
- * REVIEWED: YES
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -16,11 +15,12 @@
  */
 package db;
 
-import ghidra.util.exception.AssertException;
-
 import java.util.ArrayList;
 import java.util.StringTokenizer;
 
+import db.Field.UnsupportedFieldException;
+import ghidra.util.exception.AssertException;
+
 /**
  * Class for definining the columns in a Ghidra Database table.
  */
@@ -93,8 +93,10 @@ public class Schema {
     * @param fieldTypes
     * @param packedFieldNames packed list of field names separated by ';'.
     * The first field name corresponds to the key name.
+    * @throws UnsupportedFieldException if unsupported fieldType specified
     */
-   Schema(int version, byte keyFieldType, byte[] fieldTypes, String packedFieldNames) {
+   Schema(int version, byte keyFieldType, byte[] fieldTypes, String packedFieldNames)
+           throws UnsupportedFieldException {
        this.version = version;
        this.keyType = Field.getField(keyFieldType);
        parseNames(packedFieldNames);
@@ -257,8 +259,8 @@ public class Schema {
     */
    public Record createRecord(Field key) {
        if (!getKeyFieldClass().equals(key.getClass())) {
-           throw new IllegalArgumentException("expected key field type of " +
-               keyType.getClass().getSimpleName());
+           throw new IllegalArgumentException(
+               "expected key field type of " + keyType.getClass().getSimpleName());
        }
        Field[] fieldValues = new Field[fieldClasses.length];
        for (int i = 0; i < fieldClasses.length; i++) {
@@ -18,6 +18,7 @@ package db;
 import java.io.IOException;
 import java.util.*;
 
+import db.Field.UnsupportedFieldException;
 import ghidra.util.Msg;
 import ghidra.util.datastruct.IntObjectHashtable;
 import ghidra.util.exception.*;
@@ -56,8 +57,9 @@ public class Table {
     * Construct a new or existing Table.
     * @param db database handle
     * @param tableRecord master table record for this table.
+    * @throws UnsupportedFieldException if unsupported schema field encountered
     */
-   Table(DBHandle db, TableRecord tableRecord) {
+   Table(DBHandle db, TableRecord tableRecord) throws UnsupportedFieldException {
        this.db = db;
        this.tableRecord = tableRecord;
 
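
Note (summary, not part of the diff): the checked exception introduced in Field propagates through the Schema and Table constructors, so opening a table whose schema carries a field type from a newer Ghidra now fails with UnsupportedFieldException (a subclass of IOException) instead of an assertion error. A hedged sketch, assuming an existing TableRecord named tableRecord:

    try {
        Schema schema = tableRecord.getSchema(); // may now throw UnsupportedFieldException
    }
    catch (Field.UnsupportedFieldException e) {
        // report the database as created by a newer version of Ghidra
    }
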
@@ -1,6 +1,5 @@
 /* ###
  * IP: GHIDRA
- * REVIEWED: YES
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -16,6 +15,8 @@
  */
 package db;
 
+import db.Field.UnsupportedFieldException;
+
 /**
  * <code>TableRecord</code> manages information about a table. Each TableRecord
  * corresponds to a stored record within the master table.
@@ -32,8 +33,7 @@ class TableRecord implements Comparable<TableRecord> {
    private static final int MAX_KEY_COLUMN = 7;
    private static final int RECORD_COUNT_COLUMN = 8;
 
-   private static Class<?>[] fieldClasses = {
-       StringField.class, // name of table
+   private static Class<?>[] fieldClasses = { StringField.class, // name of table
        IntField.class, // Schema version
        IntField.class, // Root buffer ID (first buffer)
        ByteField.class, // Key field type
@@ -44,17 +44,8 @@ class TableRecord implements Comparable<TableRecord> {
        IntField.class // number of records
    };
 
-   private static String[] tableRecordFieldNames = {
-       "TableName",
-       "SchemaVersion",
-       "RootBufferId",
-       "KeyType",
-       "FieldTypes",
-       "FieldNames",
-       "IndexColumn",
-       "MaxKey",
-       "RecordCount"
-   };
+   private static String[] tableRecordFieldNames = { "TableName", "SchemaVersion", "RootBufferId",
+       "KeyType", "FieldTypes", "FieldNames", "IndexColumn", "MaxKey", "RecordCount" };
 
    private static Schema schema = new Schema(0, "TableNum", fieldClasses, tableRecordFieldNames);
 
@@ -158,12 +149,11 @@ class TableRecord implements Comparable<TableRecord> {
    /**
     * Get the table schema
     * @return table schema
+    * @throws UnsupportedFieldException if unsupported schema field encountered
     */
-   Schema getSchema() {
-       return new Schema(record.getIntValue(VERSION_COLUMN),
-           record.getByteValue(KEY_TYPE_COLUMN),
-           record.getBinaryData(FIELD_TYPES_COLUMN),
-           record.getString(FIELD_NAMES_COLUMN));
+   Schema getSchema() throws UnsupportedFieldException {
+       return new Schema(record.getIntValue(VERSION_COLUMN), record.getByteValue(KEY_TYPE_COLUMN),
+           record.getBinaryData(FIELD_TYPES_COLUMN), record.getString(FIELD_NAMES_COLUMN));
    }
 
    /**
@@ -239,6 +229,7 @@ class TableRecord implements Comparable<TableRecord> {
     * key of another table record (obj).
     * @see java.lang.Comparable#compareTo(java.lang.Object)
     */
+   @Override
    public int compareTo(TableRecord otherRecord) {
        long myKey = record.getKey();
        long otherKey = otherRecord.record.getKey();
@@ -36,10 +36,9 @@ class VarIndexTable extends IndexTable {
    /**
     * Construct a new secondary index which is based upon a field within the
     * primary table specified by name.
-    * @param db database handle
     * @param primaryTable primary table.
     * @param colIndex identifies the indexed column within the primary table.
-    * @throws IOException
+    * @throws IOException thrown if an IO error occurs
     */
    VarIndexTable(Table primaryTable, int colIndex) throws IOException {
        this(primaryTable,
@@ -52,11 +51,11 @@ class VarIndexTable extends IndexTable {
    /**
     * Construct a new or existing secondary index. An existing index must have
     * its' root ID specified within the tableRecord.
-    * @param db database handle
-    * @param bufferMgr database buffer manager
+    * @param primaryTable primary table.
     * @param indexTableRecord specifies the index parameters.
+    * @throws IOException thrown if an IO error occurs
     */
-   VarIndexTable(Table primaryTable, TableRecord indexTableRecord) {
+   VarIndexTable(Table primaryTable, TableRecord indexTableRecord) throws IOException {
        super(primaryTable, indexTableRecord);
        this.indexSchema = indexTable.getSchema();
    }
@@ -64,8 +63,9 @@ class VarIndexTable extends IndexTable {
    /**
     * Find all primary keys which correspond to the specified indexed field
     * value.
-    * @param field the field value to search for.
+    * @param indexValue the field value to search for.
     * @return list of primary keys
+    * @throws IOException thrown if an IO error occurs
     */
    @Override
    long[] findPrimaryKeys(Field indexValue) throws IOException {
@@ -81,8 +81,9 @@ class VarIndexTable extends IndexTable {
    /**
     * Get the number of primary keys which correspond to the specified indexed field
     * value.
-    * @param field the field value to search for.
+    * @param indexValue the field value to search for.
     * @return key count
+    * @throws IOException thrown if an IO error occurs
     */
    @Override
    int getKeyCount(Field indexValue) throws IOException {
@@ -47,8 +47,9 @@ class VarKeyInteriorNode extends VarKeyNode {
     * Construct an existing variable-length-key interior node.
     * @param nodeMgr table node manager instance
     * @param buf node buffer
+    * @throws IOException if IO error occurs
     */
-   VarKeyInteriorNode(NodeMgr nodeMgr, DataBuffer buf) {
+   VarKeyInteriorNode(NodeMgr nodeMgr, DataBuffer buf) throws IOException {
        super(nodeMgr, buf);
    }
 
@@ -46,8 +46,9 @@ abstract class VarKeyNode implements BTreeNode {
     * Construct an existing variable-length-key node.
     * @param nodeMgr table node manager instance
     * @param buf node buffer
+    * @throws IOException if IO error occurs
     */
-   VarKeyNode(NodeMgr nodeMgr, DataBuffer buf) {
+   VarKeyNode(NodeMgr nodeMgr, DataBuffer buf) throws IOException {
        this.nodeMgr = nodeMgr;
        this.buffer = buf;
        keyType = Field.getField(buf.getByte(KEY_TYPE_OFFSET));
@@ -55,8 +55,9 @@ class VarKeyRecordNode extends VarKeyNode {
     * Construct an existing variable-length-key record leaf node.
     * @param nodeMgr table node manager instance
     * @param buf node buffer
+    * @throws IOException thrown if IO error occurs
     */
-   VarKeyRecordNode(NodeMgr nodeMgr, DataBuffer buf) {
+   VarKeyRecordNode(NodeMgr nodeMgr, DataBuffer buf) throws IOException {
        super(nodeMgr, buf);
    }
 
@@ -61,8 +61,8 @@ public interface ContentHandler extends ExtensionPoint {
     * @throws CancelledException if the user cancels
     */
    long createFile(FileSystem fs, FileSystem userfs, String path, String name,
-           DomainObject domainObject, TaskMonitor monitor) throws IOException,
-           InvalidNameException, CancelledException;
+           DomainObject domainObject, TaskMonitor monitor)
+           throws IOException, InvalidNameException, CancelledException;
 
    /**
     * Open a folder item for immutable use. If any changes are attempted on the
@@ -83,8 +83,8 @@ public interface ContentHandler extends ExtensionPoint {
     * difference which could not be handled.
     */
    DomainObjectAdapter getImmutableObject(FolderItem item, Object consumer, int version,
-           int minChangeVersion, TaskMonitor monitor) throws IOException, CancelledException,
-           VersionException;
+           int minChangeVersion, TaskMonitor monitor)
+           throws IOException, CancelledException, VersionException;
 
    /**
     * Open a folder item for read-only use. While changes are permitted on the
@@ -104,8 +104,8 @@ public interface ContentHandler extends ExtensionPoint {
     * difference which could not be handled.
     */
    DomainObjectAdapter getReadOnlyObject(FolderItem item, int version, boolean okToUpgrade,
-           Object consumer, TaskMonitor monitor) throws IOException, VersionException,
-           CancelledException;
+           Object consumer, TaskMonitor monitor)
+           throws IOException, VersionException, CancelledException;
 
    /**
     * Open a folder item for update. Changes made to the returned object may be
@@ -138,7 +138,8 @@ public interface ContentHandler extends ExtensionPoint {
     * @param newerVersion the newer version number
     * @return the set of changes that were made
     * @throws VersionException if a database version change prevents reading of data.
-    * @throws IOException if a folder item access error occurs
+    * @throws IOException if a folder item access error occurs or change set was
+    * produced by newer version of software and can not be read
     */
    ChangeSet getChangeSet(FolderItem versionedFolderItem, int olderVersion, int newerVersion)
            throws VersionException, IOException;
@@ -23,6 +23,7 @@ import java.util.Map;
 import javax.swing.Icon;
 
 import db.DBHandle;
+import db.Field;
 import db.buffers.*;
 import ghidra.framework.client.ClientUtil;
 import ghidra.framework.client.NotConnectedException;
@@ -1748,6 +1749,9 @@ public class GhidraFileData {
        catch (FileNotFoundException e) {
            // file has been deleted, just return an empty map.
        }
+       catch (Field.UnsupportedFieldException e) {
+           // file created with newer version of Ghidra
+       }
        catch (IOException e) {
            Msg.error(this, "Read meta-data error", e);
        }
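
Note (illustrative, not part of the commit): because Field.UnsupportedFieldException extends IOException, the new catch block must appear before the generic IOException handler, as it does in the hunk above; otherwise the broader handler would log the condition as an error instead of silently skipping the metadata. A minimal sketch with a hypothetical readMetadata() helper standing in for the real read:

    try {
        readMetadata();
    }
    catch (Field.UnsupportedFieldException e) {
        // file created with newer version of Ghidra; leave the metadata map empty
    }
    catch (IOException e) {
        Msg.error(this, "Read meta-data error", e);
    }
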
@@ -107,7 +107,8 @@ public interface DomainFile extends Comparable<DomainFile> {
     * Returns changes made to versioned file by others since checkout was performed.
     * @return change set or null
     * @throws VersionException latest version was created with a newer version of software
-    * @throws IOException
+    * @throws IOException if a folder item access error occurs or change set was
+    * produced by newer version of software and can not be read
     */
    public ChangeSet getChangesByOthersSinceCheckout() throws VersionException, IOException;
 
@@ -352,8 +353,8 @@ public interface DomainFile extends Comparable<DomainFile> {
     * @throws IOException thrown if an IO or access error occurs.
     * @throws CancelledException if task monitor cancelled operation.
     */
-   public boolean checkout(boolean exclusive, TaskMonitor monitor) throws IOException,
-           CancelledException;
+   public boolean checkout(boolean exclusive, TaskMonitor monitor)
+           throws IOException, CancelledException;
 
    /**
     * Performs check in to associated repository. File must be checked-out
@@ -379,8 +380,8 @@ public interface DomainFile extends Comparable<DomainFile> {
     * If okToUpgrade was false, check exception to see if it can be upgraded
     * @throws CancelledException if task monitor cancelled operation
     */
-   public void merge(boolean okToUpgrade, TaskMonitor monitor) throws IOException,
-           VersionException, CancelledException;
+   public void merge(boolean okToUpgrade, TaskMonitor monitor)
+           throws IOException, VersionException, CancelledException;
 
    /**
     * Undo "checked-out" file. The original repository file is restored.
@@ -456,8 +457,8 @@ public interface DomainFile extends Comparable<DomainFile> {
     * @throws IOException thrown if an IO or access error occurs.
     * @throws CancelledException if task monitor cancelled operation.
     */
-   DomainFile copyTo(DomainFolder newParent, TaskMonitor monitor) throws IOException,
-           CancelledException;
+   DomainFile copyTo(DomainFolder newParent, TaskMonitor monitor)
+           throws IOException, CancelledException;
 
    /**
     * Copy a specific version of this file to the specified destFolder.
@@ -21,7 +21,7 @@
 <listAttribute key="org.eclipse.jdt.launching.MODULEPATH">
 <listEntry value="<?xml version="1.0" encoding="UTF-8" standalone="no"?> <runtimeClasspathEntry containerPath="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11" path="4" type="4"/> "/>
 </listAttribute>
-<stringAttribute key="org.eclipse.jdt.launching.PROGRAM_ARGUMENTS" value="ghidra.pcodeCPort.slgh_compile.SleighCompileLauncher -i "${project_loc}/build/data/sleighArgs.txt" -a "${project_loc}/data/languages""/>
+<stringAttribute key="org.eclipse.jdt.launching.PROGRAM_ARGUMENTS" value="ghidra.pcodeCPort.slgh_compile.SleighCompileLauncher -DBaseDir="${project_loc}/../../../../" -i "${project_loc}/build/tmp/sleighArgs.txt" -a "${project_loc}/data/languages""/>
 <stringAttribute key="org.eclipse.jdt.launching.PROJECT_ATTR" value="Framework Utility"/>
 <stringAttribute key="org.eclipse.jdt.launching.VM_ARGUMENTS" value="-Djava.awt.headless=true -Xmx1048M -XX:+IgnoreUnrecognizedVMOptions -Djava.system.class.loader=ghidra.GhidraClassLoader -Dfile.encoding=UTF-8 -Duser.country=US -Duser.language=en -Duser.variant -Xdock:name="Sleigh" -Dvisualvm.display.name=Sleigh"/>
 </launchConfiguration>
@@ -422,7 +422,7 @@ public class SleighLanguage implements Language {
        String languageName = specName + ".slaspec";
        ResourceFile languageFile = new ResourceFile(slaFile.getParentFile(), languageName);
 
-       // see gradleScripts/processorUtils.gradle for sleighArgs.txt generation
+       // see gradle/processorUtils.gradle for sleighArgs.txt generation
        ResourceFile sleighArgsFile = null;
        ResourceFile languageModule = Application.getModuleContainingResourceFile(languageFile);
        if (languageModule != null) {
@@ -430,21 +430,23 @@ public class SleighLanguage implements Language {
            sleighArgsFile = new ResourceFile(languageModule, "data/sleighArgs.txt");
        }
        else {
-           sleighArgsFile = new ResourceFile(languageModule, "build/data/sleighArgs.txt");
+           sleighArgsFile = new ResourceFile(languageModule, "build/tmp/sleighArgs.txt");
        }
    }
 
-   Map<String, String> defineMap;
    String[] args;
    if (sleighArgsFile != null && sleighArgsFile.isFile()) {
-       args = new String[] { "-i", sleighArgsFile.getAbsolutePath(),
+       String baseDir = Application.getInstallationDirectory().getAbsolutePath().replace(
+           File.separatorChar, '/');
+       if (!baseDir.endsWith("/")) {
+           baseDir += "/";
+       }
+       args = new String[] { "-DBaseDir=" + baseDir, "-i", sleighArgsFile.getAbsolutePath(),
            languageFile.getAbsolutePath(), description.getSlaFile().getAbsolutePath() };
-       defineMap = new HashMap<>();
    }
    else {
        args = new String[] { languageFile.getAbsolutePath(),
            description.getSlaFile().getAbsolutePath() };
-       defineMap = ModuleDefinitionsMap.getModuleMap();
    }
 
    try {
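
Note (illustrative, not part of the commit): the block above normalizes the installation directory to forward slashes with a trailing '/' and hands it to the Sleigh compiler as a -DBaseDir= define, so entries in build/tmp/sleighArgs.txt can be written relative to the installation root. A hedged sketch of the same normalization using a hypothetical path:

    String installDir = "/opt/ghidra";                            // hypothetical installation directory
    String baseDir = installDir.replace(File.separatorChar, '/'); // no-op on POSIX, converts '\' on Windows
    if (!baseDir.endsWith("/")) {
        baseDir += "/";
    }
    String[] args = { "-DBaseDir=" + baseDir, "-i",
        baseDir + "Ghidra/Processors/ARM/build/tmp/sleighArgs.txt" }; // hypothetical module path
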
@@ -454,7 +456,7 @@ public class SleighLanguage implements Language {
            buf.append(" ");
        }
        Msg.debug(this, "Sleigh compile: " + buf);
-       int returnCode = SleighCompileLauncher.runMain(args, defineMap);
+       int returnCode = SleighCompileLauncher.runMain(args);
        if (returnCode != 0) {
            throw new SleighException("Errors compiling " + languageFile.getAbsolutePath() +
                " -- please check log messages for details");
@@ -1706,6 +1706,6 @@ public class SleighCompile extends SleighBase {
     * @throws RecognitionException for parsing errors
     */
    public static void main(String[] args) throws JDOMException, IOException, RecognitionException {
-       System.exit(SleighCompileLauncher.runMain(args, new HashMap<String, String>()));
+       System.exit(SleighCompileLauncher.runMain(args));
    }
 }
@@ -71,24 +71,24 @@ public class SleighCompileLauncher implements GhidraLaunchable {
        ApplicationConfiguration configuration = new ApplicationConfiguration();
        Application.initializeApplication(layout, configuration);
 
-       System.exit(runMain(args, new HashMap<String, String>()));
+       System.exit(runMain(args));
    }
 
    /**
     * Execute the Sleigh compiler process
     *
     * @param args sleigh compiler command line arguments
-    * @param preprocs additional preprocessor macro
     * @return exit code (TODO: exit codes are not well defined)
     * @throws JDOMException
     * @throws IOException
     * @throws RecognitionException
     */
-   public static int runMain(String[] args, Map<String, String> preprocs)
+   public static int runMain(String[] args)
            throws JDOMException, IOException, RecognitionException {
        int retval;
        String filein = null;
        String fileout = null;
+       Map<String, String> preprocs = new HashMap<>();
 
        SleighCompile.yydebug = false;
        boolean allMode = false;
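
Note (illustrative, not part of the commit): runMain no longer accepts a Map of preprocessor macros; defines are supplied on the command line (as the -DBaseDir= usage elsewhere in this commit does) and collected into the local preprocs map declared above. A hedged usage sketch with hypothetical paths:

    String[] args = {
        "-DBaseDir=/opt/ghidra/",                 // hypothetical preprocessor define
        "/opt/ghidra/languages/example.slaspec",  // hypothetical input spec
        "/opt/ghidra/languages/example.sla"       // hypothetical compiled output
    };
    // declares throws JDOMException, IOException, RecognitionException
    int exitCode = SleighCompileLauncher.runMain(args);
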
@@ -218,8 +218,8 @@ public class SleighCompileLauncher implements GhidraLaunchable {
                System.out.println("Compiling " + input + ":");
                SleighCompile compiler = new SleighCompile();
                initCompiler(compiler, preprocs, unnecessaryPcodeWarning, lenientConflict,
-                   allCollisionWarning,
-                   allNopWarning, deadTempWarning, unusedFieldWarning, enforceLocalKeyWord);
+                   allCollisionWarning, allNopWarning, deadTempWarning, unusedFieldWarning,
+                   enforceLocalKeyWord);
 
                String outname = input.getName().replace(".slaspec", ".sla");
                File output = new File(input.getParent(), outname);
@@ -247,8 +247,8 @@ public class SleighCompileLauncher implements GhidraLaunchable {
        // single file compile
        SleighCompile compiler = new SleighCompile();
        initCompiler(compiler, preprocs, unnecessaryPcodeWarning, lenientConflict,
-           allCollisionWarning, allNopWarning,
-           deadTempWarning, unusedFieldWarning, enforceLocalKeyWord);
+           allCollisionWarning, allNopWarning, deadTempWarning, unusedFieldWarning,
+           enforceLocalKeyWord);
        if (i == args.length) {
            Msg.error(SleighCompile.class, "Missing input file name");
            return 1;
@@ -418,6 +418,7 @@ public class SleighCompileLauncher implements GhidraLaunchable {
            return 4;
        }
        catch (PreprocessorException e) {
+           Msg.error(SleighCompile.class, e.getMessage());
            Msg.error(SleighCompile.class, "Errors during preprocessing, halting compilation");
            return 5;
        }
@@ -74,9 +74,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
        addedSourceArchiveIds = new HashSet<Long>();
    }
 
-   /**
-    * @see ghidra.program.model.listing.DataTypeChangeSet#dataTypeChanged(long)
-    */
    @Override
    public synchronized void dataTypeChanged(long id) {
        if (!inTransaction) {
@@ -88,9 +85,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
        }
    }
 
-   /**
-    * @see ghidra.program.model.listing.DataTypeChangeSet#dataTypeAdded(long)
-    */
    @Override
    public synchronized void dataTypeAdded(long id) {
        if (!inTransaction) {
@@ -99,25 +93,16 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
        tmpAddedDataTypeIds.add(new Long(id));
    }
 
-   /**
-    * @see ghidra.program.model.listing.DataTypeChangeSet#getDataTypeChanges()
-    */
    @Override
    public synchronized long[] getDataTypeChanges() {
        return getLongs(changedDataTypeIds);
    }
 
-   /**
-    * @see ghidra.program.model.listing.DataTypeChangeSet#getDataTypeAdditions()
-    */
    @Override
    public synchronized long[] getDataTypeAdditions() {
        return getLongs(addedDataTypeIds);
    }
 
-   /**
-    * @see ghidra.program.model.listing.DataTypeChangeSet#categoryChanged(long)
-    */
    @Override
    public synchronized void categoryChanged(long id) {
        if (!inTransaction) {
@@ -129,9 +114,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
        }
    }
 
-   /**
-    * @see ghidra.program.model.listing.DataTypeChangeSet#categoryAdded(long)
-    */
    @Override
    public synchronized void categoryAdded(long id) {
        if (!inTransaction) {
@@ -140,25 +122,16 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
        tmpAddedCategoryIds.add(new Long(id));
    }
 
-   /**
-    * @see ghidra.program.model.listing.DataTypeChangeSet#getCategoryChanges()
-    */
    @Override
    public synchronized long[] getCategoryChanges() {
        return getLongs(changedCategoryIds);
    }
 
-   /**
-    * @see ghidra.program.model.listing.DataTypeChangeSet#getCategoryAdditions()
-    */
    @Override
    public synchronized long[] getCategoryAdditions() {
        return getLongs(addedCategoryIds);
    }
 
-   /* (non-Javadoc)
-    * @see ghidra.program.model.listing.DataTypeChangeSet#archiveAdded(long)
-    */
    @Override
    public void sourceArchiveAdded(long id) {
        if (!inTransaction) {
@@ -167,9 +140,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
        tmpAddedSourceArchiveIds.add(new Long(id));
    }
 
-   /* (non-Javadoc)
-    * @see ghidra.program.model.listing.DataTypeChangeSet#archiveChanged(long)
-    */
    @Override
    public void sourceArchiveChanged(long id) {
        if (!inTransaction) {
@@ -181,17 +151,11 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
        }
    }
 
-   /* (non-Javadoc)
-    * @see ghidra.program.model.listing.DataTypeChangeSet#getArchiveAdditions()
-    */
    @Override
    public long[] getSourceArchiveAdditions() {
        return getLongs(addedSourceArchiveIds);
    }
 
-   /* (non-Javadoc)
-    * @see ghidra.program.model.listing.DataTypeChangeSet#getArchiveChanges()
-    */
    @Override
    public long[] getSourceArchiveChanges() {
        return getLongs(changedSourceArchiveIds);
@@ -213,9 +177,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
        clearUndo();
    }
 
-   /**
-    * @see ghidra.framework.data.DomainObjectDBChangeSet#startTransaction()
-    */
    @Override
    public synchronized void startTransaction() {
        redoList.clear();
@@ -229,9 +190,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
        tmpAddedSourceArchiveIds = new HashSet<Long>();
    }
 
-   /**
-    * @see ghidra.framework.data.DomainObjectDBChangeSet#endTransaction(boolean)
-    */
    @Override
    public synchronized void endTransaction(boolean commit) {
        if (!inTransaction) {
@@ -268,9 +226,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
 
    }
 
-   /**
-    * @see ghidra.framework.data.DomainObjectDBChangeSet#undo()
-    */
    @Override
    public synchronized void undo() {
        MyChangeDiff diff = undoList.removeLast();
@@ -283,9 +238,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
        redoList.addLast(diff);
    }
 
-   /**
-    * @see ghidra.framework.data.DomainObjectDBChangeSet#redo()
-    */
    @Override
    public synchronized void redo() {
        MyChangeDiff diff = redoList.removeLast();
@@ -298,26 +250,17 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
        undoList.addLast(diff);
    }
 
-   /**
-    * @see ghidra.framework.model.ChangeSet#clearUndo()
-    */
    @Override
    public synchronized void clearUndo() {
        undoList.clear();
        redoList.clear();
    }
 
-   /**
-    * @see ghidra.framework.data.DomainObjectDBChangeSet#setMaxUndos(int)
-    */
    @Override
    public synchronized void setMaxUndos(int numUndos) {
        this.numUndos = numUndos;
    }
 
-   /**
-    * @see ghidra.framework.model.ChangeSet#read(ghidra.framework.store.db.DBHandle)
-    */
    @Override
    public synchronized void read(DBHandle dbh) throws IOException {
 
@@ -343,6 +286,9 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
    private void readIdRecords(DBHandle dbh, String tableName, Set<Long> ids) throws IOException {
        Table table = dbh.getTable(tableName);
        if (table != null) {
+           if (table.getSchema().getVersion() != 0) {
+               throw new IOException("Change data produced with newer version of Ghidra");
+           }
            RecordIterator it = table.iterator();
            while (it.hasNext()) {
                Record rec = it.next();
@ -351,9 +297,6 @@ class DataTypeArchiveDBChangeSet implements DataTypeArchiveChangeSet, DomainObje
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* @see ghidra.framework.model.ChangeSet#write(ghidra.framework.store.db.DBHandle)
|
|
||||||
*/
|
|
||||||
@Override
|
@Override
|
||||||
public synchronized void write(DBHandle dbh, boolean isRecoverySave) throws IOException {
|
public synchronized void write(DBHandle dbh, boolean isRecoverySave) throws IOException {
|
||||||
|
|
||||||
|
|
|
@@ -20,8 +20,7 @@ import java.io.IOException;
 import javax.swing.Icon;
 import javax.swing.ImageIcon;
 
-import db.DBConstants;
-import db.DBHandle;
+import db.*;
 import db.buffers.BufferFile;
 import db.buffers.ManagedBufferFile;
 import ghidra.framework.data.*;

@@ -80,6 +79,9 @@ public class ProgramContentHandler extends DBContentHandler {
 			success = true;
 			return program;
 		}
+		catch (Field.UnsupportedFieldException e) {
+			throw new VersionException(false);
+		}
 		catch (VersionException e) {
 			throw e;
 		}

@@ -136,6 +138,9 @@ public class ProgramContentHandler extends DBContentHandler {
 			success = true;
 			return program;
 		}
+		catch (Field.UnsupportedFieldException e) {
+			throw new VersionException(false);
+		}
 		catch (VersionException e) {
 			throw e;
 		}

@@ -199,6 +204,9 @@ public class ProgramContentHandler extends DBContentHandler {
 			success = true;
 			return program;
 		}
+		catch (Field.UnsupportedFieldException e) {
+			throw new VersionException(false);
+		}
 		catch (VersionException e) {
 			throw e;
 		}
@@ -126,9 +126,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		addedTagIds = new HashSet<Long>();
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#getAddressSet()
-	 */
 	@Override
 	public synchronized AddressSetView getAddressSet() {
 		SynchronizedAddressSetCollection addressSetCollection =

@@ -149,9 +146,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#add(ghidra.program.model.address.AddressSetView)
-	 */
 	@Override
 	public synchronized void add(AddressSetView addrSet) {
 		if (!inTransaction) {

@@ -160,9 +154,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		tmpAddrs.add(addrSet);
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#addRange(ghidra.program.model.address.Address, ghidra.program.model.address.Address)
-	 */
 	@Override
 	public synchronized void addRange(Address addr1, Address addr2) {
 		if (!inTransaction) {

@@ -173,9 +164,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		}
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#addRegisterRange(ghidra.program.model.address.Address, ghidra.program.model.address.Address)
-	 */
 	@Override
 	public synchronized void addRegisterRange(Address addr1, Address addr2) {
 		if (!inTransaction) {

@@ -184,9 +172,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		tmpRegAddrs.addRange(addr1, addr2);
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#getRegisterAddressSet()
-	 */
 	@Override
 	public synchronized AddressSetView getRegisterAddressSet() {
 		SynchronizedAddressSetCollection addressSetCollection =

@@ -195,9 +180,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		return addressSetCollection.getCombinedAddressSet();
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#dataTypeChanged(long)
-	 */
 	@Override
 	public synchronized void dataTypeChanged(long id) {
 		if (!inTransaction) {

@@ -209,9 +191,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		}
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#dataTypeAdded(long)
-	 */
 	@Override
 	public synchronized void dataTypeAdded(long id) {
 		if (!inTransaction) {

@@ -220,25 +199,16 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		tmpAddedDataTypeIds.add(new Long(id));
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#getDataTypeChanges()
-	 */
 	@Override
 	public synchronized long[] getDataTypeChanges() {
 		return getLongs(changedDataTypeIds);
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#getDataTypeAdditions()
-	 */
 	@Override
 	public synchronized long[] getDataTypeAdditions() {
 		return getLongs(addedDataTypeIds);
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#categoryChanged(long)
-	 */
 	@Override
 	public synchronized void categoryChanged(long id) {
 		if (!inTransaction) {

@@ -250,9 +220,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		}
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#categoryAdded(long)
-	 */
 	@Override
 	public synchronized void categoryAdded(long id) {
 		if (!inTransaction) {

@@ -261,25 +228,16 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		tmpAddedCategoryIds.add(new Long(id));
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#getCategoryChanges()
-	 */
 	@Override
 	public synchronized long[] getCategoryChanges() {
 		return getLongs(changedCategoryIds);
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#getCategoryAdditions()
-	 */
 	@Override
 	public synchronized long[] getCategoryAdditions() {
 		return getLongs(addedCategoryIds);
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#programTreeChanged(long)
-	 */
 	@Override
 	public synchronized void programTreeChanged(long id) {
 		if (!inTransaction) {

@@ -291,9 +249,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		}
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#programTreeAdded(long)
-	 */
 	@Override
 	public synchronized void programTreeAdded(long id) {
 		if (!inTransaction) {

@@ -302,25 +257,16 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		tmpAddedProgramTreeIds.add(new Long(id));
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#getProgramTreeChanges()
-	 */
 	@Override
 	public synchronized long[] getProgramTreeChanges() {
 		return getLongs(changedProgramTreeIds);
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#getProgramTreeAdditions()
-	 */
 	@Override
 	public synchronized long[] getProgramTreeAdditions() {
 		return getLongs(addedProgramTreeIds);
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#symbolChanged(long)
-	 */
 	@Override
 	public synchronized void symbolChanged(long id) {
 		if (!inTransaction) {

@@ -332,9 +278,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		}
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#symbolAdded(long)
-	 */
 	@Override
 	public synchronized void symbolAdded(long id) {
 		if (!inTransaction) {

@@ -343,25 +286,16 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		tmpAddedSymbolIds.add(new Long(id));
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#getSymbolChanges()
-	 */
 	@Override
 	public synchronized long[] getSymbolChanges() {
 		return getLongs(changedSymbolIds);
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#getSymbolAdditions()
-	 */
 	@Override
 	public synchronized long[] getSymbolAdditions() {
 		return getLongs(addedSymbolIds);
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#tagChanged(long)
-	 */
 	@Override
 	public synchronized void tagChanged(long id) {
 		if (!inTransaction) {

@@ -373,9 +307,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		}
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#tagCreated(long)
-	 */
 	@Override
 	public synchronized void tagCreated(long id) {
 		if (!inTransaction) {

@@ -384,25 +315,16 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		tmpAddedTagIds.add(new Long(id));
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#getTagChanges()
-	 */
 	@Override
 	public synchronized long[] getTagChanges() {
 		return getLongs(changedTagIds);
 	}
 
-	/**
-	 * @see ghidra.program.model.listing.ProgramChangeSet#getTagCreations()
-	 */
 	@Override
 	public synchronized long[] getTagCreations() {
 		return getLongs(addedTagIds);
 	}
 
-	/* (non-Javadoc)
-	 * @see ghidra.program.model.listing.DataTypeChangeSet#archiveAdded(long)
-	 */
 	@Override
 	public void sourceArchiveAdded(long id) {
 		if (!inTransaction) {

@@ -411,9 +333,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		tmpAddedSourceArchiveIds.add(new Long(id));
 	}
 
-	/* (non-Javadoc)
-	 * @see ghidra.program.model.listing.DataTypeChangeSet#archiveChanged(long)
-	 */
 	@Override
 	public void sourceArchiveChanged(long id) {
 		if (!inTransaction) {

@@ -425,25 +344,16 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		}
 	}
 
-	/* (non-Javadoc)
-	 * @see ghidra.program.model.listing.DataTypeChangeSet#getArchiveAdditions()
-	 */
 	@Override
 	public long[] getSourceArchiveAdditions() {
 		return getLongs(addedSourceArchiveIds);
 	}
 
-	/* (non-Javadoc)
-	 * @see ghidra.program.model.listing.DataTypeChangeSet#getArchiveChanges()
-	 */
 	@Override
 	public long[] getSourceArchiveChanges() {
 		return getLongs(changedSourceArchiveIds);
 	}
 
-	/**
-	 * @see ghidra.framework.data.DomainObjectDBChangeSet#clear()
-	 */
 	@Override
 	public synchronized void clearUndo(boolean isCheckedOut) {
 		if (inTransaction) {

@@ -470,9 +380,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		clearUndo();
 	}
 
-	/**
-	 * @see ghidra.framework.data.DomainObjectDBChangeSet#startTransaction()
-	 */
 	@Override
 	public synchronized void startTransaction() {
 		inTransaction = true;

@@ -493,9 +400,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		tmpAddedTagIds = new HashSet<Long>();
 	}
 
-	/**
-	 * @see ghidra.framework.data.DomainObjectDBChangeSet#endTransaction(boolean)
-	 */
 	@Override
 	public synchronized void endTransaction(boolean commit) {
 		if (!inTransaction) {

@@ -556,9 +460,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		tmpAddedTagIds = null;
 	}
 
-	/**
-	 * @see ghidra.framework.data.DomainObjectDBChangeSet#undo()
-	 */
 	@Override
 	public synchronized void undo() {
 		ChangeDiff diff = undoList.removeLast();

@@ -579,9 +480,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		redoList.addLast(diff);
 	}
 
-	/**
-	 * @see ghidra.framework.data.DomainObjectDBChangeSet#redo()
-	 */
 	@Override
 	public synchronized void redo() {
 		ChangeDiff diff = redoList.removeLast();

@@ -602,26 +500,17 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		undoList.addLast(diff);
 	}
 
-	/**
-	 * @see ghidra.framework.model.ChangeSet#clearUndo()
-	 */
 	@Override
 	public synchronized void clearUndo() {
 		undoList.clear();
 		redoList.clear();
 	}
 
-	/**
-	 * @see ghidra.framework.data.DomainObjectDBChangeSet#setMaxUndos(int)
-	 */
 	@Override
 	public synchronized void setMaxUndos(int numUndos) {
 		this.numUndos = numUndos;
 	}
 
-	/**
-	 * @see ghidra.framework.model.ChangeSet#read(ghidra.framework.store.db.DBHandle)
-	 */
 	@Override
 	public synchronized void read(DBHandle dbh) throws IOException {
 

@@ -648,6 +537,9 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 	private void readIdRecords(DBHandle dbh, String tableName, Set<Long> ids) throws IOException {
 		Table table = dbh.getTable(tableName);
 		if (table != null) {
+			if (table.getSchema().getVersion() != 0) {
+				throw new IOException("Change data produced with newer version of Ghidra");
+			}
 			RecordIterator it = table.iterator();
 			while (it.hasNext()) {
 				Record rec = it.next();

@@ -660,6 +552,9 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 			throws IOException {
 		Table table = dbh.getTable(tableName);
 		if (table != null) {
+			if (table.getSchema().getVersion() != 0) {
+				throw new IOException("Change data produced with newer version of Ghidra");
+			}
 			RecordIterator it = table.iterator();
 			while (it.hasNext()) {
 				Record rec = it.next();

@@ -673,9 +568,6 @@ class ProgramDBChangeSet implements ProgramChangeSet, DomainObjectDBChangeSet {
 		}
 	}
 
-	/**
-	 * @see ghidra.framework.model.ChangeSet#write(ghidra.framework.store.db.DBHandle)
-	 */
 	@Override
 	public synchronized void write(DBHandle dbh, boolean isRecoverySave) throws IOException {
 
@@ -16,7 +16,7 @@
  */
 package ghidra.pcodeCPort.slgh_compile.regression;
 
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.*;
 
 import java.io.*;
 import java.util.*;

@@ -167,11 +167,9 @@ public class SleighCompileRegressionTest extends AbstractGenericTest {
 
 	private int runActualCompiler(File inputFile, File actualFile)
 			throws JDOMException, IOException, RecognitionException {
-		return SleighCompileLauncher.runMain(
-			new String[] { "-DMIPS=../../../../../../ghidra/Ghidra/Processors/MIPS",
-				"-D8051=../../../../../../ghidra/Ghidra/Processors/8051",
-				inputFile.getAbsolutePath(), actualFile.getAbsolutePath() },
-			new HashMap<String, String>());
+		return SleighCompileLauncher.runMain(new String[] { "-DBaseDir=../../../../../../",
+			"-DMIPS=ghidra/Ghidra/Processors/MIPS", "-D8051=ghidra/Ghidra/Processors/8051",
+			inputFile.getAbsolutePath(), actualFile.getAbsolutePath() });
 	}
 
 	private static final Pattern SPACEMATCH = Pattern.compile("^\\s*<print piece=\" \"/>\\s*$");
@@ -7,9 +7,9 @@ apply plugin: 'eclipse'
 
 eclipse.project.name = 'Processors AARCH64'
 
-sleighCompile {
-	args '-l'
-}
+sleighCompileOptions = [
+	'-l'
+]
 
 dependencies {
 	compile project(':Base')

@@ -10,6 +10,6 @@ dependencies {
 	compile project(':Base')
 }
 
-sleighCompile {
-	// args '-l'
-}
+sleighCompileOptions = [
+	// '-l'
+]

@@ -7,9 +7,9 @@ apply plugin: 'eclipse'
 eclipse.project.name = 'Processors Dalvik'
 
 
-sleighCompile {
-	args '-l'
-}
+sleighCompileOptions = [
+	'-l'
+]
 
 dependencies {
 	compile project(':Base')

@@ -3,6 +3,6 @@ apply from: "$rootProject.projectDir/gradle/processorProject.gradle"
 apply plugin: 'eclipse'
 eclipse.project.name = 'Processors HCS08'
 
-sleighCompile {
-	args '-l'
-}
+sleighCompileOptions = [
+	'-l'
+]

@@ -5,9 +5,9 @@ apply plugin: 'eclipse'
 
 eclipse.project.name = 'Processors HCS12'
 
-sleighCompile {
-	args '-l'
-}
+sleighCompileOptions = [
+	'-l'
+]
 
 dependencies {
 	compile project(':Base')

@@ -3,7 +3,6 @@ apply from: "$rootProject.projectDir/gradle/processorProject.gradle"
 apply plugin: 'eclipse'
 eclipse.project.name = 'Processors MCS96'
 
-sleighCompile {
-	args '-l'
-}
+sleighCompileOptions = [
+	'-l'
+]

@@ -10,6 +10,6 @@ dependencies {
 	compile project(':Base')
 }
 
-sleighCompile {
-	// args '-l'
-}
+sleighCompileOptions = [
+	// '-l'
+]
@@ -1,14 +1,17 @@
 <?xml version="1.0" encoding="UTF-8"?>
 
 <!--
- + Compile sleigh languages within this source language module.
- + Sleigh compiler options are read from build/data/sleighArgs.txt which is
- + updated by "gradle prepdev" based upon specification within module's build.gradle.
+ + Compile sleigh languages within this source language module via Eclipse or
+ + a command shell.
  +
- + Eclipse: right-click on this file and choose menu item "Run As->Ant Build"
- + From command line (requires ant install)
+ + * Eclipse: right-click on this file and choose menu item "Run As->Ant Build"
+ +
+ + * From command line (requires ant install)
  + - cd to data directory containing this file
  + - run ant
+ +
+ + Sleigh compiler options are read from build/data/sleighArgs.txt which is
+ + updated by "gradle prepdev" based upon specification within module's build.gradle.
 -->
 
 <project name="privateBuildDeveloper" default="sleigh-compile">

@@ -46,8 +49,9 @@
 			fork="true"
 			failonerror="true">
 			<jvmarg value="-Xmx2048M"/>
+			<arg value="-DBaseDir=../../../../../../" /> <!-- repos root directory -->
 			<arg value="-i"/>
-			<arg value="../build/data/sleighArgs.txt"/>
+			<arg value="../build/tmp/sleighArgs.txt"/>
 			<arg value="-a"/>
 			<arg value="./languages"/>
 		</java>
@@ -1,13 +1,16 @@
 <?xml version="1.0" encoding="UTF-8"?>
 
 <!--
- + Compile sleigh languages within this distribution language module.
- + Sleigh compiler options are read from the sleighArgs.txt file.
+ + Compile sleigh languages within this distribution language module via Eclipse or
+ + a command shell.
  +
- + Eclipse: right-click on this file and choose menu item "Run As->Ant Build"
- + From command line (requires ant install)
+ + * Eclipse: right-click on this file and choose menu item "Run As->Ant Build"
+ +
+ + * From command line (requires ant install)
  + - cd to data directory containing this file
  + - run ant
+ +
+ + Sleigh compiler options are read from sleighArgs.txt.
 -->
 
 <project name="privateBuildDistribution" default="sleigh-compile">

@@ -39,6 +42,7 @@
 			fork="true"
 			failonerror="true">
 			<jvmarg value="-Xmx2048M"/>
+			<arg value="-DBaseDir=../../../../../" /> <!-- Ghidra install directory -->
 			<arg value="-i"/>
 			<arg value="sleighArgs.txt"/>
 			<arg value="-a"/>
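For orientation only: based on the saveSleighArgs logic in the gradle script that follows, the development copy of a generated sleighArgs.txt (build/tmp/sleighArgs.txt) for a hypothetical module that depends on the MIPS processor module might contain something like

    -l
    -DMIPS=ghidra/Ghidra/Processors/MIPS

while the distribution copy would use a path starting at the application root (e.g. Ghidra/Processors/MIPS). The module name and option values here are illustrative assumptions, not part of the commit. The two ant targets above pass the appropriate file to the compiler via -i and supply -DBaseDir themselves: the developer build.xml points BaseDir at the repos root directory, the distribution build.xml at the Ghidra install directory, so the same relative definitions resolve in both layouts.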
@@ -7,8 +7,6 @@
 apply from: "$rootProject.projectDir/gradle/nativeProject.gradle"
 *****************************************************************************************/
 
-
-
 /*****************************************************************************************
 *
 * Create a configuration so the a dependency can be declared on the the software modeling

@@ -26,22 +24,72 @@ dependencies {
 
 /*****************************************************************************************
 *
-* Task to write sleigh compiler args to build/data/sleighArgs.txt for use with sleigh
-* external sleigh compiler.
+* Sleigh compile options to be written to sleighArgs.txt in support of the following
+* use cases:
+*   - Ant build using data/build.xml (development and distribution)
+*   - Eclipse Sleigh launcher (development only)
+*   - Ghidra runtime language rebuild (SleighLanguage.reloadLanguage; development and distribution)
+*   - Distribution build (sleighCompile task; development layout)
+*
+* This list may be added to or replaced by a specific processor project/module.
+*
+* Example: MIPS processor module dependency within a slaspec specified as:
+*
+*    @include "$(BaseDir)$(MIPS)/data/language/maips.sinc
+*
+* with the corresponding MIPS definition specified within the sleighCompileOptions
+* list specified within the module's build.gradle file:
+*
+*    sleighCompileOptions.add "-DMIPS=%%MIPS%%"
+*      -or-
+*    sleighCompileOptions = [
+*        "-l",
+*        "-DMIPS=%%MIPS%%"
+*    ]
+*
+*****************************************************************************************/
+ext.sleighCompileOptions = [ ]
+
+/*****************************************************************************************
+*
+* Check for invalid sleighCompileOptions
+*
+*****************************************************************************************/
+def checkSleighCompileOptions() {
+	sleighCompileOptions.each { a ->
+		def arg = a.trim()
+		assert !(arg.startsWith("-a") || arg.startsWith("-i")) : "Invalid sleighCompileOption: ${arg}"
+	}
+}
+
+/*****************************************************************************************
+*
+* Task to write sleigh compiler args for use with sleigh compiler.
+* Due to the possible presence of module dependency paths two different sleighArgs.txt
+* files are produced: one for development layout (build/tmp/sleighArgs.txt) and
+* one for distribution layout ([build/]data/sleighArgs.txt). When invoking the
+* Sleigh compiler and using a sleighArgs.txt file the appropriate 'BaseDir' property
+* must be specified. Withing a distribution install 'BaseDir' must specifiy the
+* path to the install directory while in a development layout 'BaseDir' must specify
+* the repos root directory which contains the 'ghidra' repo directory.
 *
 *****************************************************************************************/
 task saveSleighArgs {
 	def sleighArgsFile = file("build/data/sleighArgs.txt")
-	outputs.files sleighArgsFile
+	def sleighArgsDevFile = file("build/tmp/sleighArgs.txt")
+	outputs.files sleighArgsFile, sleighArgsDevFile
 	outputs.upToDateWhen { false }
 	doLast {
+		checkSleighCompileOptions()
 		sleighArgsFile.withWriter { out->
-			project.sleighCompile.args.each { a->
-				// don't save -a option
-				if (!"-a".equals(a)) {
-					out.println a
-				}
+			sleighCompileOptions.each { a->
+				out.println resolveSleighArg(a, false)
 			}
 		}
+		sleighArgsDevFile.withWriter { out->
+			sleighCompileOptions.each { a->
+				out.println resolveSleighArg(a, true)
+			}
+		}
 	}
 }

@@ -50,6 +98,7 @@ rootProject.prepDev.dependsOn(saveSleighArgs)
 apply plugin: 'base'
 clean {
 	delete file("build/data/sleighArgs.txt")
+	delete file("build/tmp/sleighArgs.txt")
 }
 
 /*****************************************************************************************

@@ -94,6 +143,7 @@ rootProject.assembleDistribution {
 *
 *****************************************************************************************/
 task sleighCompile (type: JavaExec) {
+	dependsOn saveSleighArgs
 	group = rootProject.GHIDRA_GROUP
 	description " Compiles all the sleigh languages. [gradle/processorProject.gradle]\n"
 

@@ -104,7 +154,12 @@ task sleighCompile (type: JavaExec) {
 	// Delay adding the directory argument until the first part of the execution phase, so
 	// that any extra args added by a project override will be added to the arg list before
 	// these arguments.
+	// NOTE: projects should no longer add arguments to this task and should instead
+	// add such args to the sleighCompileOptions list.
 	doFirst {
+		args "-i"
+		args "./build/tmp/sleighArgs.txt"
+		args "-DBaseDir=${getProjectReposRootPath()}"
 		args '-a'
 		args './data/languages'
 	}

@@ -115,11 +170,9 @@ task sleighCompile (type: JavaExec) {
 // The task that copies the common files to the distribution folder must depend on
 // the sleigh tasks before executing.
 rootProject.assembleDistribution.dependsOn sleighCompile
-rootProject.assembleDistribution.dependsOn saveSleighArgs
 
 // Add in this projects sleighCompile to the allSleighCompile task
 rootProject.allSleighCompile.dependsOn sleighCompile
-rootProject.allSleighCompile.dependsOn saveSleighArgs
 
 /*****************************************************************************************
 *

@@ -154,3 +207,55 @@ sleighCompile.outputs.files (taskOutputs)
 
 // define the sleigh compile inputs to saveSleighArgs to limit task creation to language modules
 saveSleighArgs.inputs.files (taskInputs)
+
+/*****************************************************************************************
+*
+* Gets the absolute repos root directory path with a trailing File separator.
+* This path may be used for specifying 'BaseDir' to the sleigh compiler within a
+* development layout.
+*
+*****************************************************************************************/
+def getProjectReposRootPath() {
+	return rootProject.projectDir.getParent() + File.separator
+}
+
+/*****************************************************************************************
+*
+* Filter a sleigh compiler argument replacing any project/module reference of the form
+* %%MODULE%% witha that MODULE's relative path. If useDevPath is true the path will
+* include the containing repo directory (e.g., ghidra/Ghidra/...), otherwise the
+* path should start at the application root 'Ghidra/'. Only a single replacement per
+* arg is supported.
+*
+* This mechanism relies on the relative depth of a language module project within a
+* repository directory hierarchy. In general language module projects must reside
+* within the directory Ghidra/Processors.
+*
+*****************************************************************************************/
+def resolveSleighArg(String arg, boolean useDevPath) {
+	arg = arg.trim()
+	int index = arg.indexOf("%%")
+	if (index < 0) {
+		return arg
+	}
+	String newArg = arg.substring(0, index)
+	String tail = arg.substring(index+2)
+	index = tail.indexOf("%%")
+	assert index > 0 : "Badly formed sleigh path-replacment option: ${arg}"
+	String moduleName = tail.substring(0, index)
+	tail = tail.substring(index+2)
+	def moduleProject = project(":${moduleName}")
+	def modulePath
+	if (useDevPath) {
+		// first path element is the containing repo directory
+		modulePath = moduleProject.projectDir.absolutePath
+		modulePath = modulePath.substring(getProjectReposRootPath().length())
+	}
+	else {
+		// first path element is the Ghidra directory
+		modulePath = getZipPath(moduleProject)
+	}
+	newArg += modulePath
+	newArg += tail
+	return newArg
+}
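Putting the pieces together, a processor module's build.gradle would now declare its compiler options (and any module dependencies) through sleighCompileOptions rather than by adding args to the sleighCompile task. A minimal sketch, assuming a hypothetical module that depends on the MIPS processor module; the '-l' flag and the MIPS reference are taken from the comment block in the diff above, everything else is illustrative:

    // build.gradle of a hypothetical processor module
    apply from: "$rootProject.projectDir/gradle/processorProject.gradle"
    apply plugin: 'eclipse'

    // replaces the old "sleighCompile { args '-l' }" override
    sleighCompileOptions = [
        '-l',
        '-DMIPS=%%MIPS%%'   // %%MIPS%% is rewritten by resolveSleighArg to the MIPS module path
    ]

With these options, saveSleighArgs would write a development-layout value such as '-DMIPS=ghidra/Ghidra/Processors/MIPS' into build/tmp/sleighArgs.txt and a path starting at 'Ghidra/' into the distribution copy, and a slaspec can then reference the dependency through an @include of the form "$(BaseDir)$(MIPS)/..." as in the comment's example. Note that checkSleighCompileOptions rejects '-a' and '-i' entries, since the sleighCompile task and the ant targets supply those arguments themselves.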