mirror of https://github.com/NationalSecurityAgency/ghidra.git
synced 2025-10-04 18:29:37 +02:00
GT-3294 Added support for sparse DB schemas. No need to version
ProgramUserData which does not utilize index tables
This commit is contained in:
parent fcb3151f94
commit 6783ae669f
34 changed files with 1306 additions and 379 deletions
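To illustrate the feature this commit introduces, here is a minimal usage sketch assembled from the constructors and methods visible in the diff below. It is a hedged illustration, not code from the commit; the dbHandle variable, table name, and key value are assumptions, and transaction handling is omitted.

// Declare column 1 ("Comment") as sparse: a null value in that column occupies no
// record storage and (per the FieldIndexTable changes below) produces no index entry.
Schema schema = new Schema(0, "Key",
    new Field[] { IntField.INSTANCE, StringField.INSTANCE },
    new String[] { "Flags", "Comment" },
    new int[] { 1 });                                   // sparse column indexes

Table table = dbHandle.createTable("ExampleTable", schema, new int[] { 1 }); // index column 1

Record rec = schema.createRecord(new LongField(1));     // returns a SparseRecord here
rec.setIntValue(0, 4);
rec.setString(1, null);                                 // stays null: stored sparsely, not indexed
table.putRecord(rec);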
@ -111,7 +111,7 @@ public class AddressIndexPrimaryKeyIteratorTest extends AbstractGhidraHeadedInte
|
|||
new AddressIndexPrimaryKeyIterator(myTable, 0, addrMap, true);
|
||||
long key = 0;
|
||||
while (iter.hasNext()) {
|
||||
assertEquals(key++, iter.next());
|
||||
assertEquals(key++, iter.next().getLongValue());
|
||||
}
|
||||
assertEquals(0x40, key);
|
||||
}
|
||||
|
@ -124,14 +124,14 @@ public class AddressIndexPrimaryKeyIteratorTest extends AbstractGhidraHeadedInte
|
|||
new AddressIndexPrimaryKeyIterator(myTable, 0, addrMap, minAddr, maxAddr, true);
|
||||
long key = 18;
|
||||
while (iter.hasNext()) {
|
||||
assertEquals(key++, iter.next());
|
||||
assertEquals(key++, iter.next().getLongValue());
|
||||
}
|
||||
assertEquals(37, key);
|
||||
|
||||
iter = new AddressIndexPrimaryKeyIterator(myTable, 0, addrMap, minAddr, maxAddr, false);
|
||||
key = 36;
|
||||
while (iter.hasPrevious()) {
|
||||
assertEquals(key--, iter.previous());
|
||||
assertEquals(key--, iter.previous().getLongValue());
|
||||
}
|
||||
assertEquals(17, key);
|
||||
}
|
||||
|
@ -143,14 +143,14 @@ public class AddressIndexPrimaryKeyIteratorTest extends AbstractGhidraHeadedInte
|
|||
new AddressIndexPrimaryKeyIterator(myTable, 0, addrMap, a, true);
|
||||
long key = 18;
|
||||
while (iter.hasNext()) {
|
||||
assertEquals(key++, iter.next());
|
||||
assertEquals(key++, iter.next().getLongValue());
|
||||
}
|
||||
assertEquals(0x40, key);
|
||||
|
||||
iter = new AddressIndexPrimaryKeyIterator(myTable, 0, addrMap, a, false);
|
||||
key = 18;
|
||||
while (iter.hasPrevious()) {
|
||||
assertEquals(key--, iter.previous());
|
||||
assertEquals(key--, iter.previous().getLongValue());
|
||||
}
|
||||
assertEquals(-1, key);
|
||||
}
|
||||
|
@ -161,25 +161,25 @@ public class AddressIndexPrimaryKeyIteratorTest extends AbstractGhidraHeadedInte
|
|||
set.addRange(addr(0x3002), addr(0x3004));
|
||||
AddressIndexPrimaryKeyIterator iter =
|
||||
new AddressIndexPrimaryKeyIterator(myTable, 0, addrMap, set, true);
|
||||
assertEquals(2, iter.next());
|
||||
assertEquals(3, iter.next());
|
||||
assertEquals(4, iter.next());
|
||||
assertEquals(2, iter.next().getLongValue());
|
||||
assertEquals(3, iter.next().getLongValue());
|
||||
assertEquals(4, iter.next().getLongValue());
|
||||
long key = 18;
|
||||
while (iter.hasNext()) {
|
||||
assertEquals(key++, iter.next());
|
||||
assertEquals(key++, iter.next().getLongValue());
|
||||
}
|
||||
assertEquals(37, key);
|
||||
|
||||
iter = new AddressIndexPrimaryKeyIterator(myTable, 0, addrMap, set, false);
|
||||
key = 36;
|
||||
while (iter.hasPrevious()) {
|
||||
assertEquals(key--, iter.previous());
|
||||
assertEquals(key--, iter.previous().getLongValue());
|
||||
if (key == 17) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
assertEquals(4, iter.previous());
|
||||
assertEquals(3, iter.previous());
|
||||
assertEquals(2, iter.previous());
|
||||
assertEquals(4, iter.previous().getLongValue());
|
||||
assertEquals(3, iter.previous().getLongValue());
|
||||
assertEquals(2, iter.previous().getLongValue());
|
||||
}
|
||||
}
@ -58,6 +58,17 @@ public class BinaryField extends Field {
|
|||
this.data = data;
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean isNull() {
|
||||
return data == null;
|
||||
}
|
||||
|
||||
@Override
|
||||
void setNull() {
|
||||
checkImmutable();
|
||||
data = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
void checkImmutable() {
|
||||
super.checkImmutable();
|
||||
|
@ -197,8 +208,9 @@ public class BinaryField extends Field {
|
|||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null || obj.getClass() != getClass())
|
||||
if (obj == null || obj.getClass() != getClass()) {
|
||||
return false;
|
||||
}
|
||||
BinaryField f = (BinaryField) obj;
|
||||
return Arrays.equals(f.data, data);
|
||||
}
@ -56,6 +56,17 @@ public final class BooleanField extends Field {
|
|||
this(b, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean isNull() {
|
||||
return value == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
void setNull() {
|
||||
checkImmutable();
|
||||
value = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a boolean data field with an initial value of b.
|
||||
* @param b initial value
|
||||
|
@ -116,8 +127,9 @@ public final class BooleanField extends Field {
|
|||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null || !(obj instanceof BooleanField))
|
||||
if (obj == null || !(obj instanceof BooleanField)) {
|
||||
return false;
|
||||
}
|
||||
BooleanField otherField = (BooleanField) obj;
|
||||
return otherField.value == value;
|
||||
}
|
||||
|
@ -125,20 +137,24 @@ public final class BooleanField extends Field {
|
|||
@Override
|
||||
public int compareTo(Field o) {
|
||||
BooleanField f = (BooleanField) o;
|
||||
if (value == f.value)
|
||||
if (value == f.value) {
|
||||
return 0;
|
||||
else if (value < f.value)
|
||||
}
|
||||
else if (value < f.value) {
|
||||
return -1;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
int compareTo(DataBuffer buffer, int offset) {
|
||||
byte otherValue = buffer.getByte(offset);
|
||||
if (value == otherValue)
|
||||
if (value == otherValue) {
|
||||
return 0;
|
||||
else if (value < otherValue)
|
||||
}
|
||||
else if (value < otherValue) {
|
||||
return -1;
|
||||
}
|
||||
return 1;
|
||||
}
@ -35,10 +35,15 @@ public final class ByteField extends Field {
|
|||
*/
|
||||
public static final ByteField MAX_VALUE = new ByteField(Byte.MAX_VALUE, true);
|
||||
|
||||
/**
|
||||
* Zero byte field value
|
||||
*/
|
||||
public static final ByteField ZERO_VALUE = new ByteField((byte) 0, true);
|
||||
|
||||
/**
|
||||
* Instance intended for defining a {@link Table} {@link Schema}
|
||||
*/
|
||||
public static final ByteField INSTANCE = MIN_VALUE;
|
||||
public static final ByteField INSTANCE = ZERO_VALUE;
|
||||
|
||||
private byte value;
|
||||
|
||||
|
@ -66,6 +71,17 @@ public final class ByteField extends Field {
|
|||
value = b;
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean isNull() {
|
||||
return value == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
void setNull() {
|
||||
checkImmutable();
|
||||
value = 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte getByteValue() {
|
||||
return value;
|
||||
|
@ -111,33 +127,38 @@ public final class ByteField extends Field {
|
|||
|
||||
@Override
|
||||
public String getValueAsString() {
|
||||
return "0x" + Integer.toHexString(value);
|
||||
return "0x" + Integer.toHexString(value & 0xff);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null || !(obj instanceof ByteField))
|
||||
if (obj == null || !(obj instanceof ByteField)) {
|
||||
return false;
|
||||
}
|
||||
return ((ByteField) obj).value == value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareTo(Field o) {
|
||||
ByteField f = (ByteField) o;
|
||||
if (value == f.value)
|
||||
if (value == f.value) {
|
||||
return 0;
|
||||
else if (value < f.value)
|
||||
}
|
||||
else if (value < f.value) {
|
||||
return -1;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
int compareTo(DataBuffer buffer, int offset) {
|
||||
byte otherValue = buffer.getByte(offset);
|
||||
if (value == otherValue)
|
||||
if (value == otherValue) {
|
||||
return 0;
|
||||
else if (value < otherValue)
|
||||
}
|
||||
else if (value < otherValue) {
|
||||
return -1;
|
||||
}
|
||||
return 1;
|
||||
}
@ -881,22 +881,24 @@ public class DBHandle {
|
|||
}
|
||||
|
||||
/**
|
||||
* Creates a new table with the given name, version and type.
|
||||
* Creates a new table with the given name and schema.
|
||||
* @param name table name
|
||||
* @param schema table schema
|
||||
* @return new table instance
|
||||
* @throws IOException if IO error occurs during table creation
|
||||
*/
|
||||
public synchronized Table createTable(String name, Schema schema) throws IOException {
|
||||
|
||||
if (tables.containsKey(name)) {
|
||||
throw new IOException("Table already exists");
|
||||
}
|
||||
Table table = new Table(this, masterTable.createTableRecord(name, schema, -1));
|
||||
tables.put(name, table);
|
||||
tableAdded(table);
|
||||
return table;
|
||||
public Table createTable(String name, Schema schema) throws IOException {
|
||||
return createTable(name, schema, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new table with the given name, version and type.
|
||||
* Creates a new table with the given name and schema.
|
||||
* Create secondary indexes as specified by the array of column indexes.
|
||||
* @param name table name
|
||||
* @param schema table schema
|
||||
* @param indexedColumns index table columns or null
|
||||
* @return new table instance
|
||||
* @throws IOException if IO error occurs during table creation
|
||||
*/
|
||||
public synchronized Table createTable(String name, Schema schema, int[] indexedColumns)
|
||||
throws IOException {
|
||||
|
@ -906,9 +908,11 @@ public class DBHandle {
|
|||
}
|
||||
Table table = new Table(this, masterTable.createTableRecord(name, schema, -1));
|
||||
tables.put(name, table);
|
||||
if (indexedColumns != null) {
|
||||
for (int indexedColumn : indexedColumns) {
|
||||
IndexTable.createIndexTable(table, indexedColumn);
|
||||
}
|
||||
}
|
||||
tableAdded(table);
|
||||
return table;
|
||||
}
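For context, a small hedged example of the two createTable overloads documented above (the dbHandle variable and table names are illustrative); the two-argument form now simply delegates to the three-argument form with a null index list.

Table plain   = dbHandle.createTable("Comments", schema);                 // no secondary indexes
Table indexed = dbHandle.createTable("Symbols", schema, new int[] { 0 }); // secondary index on column 0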
@ -23,18 +23,32 @@ import db.buffers.DataBuffer;
|
|||
* <code>Field</code> is an abstract data wrapper for use with Records.
|
||||
* Note that when comparing two Field instances both must be of the same
|
||||
* class.
|
||||
*
|
||||
* <p>Stored Schema Field Type Encoding:</p>
|
||||
*
|
||||
* <p><U>8-bit Legacy Field Type Encoding (I....FFF)</U></p>
|
||||
* Supported encodings: 0x00..0x06 and 0x80..0x86,
|
||||
* where:
|
||||
* <pre>
|
||||
* FFF - indexed field type (0..6)
|
||||
* I - index field indicator (only long primary keys were supported)
|
||||
* </pre>
|
||||
*
|
||||
* <p><U>8-bit Field Type Encoding (PPPPFFFF)</U></p>
|
||||
* (Reserved for future field extensions: 0x88 and 0xf0..0xff)
|
||||
* <pre>
|
||||
* 0xff - see {@link Schema#FIELD_EXTENSION_INDICATOR}
|
||||
* </pre>
|
||||
* where:
|
||||
* <pre>
|
||||
* FFFF - normal/indexed field type
|
||||
* PPPP - indexed table primary key type (1000b: LegacyIndexField)
|
||||
* </pre>
|
||||
*/
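As a concrete reading of the PPPPFFFF layout described above (a sketch; the nibble shift and masks are assumed to match FIELD_TYPE_MASK and INDEX_PRIMARY_KEY_TYPE_MASK used later in this file):

// byte encodedType = (byte) ((primaryKeyTypeNibble << 4) | indexedFieldTypeNibble);
// PPPP == 0      -> plain field of type FFFF
// PPPP == 1000b  -> LegacyIndexField over a long primary key (LEGACY_INDEX_LONG_TYPE)
// 0x88 and 0xF0..0xFF stay reserved (0xFF doubles as Schema.FIELD_EXTENSION_INDICATOR).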
|
||||
public abstract class Field implements Comparable<Field> {
|
||||
|
||||
public static final Field[] EMPTY_ARRAY = new Field[0];
|
||||
|
||||
/**
|
||||
* 8-bit Field Type Encoding (PPPPFFFF)
|
||||
* where:
|
||||
* FFFF - normal/indexed field type
|
||||
* PPPP - indexed table primary key type (1000b indicates LegacyIndexField)
|
||||
*/
|
||||
|
||||
/**
|
||||
* Field type for ByteField
|
||||
* @see db.ByteField
|
||||
|
@ -86,11 +100,21 @@ public abstract class Field implements Comparable<Field> {
|
|||
/**
|
||||
* Legacy Index Primary Key Field type for LongField
|
||||
* which was previously a boolean indicator for an index
|
||||
* field with assumed long primary key.
|
||||
* field with assumed long primary key. Applies only
|
||||
* to upper-nibble. This value in the lower-nibble
|
||||
* is reserved for use in the special-purpose byte value 0x88.
|
||||
* (see {@link LegacyIndexField})
|
||||
*/
|
||||
static final byte LEGACY_INDEX_LONG_TYPE = 8;
|
||||
|
||||
// Available field types (6): 0x9..0xE
|
||||
|
||||
/**
|
||||
* Reserved field encoding. Intended for special purpose
|
||||
* schema used (e.g.
|
||||
*/
|
||||
static final byte FIELD_RESERVED_15_TYPE = 0xf;
|
||||
|
||||
/**
|
||||
* Field base type mask
|
||||
*/
|
||||
|
@ -382,6 +406,19 @@ public abstract class Field implements Comparable<Field> {
|
|||
*/
|
||||
abstract Field getMaxValue();
|
||||
|
||||
/**
|
||||
* Determine if the field value is null (or zero for
|
||||
* fixed-length fields)
|
||||
* @return true if null/zero else false
|
||||
*/
|
||||
abstract boolean isNull();
|
||||
|
||||
/**
|
||||
* Set this field to its null/zero value
|
||||
* @throws IllegalFieldAccessException thrown if this field is immutable or is an index field
|
||||
*/
|
||||
abstract void setNull();
|
||||
|
||||
/**
|
||||
* Performs a fast in-place comparison of this field value with another
|
||||
* field value stored within the specified buffer at the the specified offset.
|
||||
|
@ -400,7 +437,10 @@ public abstract class Field implements Comparable<Field> {
|
|||
* @throws UnsupportedFieldException if unsupported fieldType specified
|
||||
*/
|
||||
static Field getField(byte fieldType) throws UnsupportedFieldException {
|
||||
|
||||
if (fieldType == 0x88) {
|
||||
// 0x88 - Reserved value (future expanded Field encoding)
|
||||
throw new UnsupportedFieldException(fieldType);
|
||||
}
|
||||
if ((fieldType & INDEX_PRIMARY_KEY_TYPE_MASK) == 0) {
|
||||
switch (fieldType & FIELD_TYPE_MASK) {
|
||||
case LONG_TYPE:
|
||||
|
@ -431,6 +471,10 @@ public abstract class Field implements Comparable<Field> {
|
|||
UnsupportedFieldException(byte fieldType) {
|
||||
super("Unsupported DB field type: 0x" + Integer.toHexString(fieldType & 0xff));
|
||||
}
|
||||
|
||||
UnsupportedFieldException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
}
|
||||
|
||||
/**
@ -30,8 +30,6 @@ public class FieldIndexTable extends IndexTable {
|
|||
|
||||
private static final String[] fieldNames = {};
|
||||
|
||||
private final int indexColumn;
|
||||
|
||||
private final IndexField indexKeyType;
|
||||
|
||||
/**
|
||||
|
@ -55,7 +53,6 @@ public class FieldIndexTable extends IndexTable {
|
|||
*/
|
||||
FieldIndexTable(Table primaryTable, TableRecord indexTableRecord) throws IOException {
|
||||
super(primaryTable, indexTableRecord);
|
||||
this.indexColumn = indexTableRecord.getIndexedColumn();
|
||||
indexKeyType = (IndexField) indexTable.getSchema().getKeyFieldType();
|
||||
}
|
||||
|
||||
|
@ -111,7 +108,10 @@ public class FieldIndexTable extends IndexTable {
|
|||
|
||||
@Override
|
||||
void addEntry(Record record) throws IOException {
|
||||
Field indexedField = record.getField(colIndex);
|
||||
Field indexedField = record.getField(indexColumn);
|
||||
if (isSparseIndex && indexedField.isNull()) {
|
||||
return;
|
||||
}
|
||||
IndexField f = indexKeyType.newIndexField(indexedField, record.getKeyField());
|
||||
Record rec = indexTable.getSchema().createRecord(f);
|
||||
indexTable.putRecord(rec);
|
||||
|
@ -119,7 +119,10 @@ public class FieldIndexTable extends IndexTable {
|
|||
|
||||
@Override
|
||||
void deleteEntry(Record record) throws IOException {
|
||||
Field indexedField = record.getField(colIndex);
|
||||
Field indexedField = record.getField(indexColumn);
|
||||
if (isSparseIndex && indexedField.isNull()) {
|
||||
return;
|
||||
}
|
||||
IndexField f = indexKeyType.newIndexField(indexedField, record.getKeyField());
|
||||
indexTable.deleteRecord(f);
|
||||
}
@ -21,6 +21,9 @@ package db;
|
|||
*/
|
||||
public abstract class FixedField extends BinaryField {
|
||||
|
||||
@SuppressWarnings("hiding")
|
||||
public static final FixedField10 INSTANCE = null;
|
||||
|
||||
/**
|
||||
* Construct a fixed-length field
|
||||
* @param data initial value
|
||||
|
@ -35,6 +38,9 @@ public abstract class FixedField extends BinaryField {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
abstract boolean isNull();
|
||||
|
||||
@Override
|
||||
void truncate(int length) {
|
||||
throw new UnsupportedOperationException("Field may not be truncated");
@ -22,7 +22,8 @@ import generic.util.UnsignedDataUtils;
|
|||
import ghidra.util.BigEndianDataConverter;
|
||||
|
||||
/**
|
||||
* <code>FixedField10</code> is a 10-byte fixed-length binary field.
|
||||
* <code>FixedField10</code> provide an unsigned 10-byte fixed-length field value.
|
||||
* The most-significant byte corresponds to index-0 (i.e., data[0]).
|
||||
*/
|
||||
public class FixedField10 extends FixedField {
|
||||
|
||||
|
@ -36,11 +37,16 @@ public class FixedField10 extends FixedField {
|
|||
*/
|
||||
public static FixedField10 MAX_VALUE = new FixedField10(-1L, (short) -1, true);
|
||||
|
||||
/**
|
||||
* Zero fixed10 field value
|
||||
*/
|
||||
public static final FixedField10 ZERO_VALUE = new FixedField10(null, true);
|
||||
|
||||
/**
|
||||
* Instance intended for defining a {@link Table} {@link Schema}
|
||||
*/
|
||||
@SuppressWarnings("hiding")
|
||||
public static final FixedField10 INSTANCE = MIN_VALUE;
|
||||
public static final FixedField10 INSTANCE = ZERO_VALUE;
|
||||
|
||||
// This implementation uses both a data byte array and short+long variables
|
||||
// for data storage. While the short+long is always available, the data
|
||||
|
@ -83,6 +89,11 @@ public class FixedField10 extends FixedField {
|
|||
this.lo2 = lo2;
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean isNull() {
|
||||
return hi8 == 0 && lo2 == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareTo(Field o) {
|
||||
if (!(o instanceof FixedField10)) {
|
||||
|
@ -144,10 +155,15 @@ public class FixedField10 extends FixedField {
|
|||
|
||||
@Override
|
||||
public void setBinaryData(byte[] data) {
|
||||
this.data = data;
|
||||
if (data == null) {
|
||||
hi8 = 0;
|
||||
lo2 = 0;
|
||||
return;
|
||||
}
|
||||
if (data.length != 10) {
|
||||
throw new IllegalArgumentException("Invalid FixedField10 length: " + data.length);
|
||||
}
|
||||
this.data = data;
|
||||
hi8 = BigEndianDataConverter.INSTANCE.getLong(data, 0);
|
||||
lo2 = BigEndianDataConverter.INSTANCE.getShort(data, 8);
|
||||
}
|
||||
|
@ -195,15 +211,19 @@ public class FixedField10 extends FixedField {
|
|||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj)
|
||||
if (this == obj) {
|
||||
return true;
|
||||
if (getClass() != obj.getClass())
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
FixedField10 other = (FixedField10) obj;
|
||||
if (hi8 != other.hi8)
|
||||
if (hi8 != other.hi8) {
|
||||
return false;
|
||||
if (lo2 != other.lo2)
|
||||
}
|
||||
if (lo2 != other.lo2) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
@ -31,7 +31,7 @@ import ghidra.util.exception.AssertException;
|
|||
* <pre>
|
||||
* | NodeType(1) | KeyCount(4) | PrevLeafId(4) | NextLeafId(4) | Key0(L) | RecOffset0(4) | IndFlag0(1) |...
|
||||
*
|
||||
* | KeyN(L) | RecOffsetN(4) | IndFlagN(1) |...<FreeSpace>... | RecN |... | Rec1 |
|
||||
* | KeyN(L) | RecOffsetN(4) | IndFlagN(1) |...<FreeSpace>... | RecN |... | Rec0 |
|
||||
* </pre>
|
||||
* IndFlag - if not zero the record has been stored within a chained DBBuffer
|
||||
* whose 4-byte integer buffer ID has been stored within this leaf at the record offset.
|
||||
|
@ -216,8 +216,9 @@ class FixedKeyVarRecNode extends FixedKeyRecordNode {
|
|||
@Override
|
||||
public Record getRecord(Field key, Schema schema) throws IOException {
|
||||
int index = getKeyIndex(key);
|
||||
if (index < 0)
|
||||
if (index < 0) {
|
||||
return null;
|
||||
}
|
||||
return getRecord(schema, index);
|
||||
}
|
||||
|
||||
|
@ -351,7 +352,9 @@ class FixedKeyVarRecNode extends FixedKeyRecordNode {
|
|||
}
|
||||
|
||||
if ((len + entrySize) > getFreeSpace())
|
||||
{
|
||||
return false; // insufficient space for record storage
|
||||
}
|
||||
|
||||
// Make room for new record
|
||||
int offset = moveRecords(index, -len);
|
||||
|
@ -384,8 +387,9 @@ class FixedKeyVarRecNode extends FixedKeyRecordNode {
|
|||
@Override
|
||||
public void remove(int index) throws IOException {
|
||||
|
||||
if (index < 0 || index >= keyCount)
|
||||
if (index < 0 || index >= keyCount) {
|
||||
throw new AssertException();
|
||||
}
|
||||
|
||||
if (hasIndirectStorage(index)) {
|
||||
removeChainedBuffer(buffer.getInt(getRecordDataOffset(index)));
@ -55,6 +55,16 @@ class IndexField extends Field {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean isNull() {
|
||||
return false; // not-applicable
|
||||
}
|
||||
|
||||
@Override
|
||||
void setNull() {
|
||||
throw new IllegalFieldAccessException("Index field may not be set null");
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the indexed field value. If the original value exceeded
|
||||
* {@link #MAX_INDEX_FIELD_LENGTH} in length the returned value will
|
||||
|
@ -279,6 +289,10 @@ class IndexField extends Field {
|
|||
Field indexedField = Field.getField((byte) (fieldType & FIELD_TYPE_MASK));
|
||||
|
||||
byte primaryKeyFeldType = (byte) (fieldType >> INDEX_FIELD_TYPE_SHIFT & FIELD_TYPE_MASK);
|
||||
if (primaryKeyFeldType == FIELD_RESERVED_15_TYPE) {
|
||||
// 0xf0..0xff - Reserved for Schema use
|
||||
throw new UnsupportedFieldException(fieldType);
|
||||
}
|
||||
if (primaryKeyFeldType == LEGACY_INDEX_LONG_TYPE) {
|
||||
return new LegacyIndexField(indexedField);
|
||||
}
@ -53,7 +53,8 @@ abstract class IndexTable {
|
|||
/**
|
||||
* Indexed column within primary table schema.
|
||||
*/
|
||||
protected final int colIndex;
|
||||
protected final int indexColumn;
|
||||
protected final boolean isSparseIndex;
|
||||
|
||||
/**
|
||||
* Construct a new or existing secondary index. An existing index must have
|
||||
|
@ -70,7 +71,8 @@ abstract class IndexTable {
|
|||
this.primaryTable = primaryTable;
|
||||
this.indexTableRecord = indexTableRecord;
|
||||
this.indexTable = new Table(primaryTable.getDBHandle(), indexTableRecord);
|
||||
this.colIndex = indexTableRecord.getIndexedColumn();
|
||||
this.indexColumn = indexTableRecord.getIndexedColumn();
|
||||
this.isSparseIndex = primaryTable.getSchema().isSparseColumn(indexColumn);
|
||||
primaryTable.addIndex(this);
|
||||
}
|
||||
|
||||
|
@ -118,7 +120,8 @@ abstract class IndexTable {
|
|||
* @throws CancelledException if task cancelled
|
||||
*/
|
||||
boolean isConsistent(TaskMonitor monitor) throws IOException, CancelledException {
|
||||
return indexTable.isConsistent(primaryTable.getSchema().getFieldNames()[colIndex], monitor);
|
||||
return indexTable.isConsistent(primaryTable.getSchema().getFieldNames()[indexColumn],
|
||||
monitor);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -142,7 +145,7 @@ abstract class IndexTable {
|
|||
* @return indexed column number
|
||||
*/
|
||||
int getColumnIndex() {
|
||||
return colIndex;
|
||||
return indexColumn;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -152,7 +155,7 @@ abstract class IndexTable {
|
|||
*/
|
||||
TableStatistics getStatistics() throws IOException {
|
||||
TableStatistics stats = indexTable.getStatistics();
|
||||
stats.indexColumn = colIndex;
|
||||
stats.indexColumn = indexColumn;
|
||||
return stats;
|
||||
}
|
||||
|
||||
|
@ -194,10 +197,10 @@ abstract class IndexTable {
|
|||
|
||||
/**
|
||||
* Delete an entry from this index.
|
||||
* @param record deleted record
|
||||
* @param oldRecord deleted record
|
||||
* @throws IOException if IO error occurs
|
||||
*/
|
||||
abstract void deleteEntry(Record record) throws IOException;
|
||||
abstract void deleteEntry(Record oldRecord) throws IOException;
|
||||
|
||||
/**
|
||||
* Delete all records within this index table.
@ -35,10 +35,15 @@ public final class IntField extends Field {
|
|||
*/
|
||||
public static final IntField MAX_VALUE = new IntField(Integer.MAX_VALUE, true);
|
||||
|
||||
/**
|
||||
* Zero int field value
|
||||
*/
|
||||
public static final IntField ZERO_VALUE = new IntField(0, true);
|
||||
|
||||
/**
|
||||
* Instance intended for defining a {@link Table} {@link Schema}
|
||||
*/
|
||||
public static final IntField INSTANCE = MIN_VALUE;
|
||||
public static final IntField INSTANCE = ZERO_VALUE;
|
||||
|
||||
private int value;
|
||||
|
||||
|
@ -66,6 +71,17 @@ public final class IntField extends Field {
|
|||
value = i;
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean isNull() {
|
||||
return value == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
void setNull() {
|
||||
checkImmutable();
|
||||
value = 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getIntValue() {
|
||||
return value;
|
||||
|
@ -116,28 +132,33 @@ public final class IntField extends Field {
|
|||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null || !(obj instanceof IntField))
|
||||
if (obj == null || !(obj instanceof IntField)) {
|
||||
return false;
|
||||
}
|
||||
return ((IntField) obj).value == value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareTo(Field o) {
|
||||
IntField f = (IntField) o;
|
||||
if (value == f.value)
|
||||
if (value == f.value) {
|
||||
return 0;
|
||||
else if (value < f.value)
|
||||
}
|
||||
else if (value < f.value) {
|
||||
return -1;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
int compareTo(DataBuffer buffer, int offset) {
|
||||
int otherValue = buffer.getInt(offset);
|
||||
if (value == otherValue)
|
||||
if (value == otherValue) {
|
||||
return 0;
|
||||
else if (value < otherValue)
|
||||
}
|
||||
else if (value < otherValue) {
|
||||
return -1;
|
||||
}
|
||||
return 1;
|
||||
}
@ -35,10 +35,15 @@ public final class LongField extends Field {
|
|||
*/
|
||||
public static final LongField MAX_VALUE = new LongField(Long.MAX_VALUE, true);
|
||||
|
||||
/**
|
||||
* Zero long field value
|
||||
*/
|
||||
public static final LongField ZERO_VALUE = new LongField(0, true);
|
||||
|
||||
/**
|
||||
* Instance intended for defining a {@link Table} {@link Schema}
|
||||
*/
|
||||
public static final LongField INSTANCE = MIN_VALUE;
|
||||
public static final LongField INSTANCE = ZERO_VALUE;
|
||||
|
||||
private long value;
|
||||
|
||||
|
@ -66,6 +71,17 @@ public final class LongField extends Field {
|
|||
value = l;
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean isNull() {
|
||||
return value == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
void setNull() {
|
||||
checkImmutable();
|
||||
value = 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getLongValue() {
|
||||
return value;
|
||||
|
@ -116,8 +132,9 @@ public final class LongField extends Field {
|
|||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null || !(obj instanceof LongField))
|
||||
if (obj == null || !(obj instanceof LongField)) {
|
||||
return false;
|
||||
}
|
||||
return ((LongField) obj).value == value;
|
||||
}
|
||||
|
||||
|
@ -127,20 +144,24 @@ public final class LongField extends Field {
|
|||
throw new UnsupportedOperationException("may only compare similar Field types");
|
||||
}
|
||||
LongField f = (LongField) o;
|
||||
if (value == f.value)
|
||||
if (value == f.value) {
|
||||
return 0;
|
||||
else if (value < f.value)
|
||||
}
|
||||
else if (value < f.value) {
|
||||
return -1;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
int compareTo(DataBuffer buffer, int offset) {
|
||||
long otherValue = buffer.getLong(offset);
|
||||
if (value == otherValue)
|
||||
if (value == otherValue) {
|
||||
return 0;
|
||||
else if (value < otherValue)
|
||||
}
|
||||
else if (value < otherValue) {
|
||||
return -1;
|
||||
}
|
||||
return 1;
|
||||
}
@ -307,6 +307,7 @@ abstract class LongKeyRecordNode extends LongKeyNode implements RecordNode {
|
|||
* Insert or Update a record.
|
||||
* @param record data record with long key
|
||||
* @param table table which will be notified when record is inserted or updated.
|
||||
* This must be specified when table has indexed columns.
|
||||
* @return root node which may have changed.
|
||||
* @throws IOException thrown if IO error occurs
|
||||
*/
|
||||
|
@ -318,6 +319,7 @@ abstract class LongKeyRecordNode extends LongKeyNode implements RecordNode {
|
|||
// Handle record update case
|
||||
if (index >= 0) {
|
||||
if (table != null) {
|
||||
// update index tables associated with table
|
||||
table.updatedRecord(getRecord(table.getSchema(), index), record);
|
||||
}
|
||||
LongKeyNode newRoot = updateRecord(index, record);
|
||||
|
@ -331,6 +333,7 @@ abstract class LongKeyRecordNode extends LongKeyNode implements RecordNode {
|
|||
parent.keyChanged(getKey(1), key);
|
||||
}
|
||||
if (table != null) {
|
||||
// update index tables associated with table
|
||||
table.insertedRecord(record);
|
||||
}
|
||||
return getRoot();
|
||||
|
@ -340,6 +343,7 @@ abstract class LongKeyRecordNode extends LongKeyNode implements RecordNode {
|
|||
if (index == keyCount) {
|
||||
LongKeyNode newRoot = appendNewLeaf(record);
|
||||
if (table != null) {
|
||||
// update index tables associated with table
|
||||
table.insertedRecord(record);
|
||||
}
|
||||
return newRoot;
@ -22,31 +22,45 @@ import ghidra.util.exception.AssertException;
|
|||
|
||||
/**
|
||||
* <code>Record</code> provides a portable container for data
|
||||
* associated with a fixed schema defined by a list of Fields.
|
||||
* A record instance contains both a primary key and zero or more data fields
|
||||
* which define the schema. Either a Field object or a long value
|
||||
* may be used as the primary key.
|
||||
*
|
||||
* associated with a fixed schema.
|
||||
* A record instance contains both a primary key and zero or more data fields.
|
||||
*/
|
||||
public class Record implements Comparable<Record> {
|
||||
|
||||
private Field key;
|
||||
final Schema schema;
|
||||
|
||||
private Field key;
|
||||
private Field[] fieldValues;
|
||||
private boolean dirty = false;
|
||||
|
||||
private int length = -1;
|
||||
private boolean isVariableLength;
|
||||
|
||||
boolean dirty = false;
|
||||
|
||||
/**
|
||||
* Construct a new record.
|
||||
* The schema is derived from the field values supplied.
|
||||
* @param key primary key value
|
||||
* @param schema
|
||||
* Construct an empty record corresponding to the specified schema and record key
|
||||
* @param schema record schema
|
||||
* @param key record key
|
||||
*/
|
||||
Record(Field key, Field[] fieldValues) {
|
||||
Record(Schema schema, Field key) {
|
||||
this.schema = schema;
|
||||
this.key = key;
|
||||
this.fieldValues = fieldValues;
|
||||
if (!schema.getKeyFieldType().isSameType(key)) {
|
||||
throw new IllegalArgumentException("key differs from schema key type");
|
||||
}
|
||||
Field[] schemaFields = schema.getFields();
|
||||
fieldValues = new Field[schemaFields.length];
|
||||
for (int colIndex = 0; colIndex < schemaFields.length; colIndex++) {
|
||||
try {
|
||||
fieldValues[colIndex] = schemaFields[colIndex].newField();
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw new AssertException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected void invalidateLength() {
|
||||
length = -1;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -54,8 +68,9 @@ public class Record implements Comparable<Record> {
|
|||
* @param key primary key
|
||||
*/
|
||||
public void setKey(long key) {
|
||||
if (!(this.key instanceof LongField))
|
||||
if (!(this.key instanceof LongField)) {
|
||||
throw new AssertException();
|
||||
}
|
||||
this.key = new LongField(key);
|
||||
}
|
||||
|
||||
|
@ -64,8 +79,9 @@ public class Record implements Comparable<Record> {
|
|||
* @param key primary key
|
||||
*/
|
||||
public void setKey(Field key) {
|
||||
if (!this.key.getClass().equals(key.getClass()))
|
||||
if (!this.key.getClass().equals(key.getClass())) {
|
||||
throw new AssertException();
|
||||
}
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
|
@ -153,6 +169,7 @@ public class Record implements Comparable<Record> {
|
|||
if (fieldValues[colIndex].getFieldType() != value.getFieldType()) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
invalidateLength();
|
||||
fieldValues[colIndex] = value;
|
||||
}
|
||||
|
||||
|
@ -203,14 +220,12 @@ public class Record implements Comparable<Record> {
|
|||
* @return Record
|
||||
*/
|
||||
public Record copy() {
|
||||
|
||||
Field newKey = key.copyField();
|
||||
Field[] fields = new Field[fieldValues.length];
|
||||
Record r = schema.createRecord(key.copyField());
|
||||
Field[] fields = r.getFields();
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
Field f = fieldValues[i];
|
||||
fields[i] = f.copyField();
|
||||
r.setField(i, fields[i].copyField());
|
||||
}
|
||||
return new Record(newKey, fields);
|
||||
return r;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -219,18 +234,25 @@ public class Record implements Comparable<Record> {
|
|||
* fields within this record when written to a standard Buffer.
|
||||
* @return int stored record length
|
||||
*/
|
||||
public int length() {
|
||||
public final int length() {
|
||||
if (length < 0) {
|
||||
length = 0;
|
||||
isVariableLength = false;
|
||||
for (int i = 0; i < fieldValues.length; i++) {
|
||||
length += fieldValues[i].length();
|
||||
isVariableLength |= fieldValues[i].isVariableLength();
|
||||
}
|
||||
length = computeLength();
|
||||
}
|
||||
return length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute record storage length
|
||||
* @return record storage length
|
||||
*/
|
||||
int computeLength() {
|
||||
int len = 0;
|
||||
for (Field fieldValue : fieldValues) {
|
||||
len += fieldValue.length();
|
||||
}
|
||||
return len;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the long value for the specified field.
|
||||
* @param colIndex field index
|
||||
|
@ -355,7 +377,7 @@ public class Record implements Comparable<Record> {
|
|||
*/
|
||||
public void setBinaryData(int colIndex, byte[] bytes) {
|
||||
dirty = true;
|
||||
length = -1;
|
||||
invalidateLength();
|
||||
fieldValues[colIndex].setBinaryData(bytes);
|
||||
}
|
||||
|
||||
|
@ -377,7 +399,7 @@ public class Record implements Comparable<Record> {
|
|||
*/
|
||||
public void setString(int colIndex, String str) {
|
||||
dirty = true;
|
||||
length = -1;
|
||||
invalidateLength();
|
||||
fieldValues[colIndex].setString(str);
|
||||
}
|
||||
|
||||
|
@ -388,8 +410,8 @@ public class Record implements Comparable<Record> {
|
|||
* @throws IOException thrown if IO error occurs
|
||||
*/
|
||||
public void write(Buffer buf, int offset) throws IOException {
|
||||
for (int i = 0; i < fieldValues.length; i++) {
|
||||
offset = fieldValues[i].write(buf, offset);
|
||||
for (Field fieldValue : fieldValues) {
|
||||
offset = fieldValue.write(buf, offset);
|
||||
}
|
||||
dirty = false;
|
||||
}
|
||||
|
@ -401,8 +423,8 @@ public class Record implements Comparable<Record> {
|
|||
* @throws IOException thrown if IO error occurs
|
||||
*/
|
||||
public void read(Buffer buf, int offset) throws IOException {
|
||||
for (int i = 0; i < fieldValues.length; i++) {
|
||||
offset = fieldValues[i].read(buf, offset);
|
||||
for (Field fieldValue : fieldValues) {
|
||||
offset = fieldValue.read(buf, offset);
|
||||
}
|
||||
dirty = false;
|
||||
}
|
||||
|
@ -427,8 +449,9 @@ public class Record implements Comparable<Record> {
|
|||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (!(obj instanceof Record))
|
||||
if (!(obj instanceof Record)) {
|
||||
return false;
|
||||
}
|
||||
Record rec = (Record) obj;
|
||||
return key.equals(rec.key) && Arrays.equals(fieldValues, rec.fieldValues);
|
||||
}
@ -15,8 +15,13 @@
|
|||
*/
|
||||
package db;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.StringTokenizer;
|
||||
import java.util.*;
|
||||
|
||||
import org.apache.commons.lang3.ArrayUtils;
|
||||
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.google.common.collect.ImmutableSet.Builder;
|
||||
import com.google.common.primitives.Bytes;
|
||||
|
||||
import db.Field.UnsupportedFieldException;
|
||||
import ghidra.util.exception.AssertException;
|
||||
|
@ -28,6 +33,10 @@ public class Schema {
|
|||
|
||||
private static final String NAME_SEPARATOR = ";";
|
||||
|
||||
static final byte FIELD_EXTENSION_INDICATOR = -1;
|
||||
|
||||
private static final byte SPARSE_FIELD_LIST_EXTENSION = 1;
|
||||
|
||||
private int version;
|
||||
|
||||
private Field keyType;
|
||||
|
@ -35,6 +44,7 @@ public class Schema {
|
|||
|
||||
private Field[] fields;
|
||||
private String[] fieldNames;
|
||||
private Set<Integer> sparseColumnSet;
|
||||
|
||||
private boolean isVariableLength;
|
||||
private int fixedLength;
|
||||
|
@ -48,17 +58,21 @@ public class Schema {
|
|||
* @param keyName primary key name
|
||||
* @param fields array of column fields (representative instances)
|
||||
* @param fieldNames array of column field names
|
||||
* @param sparseColumns column indexes corresponding to those
|
||||
* columns which utilize sparse storage (null if no sparse columns).
|
||||
* Valid sparse column indexes are in the range 0..127.
|
||||
* @throws IllegalArgumentException invalid parameters
|
||||
*/
|
||||
public Schema(int version, Field keyField, String keyName, Field[] fields,
|
||||
String[] fieldNames) {
|
||||
String[] fieldNames, int[] sparseColumns) {
|
||||
this.version = version;
|
||||
this.keyType = keyField;
|
||||
this.keyName = keyName;
|
||||
this.fields = fields;
|
||||
this.fieldNames = fieldNames;
|
||||
if (fields.length != fieldNames.length)
|
||||
if (fields.length != fieldNames.length) {
|
||||
throw new IllegalArgumentException("fieldNames and fields lengths differ");
|
||||
}
|
||||
isVariableLength = false;
|
||||
fixedLength = 0;
|
||||
for (int colIndex = 0; colIndex < fields.length; colIndex++) {
|
||||
|
@ -67,14 +81,35 @@ public class Schema {
|
|||
isVariableLength = true;
|
||||
}
|
||||
fixedLength += field.length();
|
||||
if (fieldNames[colIndex].indexOf(NAME_SEPARATOR) >= 0)
|
||||
if (fieldNames[colIndex].indexOf(NAME_SEPARATOR) >= 0) {
|
||||
throw new IllegalArgumentException("field names may not contain ';'");
|
||||
}
|
||||
}
|
||||
try {
|
||||
initializeSparseColumnSet(ArrayUtils.toObject(sparseColumns));
|
||||
}
|
||||
catch (UnsupportedFieldException e) {
|
||||
throw new IllegalArgumentException(e);
|
||||
}
|
||||
if (isVariableLength) {
|
||||
fixedLength = 0;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a new Schema.
|
||||
* @param version schema version
|
||||
* @param keyField field associated with primary key (representative instance)
|
||||
* @param keyName primary key name
|
||||
* @param fields array of column fields (representative instances)
|
||||
* @param fieldNames array of column field names
|
||||
* @throws IllegalArgumentException invalid parameters
|
||||
*/
|
||||
public Schema(int version, Field keyField, String keyName, Field[] fields,
|
||||
String[] fieldNames) {
|
||||
this(version, keyField, keyName, fields, fieldNames, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a new Schema which uses a long key.
|
||||
* @param version schema version
|
||||
|
@ -84,7 +119,23 @@ public class Schema {
|
|||
* @throws IllegalArgumentException invalid parameters
|
||||
*/
|
||||
public Schema(int version, String keyName, Field[] fields, String[] fieldNames) {
|
||||
this(version, LongField.INSTANCE, keyName, fields, fieldNames);
|
||||
this(version, LongField.INSTANCE, keyName, fields, fieldNames, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a new Schema which uses a long key.
|
||||
* @param version schema version
|
||||
* @param keyName primary key name
|
||||
* @param fields array of column fields (representative instances)
|
||||
* @param fieldNames array of column field names
|
||||
* @param sparseColumns column indexes corresponding to those
|
||||
* columns which utilize sparse storage (null if no sparse columns).
|
||||
* Valid sparse column indexes are in the range 0..127.
|
||||
* @throws IllegalArgumentException invalid parameters
|
||||
*/
|
||||
public Schema(int version, String keyName, Field[] fields, String[] fieldNames,
|
||||
int[] sparseColumns) {
|
||||
this(version, LongField.INSTANCE, keyName, fields, fieldNames, sparseColumns);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -98,7 +149,25 @@ public class Schema {
|
|||
*/
|
||||
public Schema(int version, Class<?> keyClass, String keyName, Class<?>[] fieldClasses,
|
||||
String[] fieldNames) {
|
||||
this(version, getField(keyClass), keyName, getFields(fieldClasses), fieldNames);
|
||||
this(version, getField(keyClass), keyName, getFields(fieldClasses), fieldNames, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a new Schema.
|
||||
* @param version schema version
|
||||
* @param keyClass field class associated with primary key
|
||||
* @param keyName primary key name
|
||||
* @param fieldClasses array of column field classes
|
||||
* @param fieldNames array of column field names
|
||||
* @param sparseColumns column indexes corresponding to those
|
||||
* columns which utilize sparse storage (null if no sparse columns).
|
||||
* Valid sparse column indexes are in the range 0..127.
|
||||
* @throws IllegalArgumentException invalid parameters
|
||||
*/
|
||||
public Schema(int version, Class<?> keyClass, String keyName, Class<?>[] fieldClasses,
|
||||
String[] fieldNames, int[] sparseColumns) {
|
||||
this(version, getField(keyClass), keyName, getFields(fieldClasses), fieldNames,
|
||||
sparseColumns);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -110,11 +179,28 @@ public class Schema {
|
|||
* @throws IllegalArgumentException invalid parameters
|
||||
*/
|
||||
public Schema(int version, String keyName, Class<?>[] fieldClasses, String[] fieldNames) {
|
||||
this(version, LongField.INSTANCE, keyName, getFields(fieldClasses), fieldNames);
|
||||
this(version, LongField.INSTANCE, keyName, getFields(fieldClasses), fieldNames, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a new Schema with the given number of columns
|
||||
* Construct a new Schema which uses a long key.
|
||||
* @param version schema version
|
||||
* @param keyName primary key name
|
||||
* @param fieldClasses array of column field classes
|
||||
* @param fieldNames array of column field names
|
||||
* @param sparseColumns column indexes corresponding to those
|
||||
* columns which utilize sparse storage (null if no sparse columns).
|
||||
* Valid sparse column indexes are in the range 0..127.
|
||||
* @throws IllegalArgumentException invalid parameters
|
||||
*/
|
||||
public Schema(int version, String keyName, Class<?>[] fieldClasses, String[] fieldNames,
|
||||
int[] sparseColumns) {
|
||||
this(version, LongField.INSTANCE, keyName, getFields(fieldClasses), fieldNames,
|
||||
sparseColumns);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a Schema based upon encoded
|
||||
* @param version schema version
|
||||
* @param encodedKeyFieldType key field type
|
||||
* @param encodedFieldTypes encoded field types array.
|
||||
|
@ -129,21 +215,122 @@ public class Schema {
|
|||
parseNames(packedFieldNames);
|
||||
isVariableLength = false;
|
||||
fixedLength = 0;
|
||||
fields = new Field[encodedFieldTypes.length];
|
||||
for (int i = 0; i < encodedFieldTypes.length; i++) {
|
||||
byte b = encodedFieldTypes[i];
|
||||
|
||||
initializeFields(encodedFieldTypes); // initializes fields and sparseColumns
|
||||
|
||||
if (fieldNames.length != fields.length) {
|
||||
throw new IllegalArgumentException("fieldNames and column types differ in length");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if schema employs sparse column storage
|
||||
* @return true if schema employs sparse column storage
|
||||
*/
|
||||
public boolean hasSparseColumns() {
|
||||
return sparseColumnSet != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if the specified column index has been designated as a sparse
|
||||
* column within the associated record storage
|
||||
* @param columnIndex column index
|
||||
* @return true if designated column uses sparse storage
|
||||
*/
|
||||
public boolean isSparseColumn(int columnIndex) {
|
||||
return sparseColumnSet != null && sparseColumnSet.contains(columnIndex);
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize field types and related field extensions (e.g., sparse field list).
|
||||
* The presence of field extensions within the encodedFieldTypes is indicated by a
|
||||
* -1 (field extension indicator) following the encoded field types.
|
||||
* The byte value following the field extension indicator
|
||||
* is the extension type which is followed by the extension data if applicable.
|
||||
* A -1 byte is used to separate each extension byte sequence.
|
||||
* @param encodedFieldTypes encoded field type data
|
||||
* @throws UnsupportedFieldException if decoding of the encodedFieldTypes fails
|
||||
*/
|
||||
private void initializeFields(byte[] encodedFieldTypes) throws UnsupportedFieldException {
|
||||
|
||||
if (encodedFieldTypes.length == 0) {
|
||||
fields = new Field[0];
|
||||
return;
|
||||
}
|
||||
|
||||
int index = 0;
|
||||
|
||||
ArrayList<Field> fieldList = new ArrayList<>();
|
||||
while (index < encodedFieldTypes.length) {
|
||||
byte b = encodedFieldTypes[index++];
|
||||
if (b == FIELD_EXTENSION_INDICATOR) {
|
||||
break;
|
||||
}
|
||||
Field f = Field.getField(b);
|
||||
fields[i] = f;
|
||||
fieldList.add(f);
|
||||
if (f.isVariableLength()) {
|
||||
isVariableLength = true;
|
||||
}
|
||||
fixedLength += f.length();
|
||||
}
|
||||
fields = fieldList.toArray(new Field[fieldList.size()]);
|
||||
|
||||
while (index < encodedFieldTypes.length) {
|
||||
int extensionType = encodedFieldTypes[index++];
|
||||
if (extensionType == SPARSE_FIELD_LIST_EXTENSION) {
|
||||
index += parseSparseColumnIndexes(encodedFieldTypes, index);
|
||||
}
|
||||
else {
|
||||
throw new UnsupportedFieldException(
|
||||
"Unsupported field extension type: " + extensionType);
|
||||
}
|
||||
}
|
||||
|
||||
if (isVariableLength) {
|
||||
fixedLength = 0;
|
||||
}
|
||||
if (fieldNames.length != encodedFieldTypes.length) {
|
||||
throw new IllegalArgumentException("fieldNames and column types differ in length");
|
||||
}
|
||||
|
||||
private void initializeSparseColumnSet(Integer[] sparseColumns) throws UnsupportedFieldException {
|
||||
if (sparseColumns == null || sparseColumns.length == 0) {
|
||||
return;
|
||||
}
|
||||
Builder<Integer> builder = ImmutableSet.builder();
|
||||
for (int i : sparseColumns) {
|
||||
if (i < 0 || i > Byte.MAX_VALUE || i >= fields.length) {
|
||||
throw new UnsupportedFieldException("Sparse column entry out of range: " + i);
|
||||
}
|
||||
builder.add(i);
|
||||
}
|
||||
sparseColumnSet = builder.build();
|
||||
if (sparseColumnSet.size() != sparseColumns.length) {
|
||||
throw new UnsupportedFieldException("Sparse column set contains duplicate entry");
|
||||
}
|
||||
isVariableLength = true; // sparse records are variable length
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the sparse column indexes contained within the encodedFieldTypes data
|
||||
* @param encodedFieldTypes encoded data bytes
|
||||
* @param index of first extension data byte within encodedFieldTypes array
|
||||
* @return number of encoded data bytes consumed
|
||||
*/
|
||||
private int parseSparseColumnIndexes(byte[] encodedFieldTypes, int index)
|
||||
throws UnsupportedFieldException {
|
||||
try {
|
||||
int consumed = 0;
|
||||
ArrayList<Integer> columnIndexes = new ArrayList<>();
|
||||
while (index < encodedFieldTypes.length &&
|
||||
encodedFieldTypes[index] != FIELD_EXTENSION_INDICATOR) {
|
||||
columnIndexes.add((int) encodedFieldTypes[index++]);
|
||||
++consumed;
|
||||
}
|
||||
Integer[] sparseColumns = columnIndexes.toArray(new Integer[columnIndexes.size()]);
|
||||
initializeSparseColumnSet(sparseColumns);
|
||||
return consumed;
|
||||
}
|
||||
catch (ArrayIndexOutOfBoundsException e) {
|
||||
throw new UnsupportedFieldException("Incomplete sparse column data");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -271,8 +458,8 @@ public class Schema {
|
|||
StringBuffer buf = new StringBuffer();
|
||||
buf.append(keyName);
|
||||
buf.append(NAME_SEPARATOR);
|
||||
for (int i = 0; i < fieldNames.length; i++) {
|
||||
buf.append(fieldNames[i]);
|
||||
for (String fieldName : fieldNames) {
|
||||
buf.append(fieldName);
|
||||
buf.append(NAME_SEPARATOR);
|
||||
}
|
||||
return buf.toString();
|
||||
|
@ -287,12 +474,22 @@ public class Schema {
|
|||
* @return byte[] field type list as an encoded byte array.
|
||||
*/
|
||||
byte[] getEncodedFieldTypes() {
|
||||
ArrayList<Byte> encodedDataList = new ArrayList<>();
|
||||
|
||||
byte[] encodedFieldTypes = new byte[fields.length];
|
||||
for (int colIndex = 0; colIndex < fields.length; colIndex++) {
|
||||
encodedFieldTypes[colIndex] = fields[colIndex].getFieldType();
|
||||
// add field type encodings
|
||||
for (Field field : fields) {
|
||||
encodedDataList.add(field.getFieldType());
|
||||
}
|
||||
return encodedFieldTypes;
|
||||
|
||||
// add sparse field extension data
|
||||
if (sparseColumnSet != null) {
|
||||
encodedDataList.add(FIELD_EXTENSION_INDICATOR);
|
||||
encodedDataList.add(SPARSE_FIELD_LIST_EXTENSION);
|
||||
for (int col : sparseColumnSet) {
|
||||
encodedDataList.add((byte) col);
|
||||
}
|
||||
}
|
||||
return Bytes.toArray(encodedDataList);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -334,19 +531,7 @@ public class Schema {
|
|||
* @return new record
|
||||
*/
|
||||
public Record createRecord(Field key) {
|
||||
if (!keyType.isSameType(key)) {
|
||||
throw new IllegalArgumentException("key differs from schema key type");
|
||||
}
|
||||
Field[] fieldValues = new Field[fields.length];
|
||||
for (int colIndex = 0; colIndex < fields.length; colIndex++) {
|
||||
try {
|
||||
fieldValues[colIndex] = fields[colIndex].newField();
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw new AssertException(e);
|
||||
}
|
||||
}
|
||||
return new Record(key, fieldValues);
|
||||
return hasSparseColumns() ? new SparseRecord(this, key) : new Record(this, key);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -363,6 +548,40 @@ public class Schema {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Compare two schemas for equality.
|
||||
* Field names are ignored in this comparison. Instance variables such as {@link #fixedLength},
|
||||
* {@link Schema#isVariableLength} and {@link #forceUseVariableLengthKeyNodes} are also ignored.
|
||||
* @see java.lang.Object#equals(java.lang.Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (!(obj instanceof Schema)) {
|
||||
return false;
|
||||
}
|
||||
Schema otherSchema = (Schema) obj;
|
||||
if (version != otherSchema.version ||
|
||||
!keyType.getClass().equals(otherSchema.keyType.getClass()) ||
|
||||
fields.length != otherSchema.fields.length) {
|
||||
return false;
|
||||
}
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
if (!fields[i].getClass().equals(otherSchema.fields[i].getClass())) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (!Objects.equals(sparseColumnSet, otherSchema.sparseColumnSet)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
// Schemas are not intended to be hashed
|
||||
return super.hashCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder buf = new StringBuilder();
@ -35,10 +35,15 @@ public final class ShortField extends Field {
|
|||
*/
|
||||
public static final ShortField MAX_VALUE = new ShortField(Short.MAX_VALUE, true);
|
||||
|
||||
/**
|
||||
* Zero short field value
|
||||
*/
|
||||
public static final ShortField ZERO_VALUE = new ShortField((short) 0, true);
|
||||
|
||||
/**
|
||||
* Instance intended for defining a {@link Table} {@link Schema}
|
||||
*/
|
||||
public static final ShortField INSTANCE = MIN_VALUE;
|
||||
public static final ShortField INSTANCE = ZERO_VALUE;
|
||||
|
||||
private short value;
|
||||
|
||||
|
@ -66,6 +71,17 @@ public final class ShortField extends Field {
|
|||
value = s;
|
||||
}
|
||||
|
||||
@Override
|
||||
boolean isNull() {
|
||||
return value == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
void setNull() {
|
||||
checkImmutable();
|
||||
value = 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public short getShortValue() {
|
||||
return value;
|
||||
|
@ -111,33 +127,38 @@ public final class ShortField extends Field {
|
|||
|
||||
@Override
|
||||
public String getValueAsString() {
|
||||
return "0x" + Integer.toHexString(value);
|
||||
return "0x" + Integer.toHexString(value & 0xffff);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null || !(obj instanceof ShortField))
|
||||
if (obj == null || !(obj instanceof ShortField)) {
|
||||
return false;
|
||||
}
|
||||
return ((ShortField) obj).value == value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int compareTo(Field o) {
|
||||
ShortField f = (ShortField) o;
|
||||
if (value == f.value)
|
||||
if (value == f.value) {
|
||||
return 0;
|
||||
else if (value < f.value)
|
||||
}
|
||||
else if (value < f.value) {
|
||||
return -1;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
int compareTo(DataBuffer buffer, int offset) {
|
||||
short otherValue = buffer.getShort(offset);
|
||||
if (value == otherValue)
|
||||
if (value == otherValue) {
|
||||
return 0;
|
||||
else if (value < otherValue)
|
||||
}
|
||||
else if (value < otherValue) {
|
||||
return -1;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
|
Ghidra/Framework/DB/src/main/java/db/SparseRecord.java (new file, 145 lines)
|
@ -0,0 +1,145 @@
|
|||
/* ###
|
||||
* IP: GHIDRA
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package db;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
||||
public class SparseRecord extends Record {
|
||||
|
||||
SparseRecord(Schema schema, Field key) {
|
||||
super(schema, key);
|
||||
}
|
||||
|
||||
@Override
|
||||
int computeLength() {
|
||||
int len = 1; // sparse field count always written as byte after non-sparse fields
|
||||
Field[] fields = getFields();
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
Field f = fields[i];
|
||||
if (schema.isSparseColumn(i)) {
|
||||
if (!f.isNull()) {
|
||||
// sparse field if present will be prefixed by a byte
|
||||
// indicating the column index
|
||||
len += f.length() + 1;
|
||||
}
|
||||
}
|
||||
else {
|
||||
len += f.length();
|
||||
}
|
||||
}
|
||||
return len;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(Buffer buf, int offset) throws IOException {
|
||||
ArrayList<Integer> sparseFieldIndexes = new ArrayList<>();
|
||||
Field[] fields = getFields();
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
if (schema.isSparseColumn(i)) {
|
||||
if (!fields[i].isNull()) {
|
||||
sparseFieldIndexes.add(i);
|
||||
}
|
||||
}
|
||||
else {
|
||||
offset = fields[i].write(buf, offset);
|
||||
}
|
||||
}
|
||||
// write sparse field count
|
||||
buf.putByte(offset++, (byte) sparseFieldIndexes.size());
|
||||
// write each non-null sparse field
|
||||
for (int i : sparseFieldIndexes) {
|
||||
Field f = fields[i];
|
||||
if (!f.isNull()) {
|
||||
// sparse field if present will be prefixed by a byte
|
||||
// indicating the column index
|
||||
buf.putByte(offset++, (byte) i); // sparse field index
|
||||
offset = f.write(buf, offset); // sparse field data
|
||||
}
|
||||
}
|
||||
dirty = false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void read(Buffer buf, int offset) throws IOException {
|
||||
Field[] fields = getFields();
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
Field f = fields[i];
|
||||
if (schema.isSparseColumn(i)) {
|
||||
f.setNull();
|
||||
}
|
||||
else {
|
||||
offset = f.read(buf, offset);
|
||||
}
|
||||
}
|
||||
int sparseFieldCount = buf.getByte(offset++);
|
||||
for (int i = 0; i < sparseFieldCount; i++) {
|
||||
int index = buf.getByte(offset++); // sparse field index
|
||||
offset = fields[index].read(buf, offset); // sparse field data
|
||||
}
|
||||
dirty = false;
|
||||
}
|
||||
|
||||
private boolean changeInSparseStorage(int colIndex, long newValue) {
|
||||
if (!schema.isSparseColumn(colIndex)) {
|
||||
return false;
|
||||
}
|
||||
boolean oldSparse = getField(colIndex).isNull();
|
||||
boolean newSparse = newValue == 0;
|
||||
return oldSparse != newSparse;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setLongValue(int colIndex, long value) {
|
||||
if (changeInSparseStorage(colIndex, value)) {
|
||||
invalidateLength();
|
||||
}
|
||||
super.setLongValue(colIndex, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setIntValue(int colIndex, int value) {
|
||||
if (changeInSparseStorage(colIndex, value)) {
|
||||
invalidateLength();
|
||||
}
|
||||
super.setIntValue(colIndex, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setShortValue(int colIndex, short value) {
|
||||
if (changeInSparseStorage(colIndex, value)) {
|
||||
invalidateLength();
|
||||
}
|
||||
super.setShortValue(colIndex, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setByteValue(int colIndex, byte value) {
|
||||
if (changeInSparseStorage(colIndex, value)) {
|
||||
invalidateLength();
|
||||
}
|
||||
super.setByteValue(colIndex, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBooleanValue(int colIndex, boolean value) {
|
||||
if (changeInSparseStorage(colIndex, value ? 1 : 0)) {
|
||||
invalidateLength();
|
||||
}
|
||||
super.setBooleanValue(colIndex, value);
|
||||
}
|
||||
|
||||
}
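
A note on the layout implemented above (illustrative only, not part of the commit): write() emits the non-sparse fields back to back, then a one-byte count of the non-null sparse fields, then a one-byte column index followed by the field data for each of them. For a hypothetical schema with two 4-byte non-sparse columns and two sparse columns, of which only column 3 holds an 8-byte value, computeLength() works out to:

    non-sparse column 0 data : 4 bytes
    non-sparse column 1 data : 4 bytes
    sparse field count       : 1 byte   (value 1)
    sparse column 3 index    : 1 byte
    sparse column 3 data     : 8 bytes
    ------------------------------------
    computeLength()          : 18 bytes

An unset (null) sparse column costs nothing beyond the shared count byte, and the primitive setters above call invalidateLength() whenever a sparse column flips between zero and non-zero, since that changes the stored record length.
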
|
|
@ -27,10 +27,15 @@ import ghidra.util.exception.AssertException;
 */
public final class StringField extends Field {

    /**
     * Null string field value
     */
    public static final StringField NULL_VALUE = new StringField(null, true);

    /**
     * Instance intended for defining a {@link Table} {@link Schema}
     */
    public static final StringField INSTANCE = new StringField(null, true);
    public static final StringField INSTANCE = NULL_VALUE;

    private static String ENCODING = "UTF-8";

@ -61,6 +66,18 @@ public final class StringField extends Field {
        doSetString(str);
    }

    @Override
    boolean isNull() {
        return bytes == null;
    }

    @Override
    void setNull() {
        checkImmutable();
        str = null;
        bytes = null;
    }

    @Override
    public String getString() {
        return str;
@ -136,36 +153,17 @@ public final class StringField extends Field {

    @Override
    public String getValueAsString() {
        if (str == null) {
            return "null";
        }
        return "\"" + str + "\"";
    }

//  /**
//   * Get first 8 bytes of string as long value.
//   * First string byte corresponds to most significant byte
//   * of long value.
//   * If string is null, Long.MIN_VALUE is returned.
//   * @see ghidra.framework.store.db.Field#getLongValue()
//   */
//  public long getLongValue() {
//      if (str == null)
//          return Long.MIN_VALUE;
//      long value = 0;
//      byte[] data;
//      try {
//          data = (str == null) ? new byte[0] : str.getBytes(Buffer.ASCII);
//      } catch (UnsupportedEncodingException e) {
//          throw new AssertException();
//      }
//      for (int i = 0; i < 8 && i < data.length; i++) {
//          value = (value << 8) | ((long)data[i] & 0x000000ff);
//      }
//      return value;
//  }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || !(obj instanceof StringField))
        if (obj == null || !(obj instanceof StringField)) {
            return false;
        }
        StringField f = (StringField) obj;
        if (str == null) {
            return (f.str == null);
@ -207,8 +205,9 @@ public final class StringField extends Field {
    public int compareTo(Field o) {
        StringField f = (StringField) o;
        if (str == null) {
            if (f.str == null)
            if (f.str == null) {
                return 0;
            }
            return -1;
        }
        else if (f.str == null) {
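
One consequence of the change above worth calling out (an observation, not text from the diff): NULL_VALUE and INSTANCE now refer to the same instance, so the schema prototype doubles as the "no value" sentinel. The new sparse-table test later in this commit leans on that when probing an index, along the lines of:

    // hedged fragment, mirroring DBFixedKeySparseIndexedTableTest below:
    // unset sparse string columns are not indexed, so the null sentinel finds nothing
    assertEquals(0, table.findRecords(StringField.NULL_VALUE, 5).length);
    assertEquals(1, table.findRecords(new StringField("X"), 5).length);
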
|
||||
|
|
|
@ -173,8 +173,9 @@ public class Table {
|
|||
* @throws IOException thrown if IO error occurs
|
||||
*/
|
||||
private void accumulateNodeStatistics(TableStatistics stats, int bufferId) throws IOException {
|
||||
if (bufferId < 0)
|
||||
if (bufferId < 0) {
|
||||
return;
|
||||
}
|
||||
BTreeNode node = getBTreeNode(bufferId);
|
||||
++stats.bufferCount;
|
||||
|
||||
|
@ -293,8 +294,9 @@ public class Table {
|
|||
|
||||
db.checkTransaction();
|
||||
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
BTreeNode rootNode = getBTreeNode(rootBufferId);
|
||||
|
@ -373,8 +375,9 @@ public class Table {
|
|||
throws IOException, CancelledException {
|
||||
synchronized (db) {
|
||||
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
monitor.setMessage("Check Table " + getName());
|
||||
|
||||
|
@ -492,8 +495,9 @@ public class Table {
|
|||
public void deleteAll() throws IOException {
|
||||
synchronized (db) {
|
||||
db.checkTransaction();
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
BTreeNode rootNode = getBTreeNode(rootBufferId);
|
||||
try {
|
||||
|
@ -635,8 +639,9 @@ public class Table {
|
|||
*/
|
||||
public boolean hasRecord(long key) throws IOException {
|
||||
synchronized (db) {
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return false;
|
||||
}
|
||||
boolean result = false;
|
||||
try {
|
||||
LongKeyRecordNode leaf = nodeMgr.getLongKeyNode(rootBufferId).getLeafNode(key);
|
||||
|
@ -660,8 +665,9 @@ public class Table {
|
|||
if (schema.useLongKeyNodes()) {
|
||||
return hasRecord(key.getLongValue());
|
||||
}
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return false;
|
||||
}
|
||||
boolean result = false;
|
||||
try {
|
||||
FieldKeyRecordNode leaf = getFieldKeyNode(rootBufferId).getLeafNode(key);
|
||||
|
@ -683,8 +689,9 @@ public class Table {
|
|||
*/
|
||||
public Record getRecord(long key) throws IOException {
|
||||
synchronized (db) {
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
LongKeyRecordNode leaf = nodeMgr.getLongKeyNode(rootBufferId).getLeafNode(key);
|
||||
return leaf.getRecord(key, schema);
|
||||
|
@ -704,8 +711,9 @@ public class Table {
|
|||
*/
|
||||
public Record getRecord(Field key) throws IOException {
|
||||
synchronized (db) {
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return null;
|
||||
}
|
||||
if (key instanceof LongField) {
|
||||
return getRecord(key.getLongValue());
|
||||
}
|
||||
|
@ -734,8 +742,9 @@ public class Table {
|
|||
*/
|
||||
public Record getRecordBefore(long key) throws IOException {
|
||||
synchronized (db) {
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
LongKeyRecordNode leaf = nodeMgr.getLongKeyNode(rootBufferId).getLeafNode(key);
|
||||
return leaf.getRecordBefore(key, schema);
|
||||
|
@ -756,10 +765,12 @@ public class Table {
|
|||
*/
|
||||
public Record getRecordBefore(Field key) throws IOException {
|
||||
synchronized (db) {
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return null;
|
||||
if (key instanceof LongField)
|
||||
}
|
||||
if (key instanceof LongField) {
|
||||
return getRecordBefore(key.getLongValue());
|
||||
}
|
||||
try {
|
||||
FieldKeyRecordNode leaf = getFieldKeyNode(rootBufferId).getLeafNode(key);
|
||||
return leaf.getRecordBefore(key, schema);
|
||||
|
@ -780,8 +791,9 @@ public class Table {
|
|||
*/
|
||||
public Record getRecordAfter(long key) throws IOException {
|
||||
synchronized (db) {
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
LongKeyRecordNode leaf = nodeMgr.getLongKeyNode(rootBufferId).getLeafNode(key);
|
||||
return leaf.getRecordAfter(key, schema);
|
||||
|
@ -802,10 +814,12 @@ public class Table {
|
|||
*/
|
||||
public Record getRecordAfter(Field key) throws IOException {
|
||||
synchronized (db) {
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return null;
|
||||
if (key instanceof LongField)
|
||||
}
|
||||
if (key instanceof LongField) {
|
||||
return getRecordAfter(key.getLongValue());
|
||||
}
|
||||
try {
|
||||
FieldKeyRecordNode leaf = getFieldKeyNode(rootBufferId).getLeafNode(key);
|
||||
return leaf.getRecordAfter(key, schema);
|
||||
|
@ -826,8 +840,9 @@ public class Table {
|
|||
*/
|
||||
public Record getRecordAtOrBefore(long key) throws IOException {
|
||||
synchronized (db) {
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
LongKeyRecordNode leaf = nodeMgr.getLongKeyNode(rootBufferId).getLeafNode(key);
|
||||
return leaf.getRecordAtOrBefore(key, schema);
|
||||
|
@ -848,10 +863,12 @@ public class Table {
|
|||
*/
|
||||
public Record getRecordAtOrBefore(Field key) throws IOException {
|
||||
synchronized (db) {
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return null;
|
||||
if (key instanceof LongField)
|
||||
}
|
||||
if (key instanceof LongField) {
|
||||
return getRecordAtOrBefore(key.getLongValue());
|
||||
}
|
||||
try {
|
||||
FieldKeyRecordNode leaf = getFieldKeyNode(rootBufferId).getLeafNode(key);
|
||||
return leaf.getRecordAtOrBefore(key, schema);
|
||||
|
@ -872,8 +889,9 @@ public class Table {
|
|||
*/
|
||||
public Record getRecordAtOrAfter(long key) throws IOException {
|
||||
synchronized (db) {
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
LongKeyRecordNode leaf = nodeMgr.getLongKeyNode(rootBufferId).getLeafNode(key);
|
||||
return leaf.getRecordAtOrAfter(key, schema);
|
||||
|
@ -894,10 +912,12 @@ public class Table {
|
|||
*/
|
||||
public Record getRecordAtOrAfter(Field key) throws IOException {
|
||||
synchronized (db) {
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return null;
|
||||
if (key instanceof LongField)
|
||||
}
|
||||
if (key instanceof LongField) {
|
||||
return getRecordAtOrAfter(key.getLongValue());
|
||||
}
|
||||
try {
|
||||
FieldKeyRecordNode leaf = getFieldKeyNode(rootBufferId).getLeafNode(key);
|
||||
return leaf.getRecordAtOrAfter(key, schema);
|
||||
|
@ -1033,10 +1053,12 @@ public class Table {
|
|||
db.checkTransaction();
|
||||
boolean result = false;
|
||||
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return false;
|
||||
if (!schema.useLongKeyNodes())
|
||||
}
|
||||
if (!schema.useLongKeyNodes()) {
|
||||
throw new IllegalArgumentException("Field key required");
|
||||
}
|
||||
try {
|
||||
++modCount;
|
||||
LongKeyNode rootNode = nodeMgr.getLongKeyNode(rootBufferId);
|
||||
|
@ -1080,8 +1102,9 @@ public class Table {
|
|||
db.checkTransaction();
|
||||
boolean result = false;
|
||||
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return false;
|
||||
}
|
||||
if (key instanceof LongField) {
|
||||
return deleteRecord(key.getLongValue());
|
||||
}
|
||||
|
@ -1127,14 +1150,17 @@ public class Table {
|
|||
public boolean deleteRecords(long startKey, long endKey) throws IOException {
|
||||
synchronized (db) {
|
||||
db.checkTransaction();
|
||||
if (startKey > endKey)
|
||||
if (startKey > endKey) {
|
||||
throw new IllegalArgumentException();
|
||||
if (!schema.useLongKeyNodes())
|
||||
}
|
||||
if (!schema.useLongKeyNodes()) {
|
||||
throw new IllegalArgumentException("Long key required");
|
||||
}
|
||||
|
||||
boolean result = false;
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return result;
|
||||
}
|
||||
|
||||
try {
|
||||
++modCount;
|
||||
|
@ -1251,14 +1277,17 @@ public class Table {
|
|||
public boolean deleteRecords(Field startKey, Field endKey) throws IOException {
|
||||
synchronized (db) {
|
||||
db.checkTransaction();
|
||||
if (startKey.compareTo(endKey) > 0)
|
||||
if (startKey.compareTo(endKey) > 0) {
|
||||
throw new IllegalArgumentException();
|
||||
if (schema.useLongKeyNodes())
|
||||
}
|
||||
if (schema.useLongKeyNodes()) {
|
||||
throw new IllegalArgumentException("Field key required");
|
||||
}
|
||||
|
||||
boolean result = false;
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return result;
|
||||
}
|
||||
|
||||
try {
|
||||
++modCount;
|
||||
|
@ -1380,8 +1409,9 @@ public class Table {
|
|||
public Field[] findRecords(Field field, int columnIndex) throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return indexTable.findPrimaryKeys(field);
|
||||
}
|
||||
}
|
||||
|
@ -1399,8 +1429,9 @@ public class Table {
|
|||
public int getMatchingRecordCount(Field field, int columnIndex) throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return indexTable.getKeyCount(field);
|
||||
}
|
||||
}
|
||||
|
@ -1417,8 +1448,9 @@ public class Table {
|
|||
public boolean hasRecord(Field field, int columnIndex) throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return indexTable.hasRecord(field);
|
||||
}
|
||||
}
|
||||
|
@ -1434,8 +1466,9 @@ public class Table {
|
|||
public DBFieldIterator indexFieldIterator(int columnIndex) throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return indexTable.indexIterator();
|
||||
}
|
||||
}
|
||||
|
@ -1455,8 +1488,9 @@ public class Table {
|
|||
int columnIndex) throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return indexTable.indexIterator(minField, maxField, before);
|
||||
}
|
||||
}
|
||||
|
@ -1479,8 +1513,9 @@ public class Table {
|
|||
boolean before, int columnIndex) throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return indexTable.indexIterator(minField, maxField, startField, before);
|
||||
}
|
||||
}
|
||||
|
@ -1497,8 +1532,9 @@ public class Table {
|
|||
public RecordIterator indexIterator(int columnIndex) throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return new KeyToRecordIterator(this, indexTable.keyIterator());
|
||||
}
|
||||
}
|
||||
|
@ -1521,8 +1557,9 @@ public class Table {
|
|||
boolean atStart) throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return new KeyToRecordIterator(this,
|
||||
indexTable.keyIterator(startValue, endValue, atStart));
|
||||
}
|
||||
|
@ -1543,8 +1580,9 @@ public class Table {
|
|||
public RecordIterator indexIteratorAfter(int columnIndex, Field startValue) throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return new KeyToRecordIterator(this, indexTable.keyIteratorAfter(startValue));
|
||||
}
|
||||
}
|
||||
|
@ -1565,8 +1603,9 @@ public class Table {
|
|||
throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return new KeyToRecordIterator(this, indexTable.keyIteratorBefore(startValue));
|
||||
}
|
||||
}
|
||||
|
@ -1590,8 +1629,9 @@ public class Table {
|
|||
throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return new KeyToRecordIterator(this,
|
||||
indexTable.keyIteratorAfter(startValue, primaryKey));
|
||||
}
|
||||
|
@ -1616,8 +1656,9 @@ public class Table {
|
|||
throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return new KeyToRecordIterator(this,
|
||||
indexTable.keyIteratorBefore(startValue, primaryKey));
|
||||
}
|
||||
|
@ -1632,8 +1673,9 @@ public class Table {
|
|||
public DBFieldIterator indexKeyIterator(int columnIndex) throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return indexTable.keyIterator();
|
||||
}
|
||||
}
|
||||
|
@ -1651,8 +1693,9 @@ public class Table {
|
|||
throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return indexTable.keyIteratorBefore(startField);
|
||||
}
|
||||
}
|
||||
|
@ -1671,8 +1714,9 @@ public class Table {
|
|||
throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return indexTable.keyIteratorAfter(startField);
|
||||
}
|
||||
}
|
||||
|
@ -1692,8 +1736,9 @@ public class Table {
|
|||
Field primaryKey) throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return indexTable.keyIteratorBefore(startField, primaryKey);
|
||||
}
|
||||
}
|
||||
|
@ -1713,8 +1758,9 @@ public class Table {
|
|||
Field primaryKey) throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return indexTable.keyIteratorAfter(startField, primaryKey);
|
||||
}
|
||||
}
|
||||
|
@ -1738,8 +1784,9 @@ public class Table {
|
|||
boolean atMin) throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return indexTable.keyIterator(minField, maxField, atMin);
|
||||
}
|
||||
}
|
||||
|
@ -1760,8 +1807,9 @@ public class Table {
|
|||
Field startField, boolean before) throws IOException {
|
||||
synchronized (db) {
|
||||
IndexTable indexTable = secondaryIndexes.get(columnIndex);
|
||||
if (indexTable == null)
|
||||
if (indexTable == null) {
|
||||
throw new IOException("Index required (" + getName() + "," + columnIndex + ")");
|
||||
}
|
||||
return indexTable.keyIterator(minField, maxField, startField, before);
|
||||
}
|
||||
}
|
||||
|
@ -1789,8 +1837,9 @@ public class Table {
|
|||
*/
|
||||
public RecordIterator iterator(long startKey) throws IOException {
|
||||
synchronized (db) {
|
||||
if (!schema.useLongKeyNodes())
|
||||
if (!schema.useLongKeyNodes()) {
|
||||
throw new IllegalArgumentException("Field key required");
|
||||
}
|
||||
return new LongKeyRecordIterator(Long.MIN_VALUE, Long.MAX_VALUE, startKey);
|
||||
}
|
||||
}
|
||||
|
@ -1808,8 +1857,9 @@ public class Table {
|
|||
*/
|
||||
public RecordIterator iterator(long minKey, long maxKey, long startKey) throws IOException {
|
||||
synchronized (db) {
|
||||
if (!schema.useLongKeyNodes())
|
||||
if (!schema.useLongKeyNodes()) {
|
||||
throw new IllegalArgumentException("Field key required");
|
||||
}
|
||||
return new LongKeyRecordIterator(minKey, maxKey, startKey);
|
||||
}
|
||||
}
|
||||
|
@ -1859,8 +1909,9 @@ public class Table {
|
|||
*/
|
||||
public DBLongIterator longKeyIterator() throws IOException {
|
||||
synchronized (db) {
|
||||
if (!schema.useLongKeyNodes())
|
||||
if (!schema.useLongKeyNodes()) {
|
||||
throw new AssertException();
|
||||
}
|
||||
return new LongKeyIterator();
|
||||
}
|
||||
}
|
||||
|
@ -1874,8 +1925,9 @@ public class Table {
|
|||
*/
|
||||
public DBLongIterator longKeyIterator(long startKey) throws IOException {
|
||||
synchronized (db) {
|
||||
if (!schema.useLongKeyNodes())
|
||||
if (!schema.useLongKeyNodes()) {
|
||||
throw new AssertException();
|
||||
}
|
||||
return new LongKeyIterator(Long.MIN_VALUE, Long.MAX_VALUE, startKey);
|
||||
}
|
||||
}
|
||||
|
@ -1892,8 +1944,9 @@ public class Table {
|
|||
public DBLongIterator longKeyIterator(long minKey, long maxKey, long startKey)
|
||||
throws IOException {
|
||||
synchronized (db) {
|
||||
if (!schema.useLongKeyNodes())
|
||||
if (!schema.useLongKeyNodes()) {
|
||||
throw new AssertException();
|
||||
}
|
||||
return new LongKeyIterator(minKey, maxKey, startKey);
|
||||
}
|
||||
}
|
||||
|
@ -1905,8 +1958,9 @@ public class Table {
|
|||
*/
|
||||
public DBFieldIterator fieldKeyIterator() throws IOException {
|
||||
synchronized (db) {
|
||||
if (schema.useLongKeyNodes())
|
||||
if (schema.useLongKeyNodes()) {
|
||||
throw new AssertException();
|
||||
}
|
||||
return new FieldKeyIterator(null, null, null);
|
||||
}
|
||||
}
|
||||
|
@ -1920,8 +1974,9 @@ public class Table {
|
|||
*/
|
||||
public DBFieldIterator fieldKeyIterator(Field startKey) throws IOException {
|
||||
synchronized (db) {
|
||||
if (schema.useLongKeyNodes())
|
||||
if (schema.useLongKeyNodes()) {
|
||||
throw new AssertException();
|
||||
}
|
||||
return new FieldKeyIterator(null, null, startKey);
|
||||
}
|
||||
}
|
||||
|
@ -1939,8 +1994,9 @@ public class Table {
|
|||
public DBFieldIterator fieldKeyIterator(Field minKey, Field maxKey, Field startKey)
|
||||
throws IOException {
|
||||
synchronized (db) {
|
||||
if (schema.useLongKeyNodes())
|
||||
if (schema.useLongKeyNodes()) {
|
||||
throw new AssertException();
|
||||
}
|
||||
return new FieldKeyIterator(minKey, maxKey, startKey);
|
||||
}
|
||||
}
|
||||
|
@ -1958,8 +2014,9 @@ public class Table {
|
|||
public DBFieldIterator fieldKeyIterator(Field minKey, Field maxKey, boolean before)
|
||||
throws IOException {
|
||||
synchronized (db) {
|
||||
if (schema.useLongKeyNodes())
|
||||
if (schema.useLongKeyNodes()) {
|
||||
throw new AssertException();
|
||||
}
|
||||
return new FieldKeyIterator(minKey, maxKey, before);
|
||||
}
|
||||
}
|
||||
|
@ -2009,11 +2066,13 @@ public class Table {
|
|||
this.minKey = minKey;
|
||||
this.maxKey = maxKey;
|
||||
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (minKey > maxKey)
|
||||
if (minKey > maxKey) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
LongKeyNode rootNode = nodeMgr.getLongKeyNode(rootBufferId);
|
||||
|
@ -2034,8 +2093,9 @@ public class Table {
|
|||
hasPrev = leaf.getKey(recordIndex) >= minKey;
|
||||
if (!hasPrev) {
|
||||
leaf = leaf.getNextLeaf();
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return;
|
||||
}
|
||||
recordIndex = 0;
|
||||
hasNext = leaf.getKey(recordIndex) <= maxKey;
|
||||
}
|
||||
|
@ -2046,8 +2106,9 @@ public class Table {
|
|||
// position to previous record
|
||||
if (recordIndex == 0) {
|
||||
leaf = leaf.getPreviousLeaf();
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return;
|
||||
}
|
||||
recordIndex = leaf.keyCount - 1;
|
||||
}
|
||||
else {
|
||||
|
@ -2085,8 +2146,9 @@ public class Table {
|
|||
*/
|
||||
private LongKeyRecordNode getRecordLeaf(boolean recoverPrev) throws IOException {
|
||||
|
||||
if (rootBufferId < 0 || record == null)
|
||||
if (rootBufferId < 0 || record == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
LongKeyRecordNode leaf = null;
|
||||
isNext = false;
|
||||
|
@ -2142,8 +2204,9 @@ public class Table {
|
|||
try {
|
||||
// Check for modification to storage of previous record
|
||||
LongKeyRecordNode leaf = getRecordLeaf(false);
|
||||
if (leaf == null || hasNext)
|
||||
return hasNext;
|
||||
if (leaf == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Position to next record
|
||||
int nextIndex = recordIndex;
|
||||
|
@ -2153,8 +2216,9 @@ public class Table {
|
|||
int nextBufferId = bufferId;
|
||||
if (nextIndex == leaf.keyCount) {
|
||||
leaf = leaf.getNextLeaf();
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return false;
|
||||
}
|
||||
nextBufferId = leaf.getBufferId();
|
||||
nextIndex = 0;
|
||||
}
|
||||
|
@ -2187,8 +2251,9 @@ public class Table {
|
|||
try {
|
||||
// Check for modification to storage of next record
|
||||
LongKeyRecordNode leaf = getRecordLeaf(true);
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Position to previous record
|
||||
int prevIndex = recordIndex;
|
||||
|
@ -2198,8 +2263,9 @@ public class Table {
|
|||
int prevBufferId = bufferId;
|
||||
if (prevIndex < 0) {
|
||||
leaf = leaf.getPreviousLeaf();
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return false;
|
||||
}
|
||||
prevBufferId = leaf.getBufferId();
|
||||
prevIndex = leaf.keyCount - 1;
|
||||
}
|
||||
|
@ -2247,8 +2313,9 @@ public class Table {
|
|||
|
||||
@Override
|
||||
public boolean delete() throws IOException {
|
||||
if (lastRecord == null)
|
||||
if (lastRecord == null) {
|
||||
return false;
|
||||
}
|
||||
deleteRecord(lastRecord.getKey());
|
||||
lastRecord = null;
|
||||
return true;
|
||||
|
@ -2292,11 +2359,13 @@ public class Table {
|
|||
this.minKey = minKey;
|
||||
this.maxKey = maxKey;
|
||||
|
||||
if (rootBufferId < 0)
|
||||
if (rootBufferId < 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (minKey != null && maxKey != null && minKey.compareTo(maxKey) > 0)
|
||||
if (minKey != null && maxKey != null && minKey.compareTo(maxKey) > 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (startKey != null) {
|
||||
// if (minKey != null && startKey.compareTo(minKey) < 0)
|
||||
|
@ -2342,8 +2411,9 @@ public class Table {
|
|||
: (leaf.getKeyField(recordIndex).compareTo(minKey) >= 0);
|
||||
if (!hasPrev) {
|
||||
leaf = leaf.getNextLeaf();
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return;
|
||||
}
|
||||
recordIndex = 0;
|
||||
hasNext = maxKey == null ? true
|
||||
: (leaf.getKeyField(recordIndex).compareTo(maxKey) <= 0);
|
||||
|
@ -2356,8 +2426,9 @@ public class Table {
|
|||
// position to previous record
|
||||
if (recordIndex == 0) {
|
||||
leaf = leaf.getPreviousLeaf();
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return;
|
||||
}
|
||||
recordIndex = leaf.getKeyCount() - 1;
|
||||
}
|
||||
else {
|
||||
|
@ -2398,8 +2469,9 @@ public class Table {
|
|||
*/
|
||||
private FieldKeyRecordNode getRecordLeaf(boolean recoverPrev) throws IOException {
|
||||
|
||||
if (rootBufferId < 0 || record == null)
|
||||
if (rootBufferId < 0 || record == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Field key = record.getKeyField();
|
||||
FieldKeyRecordNode leaf = null;
|
||||
|
@ -2457,8 +2529,9 @@ public class Table {
|
|||
try {
|
||||
// Check for modification to storage of previous record
|
||||
FieldKeyRecordNode leaf = getRecordLeaf(false);
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Position to next record
|
||||
int nextIndex = recordIndex;
|
||||
|
@ -2468,8 +2541,9 @@ public class Table {
|
|||
int nextBufferId = bufferId;
|
||||
if (nextIndex == leaf.getKeyCount()) {
|
||||
leaf = leaf.getNextLeaf();
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return false;
|
||||
}
|
||||
nextBufferId = leaf.getBufferId();
|
||||
nextIndex = 0;
|
||||
}
|
||||
|
@ -2503,8 +2577,9 @@ public class Table {
|
|||
try {
|
||||
// Check for modification to storage of next record
|
||||
FieldKeyRecordNode leaf = getRecordLeaf(true);
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Position to previous record
|
||||
int prevIndex = recordIndex;
|
||||
|
@ -2514,8 +2589,9 @@ public class Table {
|
|||
int prevBufferId = bufferId;
|
||||
if (prevIndex < 0) {
|
||||
leaf = leaf.getPreviousLeaf();
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return false;
|
||||
}
|
||||
prevBufferId = leaf.getBufferId();
|
||||
prevIndex = leaf.getKeyCount() - 1;
|
||||
}
|
||||
|
@ -2564,8 +2640,9 @@ public class Table {
|
|||
|
||||
@Override
|
||||
public boolean delete() throws IOException {
|
||||
if (lastRecord == null)
|
||||
if (lastRecord == null) {
|
||||
return false;
|
||||
}
|
||||
deleteRecord(lastRecord.getKeyField());
|
||||
lastRecord = null;
|
||||
return true;
|
||||
|
@ -2743,8 +2820,9 @@ public class Table {
|
|||
|
||||
// Empty leaf node - special case
|
||||
int leafRecCount = leaf.keyCount;
|
||||
if (leafRecCount == 0)
|
||||
if (leafRecCount == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
keyIndex = leaf.getKeyIndex(targetKey);
|
||||
getKeys(leaf);
|
||||
|
@ -2852,12 +2930,14 @@ public class Table {
|
|||
// Process next leaf if needed
|
||||
if (nextIndex >= keys.length) {
|
||||
try {
|
||||
if (bufferId == -1)
|
||||
if (bufferId == -1) {
|
||||
return false;
|
||||
}
|
||||
LongKeyRecordNode leaf = ((LongKeyRecordNode) nodeMgr.getLongKeyNode(
|
||||
bufferId)).getNextLeaf();
|
||||
if (leaf == null || leaf.getKey(0) > maxKey)
|
||||
if (leaf == null || leaf.getKey(0) > maxKey) {
|
||||
return false;
|
||||
}
|
||||
getKeys(leaf);
|
||||
key = keys[0];
|
||||
keyIndex = 0;
|
||||
|
@ -2898,15 +2978,18 @@ public class Table {
|
|||
// Process previous leaf if needed
|
||||
if (prevIndex < 0 || keys.length == 0) {
|
||||
try {
|
||||
if (bufferId == -1)
|
||||
if (bufferId == -1) {
|
||||
return false;
|
||||
}
|
||||
LongKeyRecordNode leaf = ((LongKeyRecordNode) nodeMgr.getLongKeyNode(
|
||||
bufferId)).getPreviousLeaf();
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return false;
|
||||
}
|
||||
prevIndex = leaf.keyCount - 1;
|
||||
if (leaf.getKey(prevIndex) < minKey)
|
||||
if (leaf.getKey(prevIndex) < minKey) {
|
||||
return false;
|
||||
}
|
||||
getKeys(leaf);
|
||||
key = keys[prevIndex];
|
||||
keyIndex = prevIndex;
|
||||
|
@ -3111,8 +3194,9 @@ public class Table {
|
|||
}
|
||||
if (!hasNext) {
|
||||
|
||||
if (bufferId < 0 || keyIndex < 0)
|
||||
if (bufferId < 0 || keyIndex < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check next key index
|
||||
int nextIndex = keyIndex + 1;
|
||||
|
@ -3123,11 +3207,13 @@ public class Table {
|
|||
(LongKeyRecordNode) nodeMgr.getLongKeyNode(bufferId);
|
||||
if (nextIndex >= leaf.keyCount) {
|
||||
leaf = leaf.getNextLeaf();
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return false;
|
||||
}
|
||||
long nextKey = leaf.getKey(0);
|
||||
if (nextKey > maxKey)
|
||||
if (nextKey > maxKey) {
|
||||
return false;
|
||||
}
|
||||
bufferId = leaf.getBufferId();
|
||||
key = nextKey;
|
||||
keyIndex = 0;
|
||||
|
@ -3163,8 +3249,9 @@ public class Table {
|
|||
}
|
||||
if (!hasPrev) {
|
||||
|
||||
if (bufferId < 0 || keyIndex < 0)
|
||||
if (bufferId < 0 || keyIndex < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check previous key index
|
||||
int prevIndex = keyIndex - 1;
|
||||
|
@ -3175,12 +3262,14 @@ public class Table {
|
|||
(LongKeyRecordNode) nodeMgr.getLongKeyNode(bufferId);
|
||||
if (prevIndex < 0) {
|
||||
leaf = leaf.getPreviousLeaf();
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return false;
|
||||
}
|
||||
prevIndex = leaf.keyCount - 1;
|
||||
long prevKey = leaf.getKey(prevIndex);
|
||||
if (prevKey < minKey)
|
||||
if (prevKey < minKey) {
|
||||
return false;
|
||||
}
|
||||
bufferId = leaf.getBufferId();
|
||||
key = prevKey;
|
||||
keyIndex = prevIndex;
|
||||
|
@ -3424,8 +3513,9 @@ public class Table {
|
|||
getKeys(leaf);
|
||||
|
||||
// Empty leaf node - special case
|
||||
if (keys.length == 0)
|
||||
if (keys.length == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
keyIndex = leaf.getKeyIndex(targetKey);
|
||||
}
|
||||
|
@ -3527,8 +3617,9 @@ public class Table {
|
|||
}
|
||||
if (!hasNext) {
|
||||
|
||||
if (bufferId < 0)
|
||||
if (bufferId < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check next key index
|
||||
int nextIndex = keyIndex + 1;
|
||||
|
@ -3539,8 +3630,9 @@ public class Table {
|
|||
FieldKeyRecordNode leaf =
|
||||
((FieldKeyRecordNode) getFieldKeyNode(bufferId)).getNextLeaf();
|
||||
if (leaf == null ||
|
||||
(maxKey != null && leaf.getKeyField(0).compareTo(maxKey) > 0))
|
||||
(maxKey != null && leaf.getKeyField(0).compareTo(maxKey) > 0)) {
|
||||
return false;
|
||||
}
|
||||
getKeys(leaf);
|
||||
key = keys[0];
|
||||
keyIndex = 0;
|
||||
|
@ -3575,8 +3667,9 @@ public class Table {
|
|||
}
|
||||
if (!hasPrev) {
|
||||
|
||||
if (bufferId < 0)
|
||||
if (bufferId < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check previous key index
|
||||
int prevIndex = keyIndex - 1;
|
||||
|
@ -3586,11 +3679,14 @@ public class Table {
|
|||
try {
|
||||
FieldKeyRecordNode leaf =
|
||||
((FieldKeyRecordNode) getFieldKeyNode(bufferId)).getPreviousLeaf();
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return false;
|
||||
}
|
||||
prevIndex = leaf.getKeyCount() - 1;
|
||||
if (minKey != null && leaf.getKeyField(prevIndex).compareTo(minKey) < 0)
|
||||
if (minKey != null &&
|
||||
leaf.getKeyField(prevIndex).compareTo(minKey) < 0) {
|
||||
return false;
|
||||
}
|
||||
getKeys(leaf);
|
||||
key = keys[prevIndex];
|
||||
keyIndex = prevIndex;
|
||||
|
@ -3796,8 +3892,9 @@ public class Table {
|
|||
}
|
||||
if (!hasNext) {
|
||||
|
||||
if (bufferId < 0 || keyIndex < 0)
|
||||
if (bufferId < 0 || keyIndex < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check next key index
|
||||
int nextIndex = keyIndex + 1;
|
||||
|
@ -3807,11 +3904,13 @@ public class Table {
|
|||
FieldKeyRecordNode leaf = (FieldKeyRecordNode) getFieldKeyNode(bufferId);
|
||||
if (nextIndex >= leaf.getKeyCount()) {
|
||||
leaf = leaf.getNextLeaf();
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return false;
|
||||
}
|
||||
Field nextKey = leaf.getKeyField(0);
|
||||
if (maxKey != null && nextKey.compareTo(maxKey) > 0)
|
||||
if (maxKey != null && nextKey.compareTo(maxKey) > 0) {
|
||||
return false;
|
||||
}
|
||||
bufferId = leaf.getBufferId();
|
||||
key = nextKey;
|
||||
keyIndex = 0;
|
||||
|
@ -3846,8 +3945,9 @@ public class Table {
|
|||
}
|
||||
if (!hasPrev) {
|
||||
|
||||
if (bufferId < 0 || keyIndex < 0)
|
||||
if (bufferId < 0 || keyIndex < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check previous key index
|
||||
int prevIndex = keyIndex - 1;
|
||||
|
@ -3857,12 +3957,14 @@ public class Table {
|
|||
FieldKeyRecordNode leaf = (FieldKeyRecordNode) getFieldKeyNode(bufferId);
|
||||
if (prevIndex < 0) {
|
||||
leaf = leaf.getPreviousLeaf();
|
||||
if (leaf == null)
|
||||
if (leaf == null) {
|
||||
return false;
|
||||
}
|
||||
prevIndex = leaf.getKeyCount() - 1;
|
||||
Field prevKey = leaf.getKeyField(prevIndex);
|
||||
if (minKey != null && prevKey.compareTo(minKey) < 0)
|
||||
if (minKey != null && prevKey.compareTo(minKey) < 0) {
|
||||
return false;
|
||||
}
|
||||
bufferId = leaf.getBufferId();
|
||||
key = prevKey;
|
||||
keyIndex = prevIndex;
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/* ###
|
||||
* IP: GHIDRA
|
||||
* REVIEWED: YES
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -27,22 +26,27 @@ public class TranslatedRecordIterator implements RecordIterator {
|
|||
this.translator = translator;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() throws IOException {
|
||||
return it.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasPrevious() throws IOException {
|
||||
return it.hasPrevious();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Record next() throws IOException {
|
||||
return translator.translateRecord(it.next());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Record previous() throws IOException {
|
||||
return translator.translateRecord(it.previous());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean delete() throws IOException {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
|
|
@ -30,7 +30,7 @@ import ghidra.util.exception.AssertException;
|
|||
* <pre>
|
||||
* | NodeType(1) | KeyCount(4) | PrevLeafId(4) | NextLeafId(4) | Key0(8) | RecOffset0(4) | IndFlag0(1) |...
|
||||
*
|
||||
* | KeyN(8) | RecOffsetN(4) | IndFlagN(1) |...<FreeSpace>... | RecN |... | Rec1 |
|
||||
* | KeyN(8) | RecOffsetN(4) | IndFlagN(1) |...<FreeSpace>... | RecN |... | Rec0 |
|
||||
* </pre>
|
||||
* IndFlag - if not zero the record has been stored within a chained DBBuffer
|
||||
* whose 4-byte integer buffer ID has been stored within this leaf at the record offset.
|
||||
|
@ -214,8 +214,9 @@ class VarRecNode extends LongKeyRecordNode {
|
|||
@Override
|
||||
Record getRecord(long key, Schema schema) throws IOException {
|
||||
int index = getKeyIndex(key);
|
||||
if (index < 0)
|
||||
if (index < 0) {
|
||||
return null;
|
||||
}
|
||||
return getRecord(schema, index);
|
||||
}
|
||||
|
||||
|
@ -340,7 +341,9 @@ class VarRecNode extends LongKeyRecordNode {
|
|||
}
|
||||
|
||||
if ((len + ENTRY_SIZE) > getFreeSpace())
|
||||
{
|
||||
return false; // insufficient space for record storage
|
||||
}
|
||||
|
||||
// Make room for new record
|
||||
int offset = moveRecords(index, -len);
|
||||
|
@ -373,8 +376,9 @@ class VarRecNode extends LongKeyRecordNode {
|
|||
@Override
|
||||
public void remove(int index) throws IOException {
|
||||
|
||||
if (index < 0 || index >= keyCount)
|
||||
if (index < 0 || index >= keyCount) {
|
||||
throw new AssertException();
|
||||
}
|
||||
|
||||
if (hasIndirectStorage(index)) {
|
||||
removeChainedBuffer(buffer.getInt(getRecordDataOffset(index)));
|
||||
|
@ -382,6 +386,7 @@ class VarRecNode extends LongKeyRecordNode {
|
|||
}
|
||||
|
||||
int len = getRecordLength(index);
|
||||
|
||||
moveRecords(index + 1, len);
|
||||
|
||||
int start = KEY_BASE_OFFSET + ((index + 1) * ENTRY_SIZE);
|
||||
|
|
|
@ -89,7 +89,7 @@ public class DBFixedKeyIndexedTableTest extends AbstractGenericTest {
|
|||
private Record[] createRandomTableRecords(int schemaType, int recordCnt, int varDataSize)
|
||||
throws IOException {
|
||||
long txId = dbh.startTransaction();
|
||||
Table table = DBTestUtils.createFixedKeyTable(dbh, table1Name, schemaType, true);
|
||||
Table table = DBTestUtils.createFixedKeyTable(dbh, table1Name, schemaType, true, false);
|
||||
Record[] recs = new Record[recordCnt];
|
||||
for (int i = 0; i < recordCnt; i++) {
|
||||
try {
|
||||
|
@ -113,7 +113,7 @@ public class DBFixedKeyIndexedTableTest extends AbstractGenericTest {
|
|||
private Record[] createOrderedTableRecords(int schemaType, int recordCnt, long keyIncrement,
|
||||
int varDataSize) throws IOException {
|
||||
long txId = dbh.startTransaction();
|
||||
Table table = DBTestUtils.createFixedKeyTable(dbh, table1Name, schemaType, true);
|
||||
Table table = DBTestUtils.createFixedKeyTable(dbh, table1Name, schemaType, true, false);
|
||||
FixedField key = new FixedField10(new byte[] { 0x7f, (byte) 0xff, (byte) 0xff, (byte) 0xff,
|
||||
(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff });
|
||||
Record[] recs = new Record[recordCnt];
|
||||
|
@ -158,8 +158,7 @@ public class DBFixedKeyIndexedTableTest extends AbstractGenericTest {
|
|||
Table table = dbh.getTable(table1Name);
|
||||
int[] indexedColumns = table.getIndexedColumns();
|
||||
int step = recordCnt / findCnt;
|
||||
for (int n = 0; n < indexedColumns.length; n++) {
|
||||
int indexColumn = indexedColumns[n];
|
||||
for (int indexColumn : indexedColumns) {
|
||||
for (int i = 0; i < recordCnt; i += step) {
|
||||
Field[] keys = table.findRecords(recs[i].getField(indexColumn), indexColumn);
|
||||
Arrays.sort(keys);
|
||||
|
@ -174,14 +173,14 @@ public class DBFixedKeyIndexedTableTest extends AbstractGenericTest {
|
|||
public void testEmptyFixedKeyIterator() throws IOException {
|
||||
createRandomTableRecords(DBTestUtils.ALL_TYPES, 0, 1);
|
||||
|
||||
dbh.undo();
|
||||
dbh.redo();
|
||||
|
||||
saveAsAndReopen(dbName);
|
||||
|
||||
Table table = dbh.getTable(table1Name);
|
||||
assertEquals(0, table.getRecordCount());
|
||||
|
||||
dbh.undo();
|
||||
dbh.redo();
|
||||
|
||||
Field startKey = new FixedField10(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 2, 0 });
|
||||
Field minKey = new FixedField10(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 1, 0 });
|
||||
Field maxKey = new FixedField10(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 3, 0 });
|
||||
|
@ -805,8 +804,6 @@ public class DBFixedKeyIndexedTableTest extends AbstractGenericTest {
|
|||
assertEquals(maxIx + 1, ix);
|
||||
|
||||
// Index field iterator (reverse range of unique index values)
|
||||
// minIx = indexFields.size() / 10;
|
||||
// maxIx = minIx * 2;
|
||||
fiter = table.indexFieldIterator(indexFields.get(minIx), indexFields.get(maxIx), false,
|
||||
colIx);
|
||||
ix = maxIx;
|
||||
|
@ -829,8 +826,6 @@ public class DBFixedKeyIndexedTableTest extends AbstractGenericTest {
|
|||
assertEquals(-1, ix);
|
||||
|
||||
// Index field iterator (forward range of unique index values)
|
||||
// minIx = indexFields.size() / 10;
|
||||
// maxIx = minIx * 2;
|
||||
startIx = (minIx + maxIx) / 2;
|
||||
fiter = table.indexFieldIterator(indexFields.get(minIx), indexFields.get(maxIx),
|
||||
indexFields.get(startIx), true, colIx);
|
||||
|
@ -842,9 +837,6 @@ public class DBFixedKeyIndexedTableTest extends AbstractGenericTest {
|
|||
assertEquals(maxIx + 1, ix);
|
||||
|
||||
// Index field iterator (reverse range of unique index values)
|
||||
// minIx = indexFields.size() / 10;
|
||||
// maxIx = minIx * 2;
|
||||
// startIx = (minIx + maxIx) / 2;
|
||||
fiter = table.indexFieldIterator(indexFields.get(minIx), indexFields.get(maxIx),
|
||||
indexFields.get(startIx), false, colIx);
|
||||
ix = startIx;
|
||||
|
@ -1144,7 +1136,7 @@ public class DBFixedKeyIndexedTableTest extends AbstractGenericTest {
|
|||
public void testRecordIteratorExtents() throws IOException {
|
||||
|
||||
Record[] recs = null;
|
||||
recs = createOrderedRecordRange(DBTestUtils.SINGLE_SHORT, 30, 2, 1);
|
||||
recs = createOrderedTableRecords(DBTestUtils.SINGLE_SHORT, 30, 2, 1);
|
||||
Table table = dbh.getTable(table1Name);
|
||||
assertEquals(recs.length, table.getRecordCount());
|
||||
|
||||
|
@ -1152,8 +1144,6 @@ public class DBFixedKeyIndexedTableTest extends AbstractGenericTest {
|
|||
int colIx = 0;
|
||||
Arrays.sort(recs, new RecColumnComparator(colIx));
|
||||
int recIx = recs.length - 1;
|
||||
// RecordIterator iter = table.indexIterator(colIx, recs[minIx].getField(colIx),
|
||||
// recs[maxIx].getField(colIx), false);
|
||||
Field minField = new ShortField(Short.MIN_VALUE);
|
||||
Field maxField = new ShortField(Short.MAX_VALUE);
|
||||
RecordIterator iter = table.indexIterator(colIx, minField, maxField, false);
|
||||
|
@ -1164,23 +1154,6 @@ public class DBFixedKeyIndexedTableTest extends AbstractGenericTest {
|
|||
assertEquals(recIx, -1);
|
||||
}
|
||||
|
||||
private Record[] createOrderedRecordRange(int schemaType, int recordCnt, long keyIncrement,
|
||||
int varDataSize) throws IOException {
|
||||
long txId = dbh.startTransaction();
|
||||
Table table = DBTestUtils.createLongKeyTable(dbh, table1Name, schemaType, true);
|
||||
Record[] recs = new Record[recordCnt];
|
||||
for (int key = 0; key < recordCnt; key++) {
|
||||
try {
|
||||
recs[key] = DBTestUtils.createMidRangeRecord(table, key, varDataSize, true);
|
||||
}
|
||||
catch (DuplicateKeyException e) {
|
||||
Assert.fail("Duplicate key error");
|
||||
}
|
||||
}
|
||||
dbh.endTransaction(txId, true);
|
||||
return recs;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRecordIteratorDelete() throws IOException {
|
||||
for (int colIx : DBTestUtils.getIndexedColumns(DBTestUtils.ALL_TYPES)) {
|
||||
|
|
|
@ -0,0 +1,200 @@
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package db;

import static org.junit.Assert.*;

import java.io.File;
import java.io.IOException;

import org.junit.*;

import db.buffers.*;
import generic.test.AbstractGenericTest;
import ghidra.util.exception.CancelledException;
import utilities.util.FileUtilities;

public class DBFixedKeySparseIndexedTableTest extends AbstractGenericTest {

    private static final int BUFFER_SIZE = 2048; // keep small for chained buffer testing
    private static final int CACHE_SIZE = 4 * 1024 * 1024;

    private static final int ITER_REC_CNT = 1000;

    private static final String table1Name = "TABLE1";

    private File testDir;
    private static final String dbName = "test";

    private BufferFileManager fileMgr;
    private DBHandle dbh;
    private BufferFile bfile;

    @Before
    public void setUp() throws Exception {

        testDir = createTempDirectory(getClass().getSimpleName());
        dbh = new DBHandle(BUFFER_SIZE, CACHE_SIZE);
    }

    @After
    public void tearDown() throws Exception {
        if (dbh != null) {
            dbh.close();
        }
        if (bfile != null) {
            bfile.close();
        }
        FileUtilities.deleteDir(testDir);

    }

    private void saveAsAndReopen(String name) throws IOException {
        try {
            BufferFileManager mgr = DBTestUtils.getBufferFileManager(testDir, name);
            BufferFile bf = new LocalManagedBufferFile(dbh.getBufferSize(), mgr, -1);
            dbh.saveAs(bf, true, null);
            dbh.close();
            fileMgr = mgr;
        }
        catch (CancelledException e) {
            Assert.fail("Should not happen");
        }
        bfile = new LocalManagedBufferFile(fileMgr, true, -1, -1);
        dbh = new DBHandle(bfile);
    }

    @Test
    public void testEmptyFixedKeyIterator() throws IOException {

        long txId = dbh.startTransaction();
        Table table =
            DBTestUtils.createFixedKeyTable(dbh, table1Name, DBTestUtils.ALL_TYPES, true, true);
        Schema schema = table.getSchema();
        for (int i = 0; i < schema.getFieldCount(); i++) {
            assertTrue(schema.isSparseColumn(i));
        }
        dbh.endTransaction(txId, true);

        dbh.undo();
        dbh.redo();

        saveAsAndReopen(dbName);

        table = dbh.getTable(table1Name);
        assertEquals(0, table.getRecordCount());

        assertEquals(schema, table.getSchema());

        Field startKey = new FixedField10(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 2, 0 });
        Field minKey = new FixedField10(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 1, 0 });
        Field maxKey = new FixedField10(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 3, 0 });
        DBFieldIterator iter = table.fieldKeyIterator();
        assertTrue(!iter.hasPrevious());
        assertTrue(!iter.hasNext());

        iter = table.fieldKeyIterator(startKey);
        assertTrue(!iter.hasPrevious());
        assertTrue(!iter.hasNext());

        iter = table.fieldKeyIterator(minKey, maxKey, startKey);
        assertTrue(!iter.hasPrevious());
        assertTrue(!iter.hasNext());

        startKey = FixedField10.INSTANCE.getMinValue();
        iter = table.fieldKeyIterator(minKey, maxKey, startKey);
        assertTrue(!iter.hasPrevious());
        assertTrue(!iter.hasNext());

        startKey = FixedField10.INSTANCE.getMaxValue();
        iter = table.fieldKeyIterator(minKey, maxKey, startKey);
        assertTrue(!iter.hasPrevious());
        assertTrue(!iter.hasNext());
    }

    @Test
    public void testFixedKeyIterator() throws IOException {

        long txId = dbh.startTransaction();
        Table table =
            DBTestUtils.createFixedKeyTable(dbh, table1Name, DBTestUtils.ALL_TYPES, true, true);
        Schema schema = table.getSchema();
        for (int i = 0; i < schema.getFieldCount(); i++) {
            assertTrue(schema.isSparseColumn(i));
        }

        int cnt = schema.getFieldCount();
        for (int i = 0; i < cnt; i++) {
            Field key = new FixedField10(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 1, (byte) i });
            Record r = schema.createRecord(key);

            Field f = schema.getField(i);
            if (f.isVariableLength()) {
                f.setBinaryData(new byte[] { 'X' });
            }
            else {
                f = f.getMaxValue();
            }
            r.setField(i, f);

            int nextCol = i + 1;
            if (nextCol < cnt) {
                f = schema.getField(nextCol);
                if (f.isVariableLength()) {
                    f.setBinaryData(new byte[] { 'x' });
                }
                else {
                    f = f.getMinValue();
                }
                r.setField(nextCol, f);
            }

            table.putRecord(r);
        }

        dbh.endTransaction(txId, true);

        saveAsAndReopen(dbName);

        table = dbh.getTable(table1Name);
        assertEquals(cnt, table.getRecordCount());

        // see DBTestUtils for schema column types

        // Index does not track null/zero values
        assertEquals(0, table.findRecords(IntField.ZERO_VALUE, 2).length);
        assertEquals(0, table.findRecords(ShortField.ZERO_VALUE, 3).length);
        assertEquals(0, table.findRecords(LongField.ZERO_VALUE, 4).length);
        assertEquals(0, table.findRecords(StringField.NULL_VALUE, 5).length);
        assertEquals(0, table.findRecords(new BinaryField(), 6).length);
        assertEquals(0, table.findRecords(FixedField10.ZERO_VALUE, 7).length);

        assertEquals(1, table.findRecords(IntField.MAX_VALUE, 2).length);
        assertEquals(1, table.findRecords(ShortField.MAX_VALUE, 3).length);
        assertEquals(1, table.findRecords(LongField.MAX_VALUE, 4).length);
        assertEquals(1, table.findRecords(new StringField("X"), 5).length);
        assertEquals(1, table.findRecords(new BinaryField(new byte[] { 'X' }), 6).length);
        assertEquals(1, table.findRecords(FixedField10.MAX_VALUE, 7).length);

        assertEquals(1, table.findRecords(IntField.MIN_VALUE, 2).length);
        assertEquals(1, table.findRecords(ShortField.MIN_VALUE, 3).length);
        assertEquals(1, table.findRecords(LongField.MIN_VALUE, 4).length);
        assertEquals(1, table.findRecords(new StringField("x"), 5).length);
        assertEquals(1, table.findRecords(new BinaryField(new byte[] { 'x' }), 6).length);
        assertEquals(0, table.findRecords(FixedField10.MIN_VALUE, 7).length); // same as zero/null
    }

}
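
Putting SparseRecord and the schema support together, a minimal usage sketch (assuming the same test utilities as above; the trailing boolean on createFixedKeyTable is taken to mean "use sparse columns") might look like:

    long txId = dbh.startTransaction();
    Table table = DBTestUtils.createFixedKeyTable(dbh, "SPARSE", DBTestUtils.ALL_TYPES, true, true);

    Record r = table.getSchema().createRecord(
        new FixedField10(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 }));
    r.setField(5, new StringField("X")); // only this column is materialized in the stored record
    table.putRecord(r);
    dbh.endTransaction(txId, true);

    // unset sparse columns behave as null/zero and are skipped by index tables
    assertEquals(0, table.findRecords(StringField.NULL_VALUE, 5).length);
    assertEquals(1, table.findRecords(new StringField("X"), 5).length);

Sparse columns trade a one-byte prefix per present value for not reserving space in every record, which pays off when most columns are usually unset.
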
|
|
@ -90,7 +90,7 @@ public class DBFixedKeyTableTest extends AbstractGenericTest {
|
|||
int varDataSize) throws IOException {
|
||||
long txId = dbh.startTransaction();
|
||||
Table table =
|
||||
DBTestUtils.createFixedKeyTable(dbh, table1Name, DBTestUtils.ALL_TYPES, false);
|
||||
DBTestUtils.createFixedKeyTable(dbh, table1Name, DBTestUtils.ALL_TYPES, false, false);
|
||||
Record rec = null;
|
||||
try {
|
||||
rec = DBTestUtils.createFixedKeyRecord(table, varDataSize, true);
|
||||
|
@ -280,7 +280,8 @@ public class DBFixedKeyTableTest extends AbstractGenericTest {
|
|||
throws IOException {
|
||||
long txId = dbh.startTransaction();
|
||||
if (table == null) {
|
||||
table = DBTestUtils.createFixedKeyTable(dbh, table1Name, DBTestUtils.ALL_TYPES, false);
|
||||
table = DBTestUtils.createFixedKeyTable(dbh, table1Name, DBTestUtils.ALL_TYPES, false,
|
||||
false);
|
||||
}
|
||||
Record[] recs = new Record[recordCnt];
|
||||
for (int i = 0; i < recordCnt; i++) {
|
||||
|
@ -305,7 +306,7 @@ public class DBFixedKeyTableTest extends AbstractGenericTest {
|
|||
int varDataSize) throws IOException {
|
||||
long txId = dbh.startTransaction();
|
||||
Table table =
|
||||
DBTestUtils.createFixedKeyTable(dbh, table1Name, DBTestUtils.ALL_TYPES, false);
|
||||
DBTestUtils.createFixedKeyTable(dbh, table1Name, DBTestUtils.ALL_TYPES, false, false);
|
||||
FixedField10 key = new FixedField10(new byte[] { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 });
|
||||
Record[] recs = new Record[recordCnt];
|
||||
for (int i = 0; i < recordCnt; i++) {
|
||||
|
|
|
@ -90,7 +90,7 @@ public class DBIndexedTableTest extends AbstractGenericTest {
|
|||
private Record[] createRandomTableRecords(int schemaType, int recordCnt, int varDataSize)
|
||||
throws IOException {
|
||||
long txId = dbh.startTransaction();
|
||||
Table table = DBTestUtils.createLongKeyTable(dbh, table1Name, schemaType, true);
|
||||
Table table = DBTestUtils.createLongKeyTable(dbh, table1Name, schemaType, true, false);
|
||||
Record[] recs = new Record[recordCnt];
|
||||
for (int i = 0; i < recordCnt; i++) {
|
||||
try {
|
||||
|
@ -114,7 +114,7 @@ public class DBIndexedTableTest extends AbstractGenericTest {
|
|||
private Record[] createOrderedTableRecords(int schemaType, int recordCnt, long keyIncrement,
|
||||
int varDataSize) throws IOException {
|
||||
long txId = dbh.startTransaction();
|
||||
Table table = DBTestUtils.createLongKeyTable(dbh, table1Name, schemaType, true);
|
||||
Table table = DBTestUtils.createLongKeyTable(dbh, table1Name, schemaType, true, false);
|
||||
long key = 0;
|
||||
Record[] recs = new Record[recordCnt];
|
||||
for (int i = 0; i < recordCnt; i++) {
|
||||
|
@ -1183,7 +1183,7 @@ public class DBIndexedTableTest extends AbstractGenericTest {
|
|||
private Record[] createOrderedRecordRange(int schemaType, int recordCnt, long keyIncrement,
|
||||
int varDataSize) throws IOException {
|
||||
long txId = dbh.startTransaction();
|
||||
Table table = DBTestUtils.createLongKeyTable(dbh, table1Name, schemaType, true);
|
||||
Table table = DBTestUtils.createLongKeyTable(dbh, table1Name, schemaType, true, false);
|
||||
Record[] recs = new Record[recordCnt];
|
||||
for (int key = 0; key < recordCnt; key++) {
|
||||
try {
|
||||
|
|
|
@@ -81,7 +81,8 @@ public class DBLongKeyTableTest extends AbstractGenericTest {
 	private long insertOneLongKeyRecord(boolean testStoredDB, boolean testGetRecord,
 			int varDataSize) throws IOException {
 		long txId = dbh.startTransaction();
-		Table table = DBTestUtils.createLongKeyTable(dbh, table1Name, DBTestUtils.ALL_TYPES, false);
+		Table table =
+			DBTestUtils.createLongKeyTable(dbh, table1Name, DBTestUtils.ALL_TYPES, false, false);
 		Record rec = null;
 		try {
 			rec = DBTestUtils.createLongKeyRecord(table, true, varDataSize, true);
@@ -265,7 +266,8 @@ public class DBLongKeyTableTest extends AbstractGenericTest {
 			throws IOException {
 		long txId = dbh.startTransaction();
 		if (table == null) {
-			table = DBTestUtils.createLongKeyTable(dbh, table1Name, DBTestUtils.ALL_TYPES, false);
+			table = DBTestUtils.createLongKeyTable(dbh, table1Name, DBTestUtils.ALL_TYPES, false,
+				false);
 		}
 		Record[] recs = new Record[recordCnt];
 		for (int i = 0; i < recordCnt; i++) {
@@ -289,7 +291,8 @@ public class DBLongKeyTableTest extends AbstractGenericTest {
 	private Record[] createOrderedLongKeyTableRecords(int recordCnt, long keyIncrement,
 			int varDataSize) throws IOException {
 		long txId = dbh.startTransaction();
-		Table table = DBTestUtils.createLongKeyTable(dbh, table1Name, DBTestUtils.ALL_TYPES, false);
+		Table table =
+			DBTestUtils.createLongKeyTable(dbh, table1Name, DBTestUtils.ALL_TYPES, false, false);
 		long key = 0;
 		Record[] recs = new Record[recordCnt];
 		for (int i = 0; i < recordCnt; i++) {
@@ -163,7 +163,8 @@ public class DBTest extends AbstractGenericTest {
 	@Test
 	public void testCreateLongKeyTable() throws IOException {
 		long txId = dbh.startTransaction();
-		Table table = DBTestUtils.createLongKeyTable(dbh, "TABLE1", DBTestUtils.ALL_TYPES, false);
+		Table table =
+			DBTestUtils.createLongKeyTable(dbh, "TABLE1", DBTestUtils.ALL_TYPES, false, false);
 		dbh.endTransaction(txId, true);
 		String[] names = table.getSchema().getFieldNames();
 		assertTrue(Arrays.equals(DBTestUtils.getFieldNames(DBTestUtils.ALL_TYPES), names));
@@ -181,7 +182,7 @@ public class DBTest extends AbstractGenericTest {
 	@Test
 	public void testStoredCreateLongKeyTable() throws IOException {
 		long txId = dbh.startTransaction();
-		DBTestUtils.createLongKeyTable(dbh, "TABLE1", DBTestUtils.ALL_TYPES, false);
+		DBTestUtils.createLongKeyTable(dbh, "TABLE1", DBTestUtils.ALL_TYPES, false, false);
 		dbh.endTransaction(txId, true);
 		saveAsAndReopen(dbName);
 		Table table = dbh.getTable("TABLE1");
@@ -209,7 +210,7 @@ public class DBTest extends AbstractGenericTest {
 		long txId = dbh.startTransaction();
 		for (int i = 0; i < cnt; i++) {
 			DBTestUtils.createLongKeyTable(dbh, "TABLE" + i, i % (DBTestUtils.MAX_SCHEMA_TYPE + 1),
-				false);
+				false, false);
 		}
 		dbh.endTransaction(txId, true);
 		assertEquals(cnt, dbh.getTableCount());
@@ -247,7 +248,7 @@ public class DBTest extends AbstractGenericTest {
 		// All schema fields are indexed
 		long txId = dbh.startTransaction();
 		for (int i = 0; i <= DBTestUtils.MAX_SCHEMA_TYPE; i++) {
-			DBTestUtils.createLongKeyTable(dbh, "TABLE" + i, i, true);
+			DBTestUtils.createLongKeyTable(dbh, "TABLE" + i, i, true, false);
 		}
 		assertEquals(DBTestUtils.MAX_SCHEMA_TYPE + 1, dbh.getTableCount());
 		dbh.endTransaction(txId, true);
@@ -414,7 +415,7 @@ public class DBTest extends AbstractGenericTest {
 
 		}
 		Schema schema = lastTable.getSchema();
-		assertEquals(schema.getFields().length, indexCnt);
+		assertEquals(schema.getFields().length - 2, indexCnt); // ByteField and BooleanField do not support indexing
 		assertEquals(totalTableCnt, tableCnt);
 
 	}
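The corrected assertion above accounts for the two column types in the ALL_TYPES schema that cannot back an index table. As a rough illustration of that restriction (a sketch only; the real filtering lives in DBTestUtils.getAllowedIndexColumns, of which only the opening lines appear later in this diff, and the instanceof checks and java.util.ArrayList import here are assumptions):

    // Sketch: collect the column ordinals that may be indexed, skipping ByteField and
    // BooleanField, which the comment above notes do not support indexing.
    static int[] allowedIndexColumns(Field[] columnFields) {
        ArrayList<Integer> allowed = new ArrayList<>();
        for (int i = 0; i < columnFields.length; i++) {
            if (columnFields[i] instanceof ByteField || columnFields[i] instanceof BooleanField) {
                continue; // not indexable
            }
            allowed.add(i);
        }
        int[] result = new int[allowed.size()];
        for (int i = 0; i < result.length; i++) {
            result[i] = allowed.get(i);
        }
        return result;
    }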
@@ -41,9 +41,10 @@ public class DBTestUtils {
 	static final int SINGLE_STRING = 6;
 	static final int SINGLE_BINARY = 7;
 	static final int SINGLE_FIXED = 8;
-	static final int ALL_TYPES = 9;
+	static final int ALL_FIXED = 9;
+	static final int ALL_TYPES = 10;
 
-	static final int MAX_SCHEMA_TYPE = 9;
+	static final int MAX_SCHEMA_TYPE = 10;
 
 	//@formatter:off
 	private static final Field[][] schemaFields = {
@@ -56,18 +57,22 @@ public class DBTestUtils {
 		{ StringField.INSTANCE },
 		{ BinaryField.INSTANCE },
 		{ FixedField10.INSTANCE },
+		{ BooleanField.INSTANCE, ByteField.INSTANCE, IntField.INSTANCE, ShortField.INSTANCE,
+			LongField.INSTANCE, FixedField10.INSTANCE },
 		{ BooleanField.INSTANCE, ByteField.INSTANCE, IntField.INSTANCE, ShortField.INSTANCE,
 			LongField.INSTANCE, StringField.INSTANCE, BinaryField.INSTANCE, FixedField10.INSTANCE } };
 	//@formatter:on
 
 	private static final int[][] schemaIndexedColumns =
-		{ {}, {}, {}, { 0 }, { 0 }, { 0 }, { 0 }, { 0 }, { 0 }, { 2, 3, 4, 5, 6, 7 } };
+		{ {}, {}, {}, { 0 }, { 0 }, { 0 }, { 0 }, { 0 }, { 0 }, { 2, 3, 4, 5 },
+			{ 2, 3, 4, 5, 6, 7 } };
 
 	//@formatter:off
 	private static final String[][] schemaFieldNames = {
 		{}, // no columns
 		{ "Boolean" }, { "Byte" }, { "Int" }, { "Short" }, { "Long" },
 		{ "String" }, { "Binary" }, { "Fixed" },
+		{ "Boolean", "Byte", "Int", "Short", "Long", "Fixed" },
 		{ "Boolean", "Byte", "Int", "Short", "Long", "String", "Binary", "Fixed" }
 	};
 	//@formatter:on
@@ -118,30 +123,51 @@ public class DBTestUtils {
 	 * @param name name of table
 	 * @param schemaType type of schema (use static identifier)
 	 * @param createIndex all fields will be indexed if true
+	 * @param useSparseColumns all fields will use sparse storage if true
 	 * @return Table new table
 	 * @throws IOException
 	 */
-	static Table createLongKeyTable(DBHandle db, String name, int schemaType, boolean createIndex)
+	static Table createLongKeyTable(DBHandle db, String name, int schemaType, boolean createIndex,
+			boolean useSparseColumns)
 			throws IOException {
 		Table t;
 		int indexCnt = 0;
+		int[] indexedColumns = null;
+
+		Schema[] schemas = longKeySchemas;
+		if (useSparseColumns) {
+			for (int i = 0; i < schemas.length; i++) {
+				schemas[i] = createSparseSchema(schemas[i]);
+			}
+		}
+
 		if (createIndex) {
 			indexCnt = getIndexedColumnCount(schemaType);
-			int[] indexedColumns = getAllowedIndexColumns(schemaFields[schemaType]);
-			t = db.createTable(name, longKeySchemas[schemaType], indexedColumns);
-		}
-		else {
-			t = db.createTable(name, longKeySchemas[schemaType]);
+			indexedColumns = getAllowedIndexColumns(schemaFields[schemaType]);
 		}
+
+		t = db.createTable(name, schemas[schemaType], indexedColumns);
+
 		Assert.assertEquals(name, t.getName());
 		Assert.assertEquals(indexCnt, t.getIndexedColumns().length);
 		Assert.assertEquals(Long.MIN_VALUE, t.getMaxKey());
 		Assert.assertEquals(0, t.getRecordCount());
-		Assert.assertEquals(longKeySchemas[schemaType], t.getSchema());
+		Assert.assertEquals(schemas[schemaType], t.getSchema());
 		Assert.assertTrue(t.useLongKeys());
 		return t;
 	}
 
+	private static Schema createSparseSchema(Schema schema) {
+
+		Field[] fields = schema.getFields();
+		int[] sparseColumnIndexes = new int[fields.length];
+		for (int i = 0; i < sparseColumnIndexes.length; i++) {
+			sparseColumnIndexes[i] = i;
+		}
+		return new Schema(schema.getVersion(), schema.getKeyFieldType(), schema.getKeyName(),
+			fields, schema.getFieldNames(), sparseColumnIndexes);
+	}
+
 	static int[] getAllowedIndexColumns(Field[] columnFields) {
 		ArrayList<Integer> list = new ArrayList<>();
 		for (int i = 0; i < columnFields.length; i++) {
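createSparseSchema above marks every column ordinal of an existing schema as sparse while preserving its version, key type, key name, fields, and field names. A hedged usage sketch of the new flag, reusing the transaction pattern from the tests earlier in this diff (the table name is illustrative):

    long txId = dbh.startTransaction();
    try {
        // createIndex = false, useSparseColumns = true: all columns of the ALL_TYPES
        // schema are stored sparsely in the resulting table.
        Table sparse =
            DBTestUtils.createLongKeyTable(dbh, "SPARSE_TABLE", DBTestUtils.ALL_TYPES, false, true);
    }
    finally {
        dbh.endTransaction(txId, true);
    }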
@@ -162,25 +188,40 @@ public class DBTestUtils {
 	 * @param name name of table
 	 * @param schemaType type of schema (use static identifier)
 	 * @param createIndex all fields will be indexed if true
+	 * @param useSparseColumns all fields will use sparse storage if true
 	 * @return Table new table
 	 * @throws IOException
 	 */
-	static Table createFixedKeyTable(DBHandle db, String name, int schemaType, boolean createIndex)
+	static Table createFixedKeyTable(DBHandle db, String name, int schemaType, boolean createIndex,
+			boolean useSparseColumns)
 			throws IOException {
 		Table t;
 		int indexCnt = 0;
+		int[] indexedColumns = null;
+
+		Schema[] schemas = fixedKeySchemas;
+		if (useSparseColumns) {
+			for (int i = 0; i < schemas.length; i++) {
+				schemas[i] = createSparseSchema(schemas[i]);
+			}
+		}
+
+		if (createIndex) {
+			indexCnt = getIndexedColumnCount(schemaType);
+			indexedColumns = getAllowedIndexColumns(schemaFields[schemaType]);
+		}
+
+		t = db.createTable(name, schemas[schemaType], indexedColumns);
+
 		if (createIndex) {
-			int[] indexedColumns = getAllowedIndexColumns(schemaFields[schemaType]);
-			t = db.createTable(name, fixedKeySchemas[schemaType], indexedColumns);
 			Assert.assertArrayEquals(schemaIndexedColumns[schemaType], t.getIndexedColumns());
 		}
-		else {
-			t = db.createTable(name, fixedKeySchemas[schemaType]);
-			Assert.assertEquals(0, t.getIndexedColumns().length);
-		}
+
 		Assert.assertEquals(name, t.getName());
+		Assert.assertEquals(indexCnt, t.getIndexedColumns().length);
+		Assert.assertEquals(Long.MIN_VALUE, t.getMaxKey());
 		Assert.assertEquals(0, t.getRecordCount());
-		Assert.assertEquals(fixedKeySchemas[schemaType], t.getSchema());
+		Assert.assertEquals(schemas[schemaType], t.getSchema());
 		Assert.assertTrue(!t.useLongKeys());
 		return t;
 	}
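The fixed-key variant applies the same per-schema conversion. As a standalone illustration of the six-argument Schema constructor that createSparseSchema calls (argument order taken from that call; the field choices and names below are illustrative, not from this diff):

    // Sketch: a version-0 schema keyed by a LongField, where only the String and
    // Binary columns (ordinals 1 and 2) use sparse storage.
    Field[] fields = { IntField.INSTANCE, StringField.INSTANCE, BinaryField.INSTANCE };
    String[] names = { "Int", "String", "Binary" };
    int[] sparseColumns = { 1, 2 };
    Schema partlySparse = new Schema(0, LongField.INSTANCE, "Key", fields, names, sparseColumns);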
@@ -15,8 +15,7 @@
  */
 package db;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.*;
 
 import org.junit.*;
 
@@ -47,7 +46,8 @@ public class TableConcurrencyTest extends AbstractGenericTest {
 
 		dbh = new DBHandle(BUFFER_SIZE, CACHE_SIZE);
 		txId = dbh.startTransaction();
-		table1 = DBTestUtils.createLongKeyTable(dbh, "TABLE1", DBTestUtils.SINGLE_LONG, false);
+		table1 =
+			DBTestUtils.createLongKeyTable(dbh, "TABLE1", DBTestUtils.SINGLE_LONG, false, false);
 		table2 = DBTestUtils.createBinaryKeyTable(dbh, "TABLE2", DBTestUtils.SINGLE_LONG, false);
 		schema1 = table1.getSchema();
 		schema2 = table2.getSchema();
@@ -50,8 +50,11 @@ class ProgramUserDataDB extends DomainObjectAdapterDB implements ProgramUserData
 	/**
 	 * DB_VERSION should be incremented any time a change is made to the overall
 	 * database schema associated with any of the managers.
+	 *
+	 * NOTE: 19-Jun-2020 Corrections to DB index tables should have no impact on user data
+	 * PropertyMaps which are not indexed.
 	 */
-	static final int DB_VERSION = 2;
+	static final int DB_VERSION = 1;
 
 	/**
 	 * UPGRADE_REQUIRED_BFORE_VERSION should be changed to DB_VERSION any time the
@@ -59,7 +62,7 @@ class ProgramUserDataDB extends DomainObjectAdapterDB implements ProgramUserData
 	 * until upgrade is performed). It is assumed that read-only mode is supported
 	 * if the data's version is >= UPGRADE_REQUIRED_BEFORE_VERSION and <= DB_VERSION.
 	 */
-	private static final int UPGRADE_REQUIRED_BEFORE_VERSION = 2;
+	private static final int UPGRADE_REQUIRED_BEFORE_VERSION = 1;
 
 	private static final String TABLE_NAME = "ProgramUserData";
 	private final static Field[] COL_FIELDS = new Field[] { StringField.INSTANCE };
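The added javadoc above records why the version drops back to 1: the GT-3294 index-table corrections do not touch user-data PropertyMaps, which are never indexed, so no ProgramUserData upgrade is needed. A small sketch of the compatibility window the two constants describe, inferred only from the javadoc in this hunk (storedVersion is a hypothetical value read from an existing database):

    boolean upgradeRequired = storedVersion < UPGRADE_REQUIRED_BEFORE_VERSION;      // must upgrade before writing
    boolean readOnlySupported = storedVersion >= UPGRADE_REQUIRED_BEFORE_VERSION
        && storedVersion <= DB_VERSION;                                             // may open read-only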
@@ -776,19 +776,18 @@ abstract public class DataTypeManagerDB implements DataTypeManager {
 			currentHandler = currentHandler.getSubsequentHandler();
 		}
 
-		SourceArchive sourceArchive = dataType.getSourceArchive();
-		if (sourceArchive != null && sourceArchive.getArchiveType() == ArchiveType.BUILT_IN) {
-			return resolveBuiltIn(dataType, currentHandler);
-		}
-
 		resolvedDataType = getCachedResolve(dataType);
 		if (resolvedDataType != null) {
 			return resolvedDataType;
 		}
 
+		SourceArchive sourceArchive = dataType.getSourceArchive();
+		if (sourceArchive != null && sourceArchive.getArchiveType() == ArchiveType.BUILT_IN) {
+			resolvedDataType = resolveBuiltIn(dataType, currentHandler);
+		}
+		else if (sourceArchive == null || dataType.getUniversalID() == null) {
 			// if the dataType has no source or it has no ID (datatypes with no ID are
 			// always local i.e. pointers)
-		if (sourceArchive == null || dataType.getUniversalID() == null) {
 			resolvedDataType = resolveNoSourceDataType(dataType, currentHandler);
 		}
 		else if (!sourceArchive.getSourceArchiveID().equals(getUniversalID()) &&
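Read without the diff markers, the reordered logic consults the resolve cache before classifying the data type's source archive. A condensed sketch of the resulting flow (identifiers are exactly those visible in the hunk above; the final branch continues past this excerpt):

    resolvedDataType = getCachedResolve(dataType);
    if (resolvedDataType != null) {
        return resolvedDataType;
    }
    SourceArchive sourceArchive = dataType.getSourceArchive();
    if (sourceArchive != null && sourceArchive.getArchiveType() == ArchiveType.BUILT_IN) {
        resolvedDataType = resolveBuiltIn(dataType, currentHandler);
    }
    else if (sourceArchive == null || dataType.getUniversalID() == null) {
        // datatypes with no source or no universal ID are always local (e.g. pointers)
        resolvedDataType = resolveNoSourceDataType(dataType, currentHandler);
    }
    // else: resolve against the external source archive (beyond this excerpt)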
@@ -3690,11 +3689,6 @@ abstract public class DataTypeManagerDB implements DataTypeManager {
 
 	@Override
 	public DataType getDataType(SourceArchive sourceArchive, UniversalID datatypeID) {
-		if (datatypeID.getValue() == 0) {
-			// DT remove this check
-			throw new AssertException("should not be called with id of 0");
-		}
-
 		UniversalID sourceID = sourceArchive == null ? null : sourceArchive.getSourceArchiveID();
 		return idsToDataTypeMap.getDataType(sourceID, datatypeID);
 	}
@@ -29,9 +29,9 @@ import ghidra.util.Saveable;
  */
 public class GenericSaveable implements Saveable {
 
-	Record record;
-	Schema schema;
-	Class<?>[] fieldClasses = new Class<?>[0];
+	final Record record;
+	final Schema schema;
+	final Class<?>[] fieldClasses = new Class<?>[0];
 
 	/**
 	 * Creates a generic saveable that can be used by the property map manager