Mirror of https://github.com/NationalSecurityAgency/ghidra.git (synced 2025-10-06 03:50:02 +02:00)

Commit e3aebe3adb (parent 7af55169c0): Replace uses of LongObjectHashtable with Java's HashMap

22 changed files with 407 additions and 767 deletions
|
@@ -23,7 +23,6 @@ import org.jdom.input.SAXBuilder;
import org.jdom.output.XMLOutputter;

import ghidra.framework.store.*;
import ghidra.util.datastruct.LongObjectHashtable;
import ghidra.util.xml.GenericXMLOutputter;
import ghidra.util.xml.XmlUtilities;

@@ -40,7 +39,7 @@ class CheckoutManager {
private long nextCheckoutId = 1;

// checkouts maps long checkoutId to ItemCheckoutStatus objects
private LongObjectHashtable<ItemCheckoutStatus> checkouts;
private Map<Long, ItemCheckoutStatus> checkouts;

/**
* Constructor.

@@ -53,7 +52,7 @@ class CheckoutManager {
CheckoutManager(LocalFolderItem item, boolean create) throws IOException {
this.item = item;
if (create) {
checkouts = new LongObjectHashtable<ItemCheckoutStatus>();
checkouts = new HashMap<>();
writeCheckoutsFile();
}
}

@@ -164,8 +163,7 @@ class CheckoutManager {
*/
synchronized boolean isCheckedOut(int version) throws IOException {
validate();
long[] ids = checkouts.getKeys();
for (long id : ids) {
for (long id : checkouts.keySet()) {
ItemCheckoutStatus coStatus = checkouts.get(id);
if (coStatus.getCheckoutVersion() == version) {
return true;

@@ -199,13 +197,9 @@ class CheckoutManager {
*/
synchronized ItemCheckoutStatus[] getAllCheckouts() throws IOException {
validate();
long[] ids = checkouts.getKeys();
Arrays.sort(ids);
ItemCheckoutStatus[] list = new ItemCheckoutStatus[ids.length];
for (int i = 0; i < ids.length; i++) {
list[i] = checkouts.get(ids[i]);
}
return list;
List<ItemCheckoutStatus> list = new ArrayList<>(checkouts.values());
Collections.sort(list, (a, b) -> (int) (a.getCheckoutId() - b.getCheckoutId()));
return list.toArray(new ItemCheckoutStatus[list.size()]);
}

/**

@@ -219,7 +213,6 @@ class CheckoutManager {
checkouts = null;
}
if (checkouts == null) {
LongObjectHashtable<ItemCheckoutStatus> oldCheckouts = checkouts;
long oldNextCheckoutId = nextCheckoutId;
boolean success = false;
try {

@@ -229,7 +222,7 @@ class CheckoutManager {
finally {
if (!success) {
nextCheckoutId = oldNextCheckoutId;
checkouts = oldCheckouts;
checkouts = null;
}
}
}

@@ -243,7 +236,7 @@ class CheckoutManager {
@SuppressWarnings("unchecked")
private void readCheckoutsFile() throws IOException {

checkouts = new LongObjectHashtable<ItemCheckoutStatus>();
checkouts = new HashMap<>();

File checkoutsFile = getCheckoutsFile();
if (!checkoutsFile.exists()) {

@@ -317,8 +310,7 @@ class CheckoutManager {
Element root = new Element("CHECKOUT_LIST");
root.setAttribute("NEXT_ID", Long.toString(nextCheckoutId));

long[] ids = checkouts.getKeys();
for (long id : ids) {
for (long id : checkouts.keySet()) {
ItemCheckoutStatus coStatus = checkouts.get(id);
// TRANSIENT checkout data must not be persisted - the existence
// of such checkouts is retained in-memory only
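The getAllCheckouts() hunk above swaps a sorted long[] key walk for sorting the map's values by checkout id. A minimal sketch of that pattern follows, with a hypothetical stand-in type; note that the diff's (int) (a.getCheckoutId() - b.getCheckoutId()) comparator can overflow when two ids are far apart, so Comparator.comparingLong is the safer spelling.

import java.util.*;

class CheckoutSortSketch {
    // Hypothetical stand-in for ItemCheckoutStatus; only the id matters here.
    static class Checkout {
        private final long checkoutId;
        Checkout(long checkoutId) { this.checkoutId = checkoutId; }
        long getCheckoutId() { return checkoutId; }
    }

    static Checkout[] getAllCheckouts(Map<Long, Checkout> checkouts) {
        List<Checkout> list = new ArrayList<>(checkouts.values());
        // Compare the long ids directly instead of casting a difference to int,
        // which would overflow for ids more than Integer.MAX_VALUE apart.
        list.sort(Comparator.comparingLong(Checkout::getCheckoutId));
        return list.toArray(new Checkout[0]);
    }
}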
@ -15,10 +15,9 @@
|
|||
*/
|
||||
package ghidra.util.datastruct;
|
||||
|
||||
|
||||
import java.lang.ref.ReferenceQueue;
|
||||
import java.lang.ref.WeakReference;
|
||||
import java.util.LinkedList;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* <code>ObjectClass</code> provides a fixed-size long-key-based object cache.
|
||||
|
@ -30,23 +29,23 @@ import java.util.LinkedList;
|
|||
* an object in the weak cache.
|
||||
*/
|
||||
public class ObjectCache {
|
||||
|
||||
private LongObjectHashtable<Object> hashTable;
|
||||
|
||||
private Map<Long, KeyedSoftReference<?>> hashTable;
|
||||
private ReferenceQueue<Object> refQueue;
|
||||
private LinkedList<Object> hardCache;
|
||||
private int hardCacheSize;
|
||||
|
||||
|
||||
/**
|
||||
* Construct a keyed-object cache of size hardCacheSize.
|
||||
* @param hardCacheSize hard cache size.
|
||||
*/
|
||||
public ObjectCache(int hardCacheSize) {
|
||||
this.hardCacheSize = hardCacheSize;
|
||||
hashTable = new LongObjectHashtable<>();
|
||||
hashTable = new HashMap<>();
|
||||
refQueue = new ReferenceQueue<Object>();
|
||||
hardCache = new LinkedList<Object>();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Determine if the keyed-object exists in the cache.
|
||||
* @param key object key
|
||||
|
@ -54,34 +53,35 @@ public class ObjectCache {
|
|||
*/
|
||||
public synchronized boolean contains(long key) {
|
||||
processQueue();
|
||||
return hashTable.contains(key);
|
||||
return hashTable.containsKey(key);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Get the object from cache which corresponds to the specified key.
|
||||
* @param key object key
|
||||
* @return cached object
|
||||
*/
|
||||
public synchronized Object get(long key) {
|
||||
WeakReference<?> ref = (WeakReference<?>)hashTable.get(key);
|
||||
WeakReference<?> ref = hashTable.get(key);
|
||||
if (ref != null) {
|
||||
Object obj = ref.get();
|
||||
if (obj == null) {
|
||||
hashTable.remove(key);
|
||||
}
|
||||
addToHardCache(obj);
|
||||
return obj;
|
||||
return obj;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Return the hard cache size
|
||||
* @return the hard cache size
|
||||
*/
|
||||
public int size() {
|
||||
return hardCacheSize;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Adjust the hard cache size
|
||||
* @param size new hard cache size
|
||||
|
@ -92,7 +92,7 @@ public class ObjectCache {
|
|||
}
|
||||
this.hardCacheSize = size;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Add an object to the cache
|
||||
* @param key object key
|
||||
|
@ -111,16 +111,14 @@ public class ObjectCache {
|
|||
*/
|
||||
public synchronized void clear() {
|
||||
processQueue();
|
||||
long[] keys = hashTable.getKeys();
|
||||
for ( long element : keys ) {
|
||||
KeyedSoftReference<?> ref = (KeyedSoftReference<?>)hashTable.get(element);
|
||||
for (KeyedSoftReference<?> ref : hashTable.values()) {
|
||||
ref.clear();
|
||||
}
|
||||
|
||||
hashTable.removeAll();
|
||||
|
||||
hashTable.clear();
|
||||
refQueue = new ReferenceQueue<Object>();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Remove the specified range of keyed objects from both hard and weak caches.
|
||||
* A cache range should be cleared when the corresponding objects have become invalid.
|
||||
|
@ -128,21 +126,16 @@ public class ObjectCache {
|
|||
* @param endKey maximum object key value
|
||||
*/
|
||||
public synchronized void remove(long startKey, long endKey) {
|
||||
if ((endKey>>1)-(startKey>>1) < (hashTable.size()>>1)) {
|
||||
for(long i=startKey;i<=endKey;i++) {
|
||||
if ((endKey >> 1) - (startKey >> 1) < (hashTable.size() >> 1)) {
|
||||
for (long i = startKey; i <= endKey; i++) {
|
||||
remove(i);
|
||||
}
|
||||
}
|
||||
else {
|
||||
long[] keys = hashTable.getKeys();
|
||||
for ( long element : keys ) {
|
||||
if (element >= startKey && element <= endKey) {
|
||||
remove(element);
|
||||
}
|
||||
}
|
||||
hashTable.keySet().removeIf(key -> (key >= startKey && key <= endKey));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Remove the specified keyed object from both hard and weak caches.
|
||||
* An object should be removed from the cache when it becomes invalid.
|
||||
|
@ -150,13 +143,13 @@ public class ObjectCache {
|
|||
*/
|
||||
public synchronized void remove(long key) {
|
||||
processQueue();
|
||||
KeyedSoftReference<?> ref = (KeyedSoftReference<?>)hashTable.get(key);
|
||||
KeyedSoftReference<?> ref = hashTable.get(key);
|
||||
if (ref != null) {
|
||||
ref.clear();
|
||||
hashTable.remove(key);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Add the specified object to the hard cache.
|
||||
* @param obj object
|
||||
|
@ -167,23 +160,23 @@ public class ObjectCache {
|
|||
hardCache.removeFirst();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Cleanup weak cache
|
||||
*/
|
||||
private void processQueue() {
|
||||
KeyedSoftReference<?> ref;
|
||||
while((ref = (KeyedSoftReference<?>)refQueue.poll()) != null) {
|
||||
hashTable.remove(ref.getKey());
|
||||
while ((ref = (KeyedSoftReference<?>) refQueue.poll()) != null) {
|
||||
hashTable.remove(ref.getKey());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Provides a weak wrapper for a keyed-object
|
||||
*/
|
||||
private class KeyedSoftReference<T> extends WeakReference<T> {
|
||||
private long key;
|
||||
|
||||
|
||||
/**
|
||||
* Construct a keyed-object reference
|
||||
* @param key object key
|
||||
|
@ -194,13 +187,14 @@ public class ObjectCache {
|
|||
super(obj, queue);
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Return object key
|
||||
* @return object key
|
||||
*/
|
||||
long getKey() {
|
||||
return key;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
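For the ObjectCache hunks above, the old LongObjectHashtable calls map onto java.util.Map almost mechanically: contains becomes containsKey, getKeys becomes keySet, removeAll becomes clear, and the long keys are autoboxed to Long. A minimal sketch of that mapping, including the ranged removal via keySet().removeIf used by remove(startKey, endKey); the value type here is only illustrative.

import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Map;

class CacheMapSketch {
    private final Map<Long, WeakReference<Object>> table = new HashMap<>();

    boolean contains(long key) {
        return table.containsKey(key);      // was hashTable.contains(key)
    }

    void clearAll() {
        table.clear();                      // was hashTable.removeAll()
    }

    void remove(long startKey, long endKey) {
        // was: walk hashTable.getKeys() and remove matching keys one by one
        table.keySet().removeIf(key -> key >= startKey && key <= endKey);
    }
}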
@ -1,163 +0,0 @@
|
|||
/* ###
|
||||
* IP: GHIDRA
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package ghidra.util.datastruct;
|
||||
|
||||
import java.lang.ref.ReferenceQueue;
|
||||
import java.lang.ref.SoftReference;
|
||||
|
||||
/**
|
||||
* Soft reference cache class that caches objects for long keys. This cache will
|
||||
* store at most "cacheSize" number of entries, but since it uses soft references
|
||||
* for the cached values, those object may be reclaimed.
|
||||
*/
|
||||
|
||||
public class SoftCacheLongKeyMap {
|
||||
private int cacheSize;
|
||||
private ReferenceQueue<Object> refQueue;
|
||||
private Entry head;
|
||||
private LongObjectHashtable<Object> map;
|
||||
|
||||
/**
|
||||
* Construct a new SoftCacheLongKeyMap that caches at most cacheSize number of entries
|
||||
* @param cacheSize the max number of entries to cache.
|
||||
*/
|
||||
public SoftCacheLongKeyMap(int cacheSize) {
|
||||
this.cacheSize = Math.max(cacheSize, 10);
|
||||
map = new LongObjectHashtable<>();
|
||||
head = new Entry(0, null);
|
||||
head.nextEntry = head;
|
||||
head.prevEntry = head;
|
||||
refQueue = new ReferenceQueue<>();
|
||||
}
|
||||
/**
|
||||
* Caches the given value for the given key
|
||||
* @param key the key
|
||||
* @param value the cached value for the given key
|
||||
* @return any previous object that is cached for the given key.
|
||||
*/
|
||||
public Object put(long key, Object value) {
|
||||
processQueue();
|
||||
if (map.size() == cacheSize) {
|
||||
remove(head.nextEntry.key);
|
||||
}
|
||||
Object obj = map.remove(key);
|
||||
Entry entry = new Entry(key, value);
|
||||
head.addBefore(entry);
|
||||
map.put(key, entry);
|
||||
return obj;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the cached value for the given key, if it exists.
|
||||
* @param key the key for which to get a cached value.
|
||||
* @return the object that was cached for that key, or null if none exists.
|
||||
*/
|
||||
public Object get(long key) {
|
||||
processQueue();
|
||||
Entry entry = (Entry)map.get(key);
|
||||
if (entry != null) {
|
||||
entry.delete();
|
||||
head.addBefore(entry);
|
||||
return entry.get();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the number of items in the cache. Can change from one call to
|
||||
* the next even if no entries were added or deleted.
|
||||
*/
|
||||
public int size() {
|
||||
processQueue();
|
||||
return map.size();
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes all entries from the cache
|
||||
*/
|
||||
public void clear() {
|
||||
map.removeAll();
|
||||
refQueue = new ReferenceQueue<>();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the cache is empty. If true, it will remain empty until a new
|
||||
* entry is added. However if false, it may return true even if nothing was removed
|
||||
*/
|
||||
public boolean isEmpty() {
|
||||
processQueue();
|
||||
return map.size() == 0;
|
||||
}
|
||||
/**
|
||||
* Returns true if the cache currently contains the given key. Not useful since even
|
||||
* if it returns true, there is no guarentee that a get will work after containsKey
|
||||
* returns true.
|
||||
* @param key the Key to check
|
||||
*/
|
||||
public boolean containsKey(long key) {
|
||||
processQueue();
|
||||
return map.contains(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes any cached value for the given key.
|
||||
* @param key the key for which to remove cached values.
|
||||
* @return the cached object that was stored for the given key, or null
|
||||
*/
|
||||
public Object remove(long key) {
|
||||
Entry entry = (Entry)map.remove(key);
|
||||
if (entry != null) {
|
||||
entry.delete();
|
||||
return entry.get();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a list of all current keys.
|
||||
*/
|
||||
public long[] getKeys() {
|
||||
processQueue();
|
||||
return map.getKeys();
|
||||
}
|
||||
private void processQueue() {
|
||||
Entry entry;
|
||||
while((entry = (Entry)refQueue.poll()) != null) {
|
||||
remove(entry.key);
|
||||
}
|
||||
}
|
||||
|
||||
class Entry extends SoftReference<Object> {
|
||||
long key;
|
||||
Entry nextEntry;
|
||||
Entry prevEntry;
|
||||
Entry(long key, Object value) {
|
||||
super(value, refQueue);
|
||||
this.key = key;
|
||||
}
|
||||
void addBefore(Entry entry) {
|
||||
entry.nextEntry = this;
|
||||
entry.prevEntry = this.prevEntry;
|
||||
this.prevEntry.nextEntry = entry;
|
||||
this.prevEntry = entry;
|
||||
}
|
||||
void delete() {
|
||||
prevEntry.nextEntry = nextEntry;
|
||||
nextEntry.prevEntry = prevEntry;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
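SoftCacheLongKeyMap, deleted above along with its unit test later in this commit, was a hand-rolled least-recently-used cache of soft references keyed by long. The commit migrates callers off the class rather than replacing it, but for illustration, a rough equivalent can be assembled from standard collections; this is a sketch under that assumption, not code from the commit.

import java.lang.ref.SoftReference;
import java.util.LinkedHashMap;
import java.util.Map;

class SoftLruSketch<V> {
    private final Map<Long, SoftReference<V>> map;

    SoftLruSketch(int cacheSize) {
        // An access-ordered LinkedHashMap evicts the least recently used entry
        // once cacheSize is exceeded; values can additionally vanish whenever
        // the GC clears their soft references, much as with the deleted class.
        map = new LinkedHashMap<Long, SoftReference<V>>(16, 0.75f, true) {
            @Override
            protected boolean removeEldestEntry(Map.Entry<Long, SoftReference<V>> eldest) {
                return size() > cacheSize;
            }
        };
    }

    V put(long key, V value) {
        SoftReference<V> old = map.put(key, new SoftReference<>(value));
        return old == null ? null : old.get();
    }

    V get(long key) {
        SoftReference<V> ref = map.get(key);
        return ref == null ? null : ref.get();
    }
}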
@@ -15,8 +15,9 @@
*/
package ghidra.util.graph.attributes;

import ghidra.util.datastruct.LongObjectHashtable;
import ghidra.util.exception.NoValueException;
import java.util.HashMap;
import java.util.Map;

import ghidra.util.graph.KeyIndexableSet;
import ghidra.util.graph.KeyedObject;

@@ -25,7 +26,7 @@ import ghidra.util.graph.KeyedObject;
*/
public class ObjectAttribute<T extends KeyedObject> extends Attribute<T> {
//private Object[] values;
private LongObjectHashtable<Object> values;
private Map<Long, Object> values;
private static String attributeType = AttributeManager.OBJECT_TYPE;

/** Constructor.

@@ -36,7 +37,7 @@ public class ObjectAttribute<T extends KeyedObject> extends Attribute<T> {
public ObjectAttribute(String name, KeyIndexableSet<T> set) {
super(name, set);
//this.values = new Object[set.capacity()];
values = new LongObjectHashtable<Object>();
values = new HashMap<>();
}

/** Set the value of this attribute for the specified KeyedObject.

@@ -93,7 +94,7 @@ public class ObjectAttribute<T extends KeyedObject> extends Attribute<T> {
/** Removes all assigned values of this attribute. */
@Override
public void clear() {
values.removeAll();
values.clear();
}

/** Return the attribute of the specified KeyedObject as a String.

@@ -101,7 +102,7 @@ public class ObjectAttribute<T extends KeyedObject> extends Attribute<T> {
@Override
public String getValueAsString(KeyedObject o) {
Object v;
if (values.contains(o.key())) {
if (values.containsKey(o.key())) {
v = getValue(o);
if (v != null) {
return v.toString();
@ -15,20 +15,17 @@
|
|||
*/
|
||||
package ghidra.util.graph.attributes;
|
||||
|
||||
import ghidra.util.datastruct.LongObjectHashtable;
|
||||
import ghidra.util.exception.NoValueException;
|
||||
import java.util.*;
|
||||
|
||||
import ghidra.util.graph.KeyIndexableSet;
|
||||
import ghidra.util.graph.KeyedObject;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Comparator;
|
||||
|
||||
/** This class provides a storage mechanism for String-valued information about
|
||||
* the elements of a KeyIndexableSet, e.g. the vertices of a DirectedGraph.
|
||||
*/
|
||||
public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
||||
//private String[] values;
|
||||
private ghidra.util.datastruct.LongObjectHashtable values;
|
||||
private Map<Long, String> values;
|
||||
private static String attributeType = AttributeManager.STRING_TYPE;
|
||||
|
||||
/** Constructor.
|
||||
|
@ -38,7 +35,7 @@ public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
|||
*/
|
||||
public StringAttribute(String name, KeyIndexableSet<T> set) {
|
||||
super(name, set);
|
||||
this.values = new LongObjectHashtable(set.capacity());// String[set.capacity()];
|
||||
this.values = new HashMap<>(set.capacity());// String[set.capacity()];
|
||||
}
|
||||
|
||||
/** Set the value of this attribute for the specified KeyedObject.
|
||||
|
@ -49,8 +46,9 @@ public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
|||
* not a member of the owningSet.
|
||||
*/
|
||||
public boolean setValue(T o, String value) {
|
||||
if (value == null)
|
||||
if (value == null) {
|
||||
return false;
|
||||
}
|
||||
if (owningSet().contains(o)) {
|
||||
//values[ owningSet().index( o ) ] = value;
|
||||
values.put(o.key(), value);
|
||||
|
@ -66,7 +64,7 @@ public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
|||
public String getValue(KeyedObject o) //throws NoValueException
|
||||
{
|
||||
//return values[ owningSet().index( o ) ];
|
||||
return (String) values.get(o.key());
|
||||
return values.get(o.key());
|
||||
}
|
||||
|
||||
// /** Debug printing. */
|
||||
|
@ -129,8 +127,9 @@ public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
|||
else if ((ko1.key() - ko2.key()) > 0) {
|
||||
return +1;
|
||||
}
|
||||
else
|
||||
else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
//ko1 is ok, ko2 fails.
|
||||
return -1;
|
||||
|
@ -144,8 +143,9 @@ public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
|||
else if ((ko1.key() - ko2.key()) > 0) {
|
||||
return +1;
|
||||
}
|
||||
else
|
||||
else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -160,7 +160,7 @@ public class StringAttribute<T extends KeyedObject> extends Attribute<T> {
|
|||
/** Removes all assigned values of this attribute. */
|
||||
@Override
|
||||
public void clear() {
|
||||
values.removeAll();
|
||||
values.clear();
|
||||
}
|
||||
|
||||
/** Return the attribute of the specified KeyedObject as a String.
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
/* ###
|
||||
* IP: GHIDRA
|
||||
* REVIEWED: YES
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
@ -16,39 +15,39 @@
|
|||
*/
|
||||
package ghidra.util.prop;
|
||||
|
||||
import java.io.*;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import ghidra.util.LongIterator;
|
||||
import ghidra.util.datastruct.LongObjectHashtable;
|
||||
import ghidra.util.datastruct.NoSuchIndexException;
|
||||
import ghidra.util.exception.AssertException;
|
||||
import ghidra.util.exception.NoValueException;
|
||||
|
||||
import java.io.*;
|
||||
|
||||
/**
|
||||
* Base class for managing properties that are accessed by an index. Property
|
||||
* values are determined by the derived class.
|
||||
*/
|
||||
public abstract class PropertySet implements Serializable {
|
||||
private final static long serialVersionUID = 1;
|
||||
private final static long serialVersionUID = 1;
|
||||
protected static final NoValueException noValueException = new NoValueException();
|
||||
private final static int DEFAULT_NUMBER_PAGE_BITS=12;
|
||||
private final static int DEFAULT_NUMBER_PAGE_BITS = 12;
|
||||
private final static int MIN_NUMBER_PAGE_BITS = 8;
|
||||
private final static int MAX_NUMBER_PAGE_BITS = 15; // must be kept less than
|
||||
// size of a short
|
||||
|
||||
private String name;
|
||||
protected PropertyPageIndex propertyPageIndex; // table of pageIDs
|
||||
private String name;
|
||||
protected PropertyPageIndex propertyPageIndex; // table of pageIDs
|
||||
private int numPageBits; // number of bits from long used as page offset
|
||||
private long pageMask; // a mask for the offset bits, i.e. has a 1 if and only if
|
||||
// the bit is part of the offset
|
||||
// the bit is part of the offset
|
||||
protected short pageSize; // max elements in each page
|
||||
protected int numProperties;
|
||||
private LongObjectHashtable<PropertyPage> ht;
|
||||
private Class<?> objectClass;
|
||||
|
||||
protected int numProperties;
|
||||
private Map<Long, PropertyPage> ht;
|
||||
private Class<?> objectClass;
|
||||
|
||||
protected PropertySet(String name, Class<?> objectClass) {
|
||||
this(name, DEFAULT_NUMBER_PAGE_BITS, objectClass);
|
||||
this(name, DEFAULT_NUMBER_PAGE_BITS, objectClass);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -59,22 +58,22 @@ public abstract class PropertySet implements Serializable {
|
|||
* more than 15.
|
||||
*/
|
||||
protected PropertySet(String name, int numPageBits, Class<?> objectClass) {
|
||||
this.objectClass = objectClass;
|
||||
ht = new LongObjectHashtable<PropertyPage>();
|
||||
this.objectClass = objectClass;
|
||||
ht = new HashMap<>();
|
||||
|
||||
this.name = name;
|
||||
if(numPageBits > MAX_NUMBER_PAGE_BITS) {
|
||||
if (numPageBits > MAX_NUMBER_PAGE_BITS) {
|
||||
numPageBits = MAX_NUMBER_PAGE_BITS;
|
||||
}
|
||||
else if(numPageBits < MIN_NUMBER_PAGE_BITS) {
|
||||
else if (numPageBits < MIN_NUMBER_PAGE_BITS) {
|
||||
numPageBits = MIN_NUMBER_PAGE_BITS;
|
||||
}
|
||||
this.numPageBits = numPageBits;
|
||||
// compute the page mask
|
||||
pageMask = -1L;
|
||||
pageMask = pageMask >>> (64-numPageBits); // 64 = size of long
|
||||
pageMask = pageMask >>> (64 - numPageBits); // 64 = size of long
|
||||
|
||||
pageSize = (short)(pageMask + 1);
|
||||
pageSize = (short) (pageMask + 1);
|
||||
propertyPageIndex = new PropertyPageIndex();
|
||||
}
|
||||
|
||||
|
@ -84,15 +83,15 @@ public abstract class PropertySet implements Serializable {
|
|||
* @return the size (in bytes) of the data that is stored in this property
|
||||
* set.
|
||||
*/
|
||||
public abstract int getDataSize();
|
||||
|
||||
/**
|
||||
public abstract int getDataSize();
|
||||
|
||||
/**
|
||||
* Get the name for this property manager.
|
||||
*/
|
||||
public synchronized String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns property object class associated with this set.
|
||||
*/
|
||||
|
@ -100,19 +99,19 @@ public abstract class PropertySet implements Serializable {
|
|||
return objectClass;
|
||||
}
|
||||
|
||||
protected PropertyPage getPage(long pageId) {
|
||||
return ht.get(pageId);
|
||||
}
|
||||
|
||||
protected PropertyPage getOrCreatePage(long pageID) {
|
||||
PropertyPage page = getPage(pageID);
|
||||
if(page == null) {
|
||||
page = new PropertyPage(pageSize,pageID,getDataSize(),objectClass);
|
||||
ht.put(pageID,page);
|
||||
protected PropertyPage getPage(long pageId) {
|
||||
return ht.get(pageId);
|
||||
}
|
||||
|
||||
protected PropertyPage getOrCreatePage(long pageID) {
|
||||
PropertyPage page = getPage(pageID);
|
||||
if (page == null) {
|
||||
page = new PropertyPage(pageSize, pageID, getDataSize(), objectClass);
|
||||
ht.put(pageID, page);
|
||||
propertyPageIndex.add(pageID);
|
||||
}
|
||||
return page;
|
||||
}
|
||||
return page;
|
||||
}
|
||||
|
||||
/**
|
||||
* Given two indices it indicates whether there is an index in
|
||||
|
@ -123,21 +122,22 @@ public abstract class PropertySet implements Serializable {
|
|||
* @return boolean true if at least one index in the range
|
||||
* has the property, false otherwise.
|
||||
*/
|
||||
public boolean intersects(long start,long end) {
|
||||
if (hasProperty(start)) {
|
||||
return true;
|
||||
}
|
||||
try {
|
||||
long index = this.getNextPropertyIndex(start);
|
||||
if (index <= end) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
catch(NoSuchIndexException e) {
|
||||
return false;
|
||||
}
|
||||
return false;
|
||||
public boolean intersects(long start, long end) {
|
||||
if (hasProperty(start)) {
|
||||
return true;
|
||||
}
|
||||
try {
|
||||
long index = this.getNextPropertyIndex(start);
|
||||
if (index <= end) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
catch (NoSuchIndexException e) {
|
||||
return false;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes all property values within a given range.
|
||||
* @param start begin range
|
||||
|
@ -147,23 +147,23 @@ public abstract class PropertySet implements Serializable {
|
|||
*/
|
||||
public synchronized boolean removeRange(long start, long end) {
|
||||
|
||||
boolean status=false;
|
||||
// go from start to end
|
||||
// get the page starting at start
|
||||
// get page start index and end index
|
||||
// subtract page.getSize() from numProperties
|
||||
// remove the entire page
|
||||
// increment start by size of page
|
||||
// else
|
||||
// for (i<endofPage; start++)
|
||||
// call slow remove(index);
|
||||
boolean status = false;
|
||||
// go from start to end
|
||||
// get the page starting at start
|
||||
// get page start index and end index
|
||||
// subtract page.getSize() from numProperties
|
||||
// remove the entire page
|
||||
// increment start by size of page
|
||||
// else
|
||||
// for (i<endofPage; start++)
|
||||
// call slow remove(index);
|
||||
|
||||
while (start <= end) {
|
||||
// get page containing start
|
||||
long pageID = getPageID(start);
|
||||
short offset = getPageOffset(start);
|
||||
while (start <= end) {
|
||||
// get page containing start
|
||||
long pageID = getPageID(start);
|
||||
short offset = getPageOffset(start);
|
||||
|
||||
PropertyPage page = getPage(pageID);
|
||||
PropertyPage page = getPage(pageID);
|
||||
|
||||
if (page == null) {
|
||||
long nextPageId = propertyPageIndex.getNext(pageID);
|
||||
|
@ -174,27 +174,27 @@ public abstract class PropertySet implements Serializable {
|
|||
continue;
|
||||
}
|
||||
|
||||
// if start is beginning of page && end of page is still less than start
|
||||
if (offset == 0 && (pageSize+start) <= end) {
|
||||
// if start is beginning of page && end of page is still less than start
|
||||
if (offset == 0 && (pageSize + start) <= end) {
|
||||
|
||||
// decrement # properties on a page
|
||||
this.numProperties -= page.getSize();
|
||||
// decrement # properties on a page
|
||||
this.numProperties -= page.getSize();
|
||||
|
||||
// remove the entire page
|
||||
ht.remove(pageID);
|
||||
propertyPageIndex.remove(pageID);
|
||||
// remove the entire page
|
||||
ht.remove(pageID);
|
||||
propertyPageIndex.remove(pageID);
|
||||
|
||||
status = true;
|
||||
long nextPageId = propertyPageIndex.getNext(pageID);
|
||||
start = nextPageId << numPageBits;
|
||||
}
|
||||
else {
|
||||
// start at offset, and remove each property
|
||||
for (; offset < pageSize && start <= end; offset++, start++) {
|
||||
status |= removeFromPage(page, pageID, offset);
|
||||
}
|
||||
}
|
||||
}
|
||||
status = true;
|
||||
long nextPageId = propertyPageIndex.getNext(pageID);
|
||||
start = nextPageId << numPageBits;
|
||||
}
|
||||
else {
|
||||
// start at offset, and remove each property
|
||||
for (; offset < pageSize && start <= end; offset++, start++) {
|
||||
status |= removeFromPage(page, pageID, offset);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return status;
|
||||
}
|
||||
|
@ -211,35 +211,36 @@ public abstract class PropertySet implements Serializable {
|
|||
|
||||
PropertyPage page = getPage(pageID);
|
||||
|
||||
return removeFromPage(page, pageID, offset);
|
||||
}
|
||||
return removeFromPage(page, pageID, offset);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove the property on page at offset. If Page is now empty, remove it.
|
||||
*/
|
||||
private boolean removeFromPage(PropertyPage page, long pageID, short offset) {
|
||||
if(page != null) {
|
||||
/**
|
||||
* Remove the property on page at offset. If Page is now empty, remove it.
|
||||
*/
|
||||
private boolean removeFromPage(PropertyPage page, long pageID, short offset) {
|
||||
if (page != null) {
|
||||
|
||||
boolean removed = page.remove(offset);
|
||||
if (removed) {
|
||||
numProperties--;
|
||||
}
|
||||
if (removed) {
|
||||
numProperties--;
|
||||
}
|
||||
|
||||
if(page.isEmpty()) {
|
||||
ht.remove(pageID);
|
||||
if (page.isEmpty()) {
|
||||
ht.remove(pageID);
|
||||
propertyPageIndex.remove(pageID);
|
||||
}
|
||||
return removed;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* returns whether there is a property value at index.
|
||||
* @param index the long representation of an address.
|
||||
*/
|
||||
public synchronized boolean hasProperty(long index) {
|
||||
PropertyPage page = getPage(getPageID(index));
|
||||
if(page == null) {
|
||||
if (page == null) {
|
||||
return false;
|
||||
}
|
||||
return page.hasProperty(getPageOffset(index));
|
||||
|
@ -256,29 +257,29 @@ public abstract class PropertySet implements Serializable {
|
|||
short offset = getPageOffset(index);
|
||||
PropertyPage page = getPage(pageID);
|
||||
|
||||
if(page != null) {
|
||||
if (page != null) {
|
||||
short nextOffset = page.getNext(offset);
|
||||
|
||||
if(nextOffset >= 0) {
|
||||
if (nextOffset >= 0) {
|
||||
return getIndex(pageID, nextOffset);
|
||||
}
|
||||
}
|
||||
|
||||
pageID = propertyPageIndex.getNext(pageID);
|
||||
|
||||
if(pageID >= 0) {
|
||||
if (pageID >= 0) {
|
||||
page = getPage(pageID);
|
||||
if(page != null) {
|
||||
if (page != null) {
|
||||
short nextOffset = page.getFirst();
|
||||
if (nextOffset < 0) {
|
||||
throw new AssertException(
|
||||
"Page (" + pageID +
|
||||
") exists but there is no 'first' offset");
|
||||
}
|
||||
if (nextOffset < 0) {
|
||||
throw new AssertException(
|
||||
"Page (" + pageID +
|
||||
") exists but there is no 'first' offset");
|
||||
}
|
||||
return getIndex(pageID, nextOffset);
|
||||
}
|
||||
}
|
||||
throw NoSuchIndexException.noSuchIndexException;
|
||||
throw NoSuchIndexException.noSuchIndexException;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -295,36 +296,37 @@ public abstract class PropertySet implements Serializable {
|
|||
|
||||
PropertyPage page = getPage(pageID);
|
||||
|
||||
if(page != null) {
|
||||
if (page != null) {
|
||||
short prevOffset = page.getPrevious(offset);
|
||||
if(prevOffset >= 0) {
|
||||
if (prevOffset >= 0) {
|
||||
return getIndex(pageID, prevOffset);
|
||||
}
|
||||
}
|
||||
|
||||
pageID = propertyPageIndex.getPrevious(pageID);
|
||||
|
||||
if(pageID >= 0) {
|
||||
if (pageID >= 0) {
|
||||
page = getPage(pageID);
|
||||
if(page != null) {
|
||||
if (page != null) {
|
||||
short prevOffset = page.getLast();
|
||||
if (prevOffset < 0) {
|
||||
throw new AssertException(
|
||||
"Page (" + pageID +
|
||||
") exists but there is no 'last' offset");
|
||||
}
|
||||
if (prevOffset < 0) {
|
||||
throw new AssertException(
|
||||
"Page (" + pageID +
|
||||
") exists but there is no 'last' offset");
|
||||
}
|
||||
return getIndex(pageID, prevOffset);
|
||||
}
|
||||
}
|
||||
|
||||
throw NoSuchIndexException.noSuchIndexException;
|
||||
throw NoSuchIndexException.noSuchIndexException;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the first index where a property value exists.
|
||||
* @throws NoSuchIndexException when there is no property value for any index.
|
||||
*/
|
||||
public synchronized long getFirstPropertyIndex() throws NoSuchIndexException {
|
||||
if(hasProperty(0)) {
|
||||
if (hasProperty(0)) {
|
||||
return 0;
|
||||
}
|
||||
return getNextPropertyIndex(0);
|
||||
|
@ -337,18 +339,19 @@ public abstract class PropertySet implements Serializable {
|
|||
*/
|
||||
public synchronized long getLastPropertyIndex() throws NoSuchIndexException {
|
||||
// -1 should be the highest possible address
|
||||
if(hasProperty(-1)) {
|
||||
if (hasProperty(-1)) {
|
||||
return -1;
|
||||
}
|
||||
return getPreviousPropertyIndex(-1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the number of properties in the set.
|
||||
* @return the number of properties
|
||||
*/
|
||||
public int getSize() {
|
||||
return numProperties;
|
||||
}
|
||||
public int getSize() {
|
||||
return numProperties;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the page ID from the given index.
|
||||
|
@ -364,7 +367,7 @@ public abstract class PropertySet implements Serializable {
|
|||
* @param index the long representation of an address.
|
||||
*/
|
||||
protected final short getPageOffset(long index) {
|
||||
return(short)(index & pageMask);
|
||||
return (short) (index & pageMask);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -372,9 +375,9 @@ public abstract class PropertySet implements Serializable {
|
|||
* @return the long representation of an address.
|
||||
*/
|
||||
protected final long getIndex(long pageID, short offset) {
|
||||
return(pageID << numPageBits) | offset;
|
||||
return (pageID << numPageBits) | offset;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Move the range of properties to the newStart index.
|
||||
* @param start the beginning of the property range to move
|
||||
|
@ -382,48 +385,50 @@ public abstract class PropertySet implements Serializable {
|
|||
* @param newStart the new beginning of the property range after the move
|
||||
*/
|
||||
public void moveRange(long start, long end, long newStart) {
|
||||
if (newStart < start) {
|
||||
long clearSize = end-start+1;
|
||||
long offset = start - newStart;
|
||||
if (offset < clearSize) {
|
||||
clearSize = offset;
|
||||
}
|
||||
removeRange(newStart, newStart+clearSize-1);
|
||||
LongIterator it = getPropertyIterator(start, end);
|
||||
while(it.hasNext()) {
|
||||
long index = it.next();
|
||||
moveIndex(index, index-offset);
|
||||
}
|
||||
}
|
||||
else {
|
||||
long clearSize = end-start+1;
|
||||
long offset = newStart - start;
|
||||
if (offset < clearSize) {
|
||||
clearSize = offset;
|
||||
}
|
||||
if (newStart > end) {
|
||||
removeRange(newStart, newStart+clearSize-1);
|
||||
}
|
||||
else {
|
||||
removeRange(end+1, end+clearSize);
|
||||
}
|
||||
if (newStart < start) {
|
||||
long clearSize = end - start + 1;
|
||||
long offset = start - newStart;
|
||||
if (offset < clearSize) {
|
||||
clearSize = offset;
|
||||
}
|
||||
removeRange(newStart, newStart + clearSize - 1);
|
||||
LongIterator it = getPropertyIterator(start, end);
|
||||
while (it.hasNext()) {
|
||||
long index = it.next();
|
||||
moveIndex(index, index - offset);
|
||||
}
|
||||
}
|
||||
else {
|
||||
long clearSize = end - start + 1;
|
||||
long offset = newStart - start;
|
||||
if (offset < clearSize) {
|
||||
clearSize = offset;
|
||||
}
|
||||
if (newStart > end) {
|
||||
removeRange(newStart, newStart + clearSize - 1);
|
||||
}
|
||||
else {
|
||||
removeRange(end + 1, end + clearSize);
|
||||
}
|
||||
|
||||
LongIterator it = getPropertyIterator(end+1);
|
||||
while(it.hasPrevious()) {
|
||||
long index = it.previous();
|
||||
if (index < start) {
|
||||
break;
|
||||
}
|
||||
moveIndex(index, index+offset);
|
||||
}
|
||||
}
|
||||
}
|
||||
LongIterator it = getPropertyIterator(end + 1);
|
||||
while (it.hasPrevious()) {
|
||||
long index = it.previous();
|
||||
if (index < start) {
|
||||
break;
|
||||
}
|
||||
moveIndex(index, index + offset);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract void moveIndex(long from, long to);
|
||||
protected abstract void saveProperty(ObjectOutputStream oos, long addr)
|
||||
throws IOException;
|
||||
protected abstract void restoreProperty(ObjectInputStream ois, long addr)
|
||||
throws IOException, ClassNotFoundException;
|
||||
|
||||
protected abstract void saveProperty(ObjectOutputStream oos, long addr)
|
||||
throws IOException;
|
||||
|
||||
protected abstract void restoreProperty(ObjectInputStream ois, long addr)
|
||||
throws IOException, ClassNotFoundException;
|
||||
|
||||
/**
|
||||
* Creates an iterator over all the indexes that have this property within
|
||||
|
@ -433,10 +438,10 @@ public abstract class PropertySet implements Serializable {
|
|||
* @param end The end address to search
|
||||
* @return LongIterator Iterator over indexes that have properties.
|
||||
*/
|
||||
public LongIterator getPropertyIterator(long start,long end) {
|
||||
public LongIterator getPropertyIterator(long start, long end) {
|
||||
return new LongIteratorImpl(this, start, end);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Creates an iterator over all the indexes that have this property within
|
||||
* the given range.
|
||||
|
@ -451,6 +456,7 @@ public abstract class PropertySet implements Serializable {
|
|||
public LongIterator getPropertyIterator(long start, long end, boolean atStart) {
|
||||
return new LongIteratorImpl(this, start, end, atStart);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an iterator over the indices having the given property
|
||||
* value.
|
||||
|
@ -478,7 +484,6 @@ public abstract class PropertySet implements Serializable {
|
|||
return new LongIteratorImpl(this, start, before);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Saves all property values between start and end to the output stream
|
||||
* @param oos the output stream
|
||||
|
@ -487,15 +492,15 @@ public abstract class PropertySet implements Serializable {
|
|||
* @throws IOException if an I/O error occurs on the write.
|
||||
*/
|
||||
public void saveProperties(ObjectOutputStream oos, long start, long end)
|
||||
throws IOException{
|
||||
throws IOException {
|
||||
|
||||
oos.writeLong(start);
|
||||
oos.writeLong(end);
|
||||
if (hasProperty(start)) {
|
||||
oos.writeLong(start);
|
||||
oos.writeLong(end);
|
||||
if (hasProperty(start)) {
|
||||
oos.writeByte(1);
|
||||
oos.writeLong(start);
|
||||
saveProperty(oos, start);
|
||||
}
|
||||
}
|
||||
try {
|
||||
long index = start;
|
||||
while ((index = getNextPropertyIndex(index)) <= end) {
|
||||
|
@ -504,10 +509,11 @@ public abstract class PropertySet implements Serializable {
|
|||
saveProperty(oos, index);
|
||||
}
|
||||
}
|
||||
catch(NoSuchIndexException e) {}
|
||||
catch (NoSuchIndexException e) {
|
||||
}
|
||||
oos.writeByte(0);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Restores all the properties from the input stream. Any existing
|
||||
* properties will first be removed.
|
||||
|
@ -516,56 +522,22 @@ public abstract class PropertySet implements Serializable {
|
|||
* @throws ClassNotFoundException if the a class cannot be determined for
|
||||
* the property value.
|
||||
*/
|
||||
public void restoreProperties(ObjectInputStream ois) throws
|
||||
IOException, ClassNotFoundException {
|
||||
long start = ois.readLong();
|
||||
long end = ois.readLong();
|
||||
this.removeRange(start,end);
|
||||
while(ois.readByte() != 0) {
|
||||
public void restoreProperties(ObjectInputStream ois)
|
||||
throws IOException, ClassNotFoundException {
|
||||
long start = ois.readLong();
|
||||
long end = ois.readLong();
|
||||
this.removeRange(start, end);
|
||||
while (ois.readByte() != 0) {
|
||||
long index = ois.readLong();
|
||||
restoreProperty(ois, index);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Saves all properties to the given output stream.
|
||||
* @param out the output stream.
|
||||
* @throws IOException I/O error occurs while writing output.
|
||||
*/
|
||||
public void saveAll(ObjectOutputStream out) throws IOException {
|
||||
out.writeObject(name);
|
||||
out.writeObject(propertyPageIndex);
|
||||
out.writeInt(numPageBits);
|
||||
out.writeLong(pageMask);
|
||||
out.writeShort(pageSize);
|
||||
out.writeInt(numProperties);
|
||||
out.writeObject(ht);
|
||||
out.writeObject(objectClass);
|
||||
}
|
||||
|
||||
/**
|
||||
* Restores all properties values from the input stream.
|
||||
* @param in the input stream.
|
||||
* @throws IOException if I/O error occurs while reading from stream.
|
||||
* @throws ClassNotFoundException if the a class cannot be determined for
|
||||
* the property value.
|
||||
*/
|
||||
@SuppressWarnings("unchecked") // the type must match or it is a bug
|
||||
public void restoreAll(ObjectInputStream in) throws IOException, ClassNotFoundException {
|
||||
name = (String)in.readObject();
|
||||
propertyPageIndex = (PropertyPageIndex)in.readObject();
|
||||
numPageBits = in.readInt();
|
||||
pageMask = in.readLong();
|
||||
pageSize = in.readShort();
|
||||
numProperties = in.readInt();
|
||||
ht = (LongObjectHashtable<PropertyPage>) in.readObject();
|
||||
objectClass = (Class<?>)in.readObject();
|
||||
}
|
||||
/**
|
||||
* Based upon the type of property manager that this is, the appropriate
|
||||
* visit() method will be called within the PropertyVisitor.
|
||||
* @param visitor object implementing the PropertyVisitor interface.
|
||||
* @param addr the address of where to visit (get) the property.
|
||||
* Based upon the type of property manager that this is, the appropriate
|
||||
* visit() method will be called within the PropertyVisitor.
|
||||
* @param visitor object implementing the PropertyVisitor interface.
|
||||
* @param addr the address of where to visit (get) the property.
|
||||
*/
|
||||
public abstract void applyValue(PropertyVisitor visitor, long addr);
|
||||
|
||||
|
|
|
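PropertySet above stores its pages in the map keyed by page ID; the ID and the in-page offset are carved out of the long index using numPageBits (12 by default), with the mask built exactly as in the diff. A small worked sketch of that arithmetic; the unsigned shift for the page ID is an assumption, since getPageID's body is not shown in these hunks.

class PagingSketch {
    static final int NUM_PAGE_BITS = 12;                        // DEFAULT_NUMBER_PAGE_BITS
    static final long PAGE_MASK = -1L >>> (64 - NUM_PAGE_BITS); // low 12 bits set: 0xFFF
    static final short PAGE_SIZE = (short) (PAGE_MASK + 1);     // 4096 slots per page

    static long pageId(long index) {
        return index >>> NUM_PAGE_BITS;            // assumed; not shown in the diff
    }

    static short pageOffset(long index) {
        return (short) (index & PAGE_MASK);        // matches getPageOffset above
    }

    static long index(long pageId, short offset) {
        return (pageId << NUM_PAGE_BITS) | offset; // matches getIndex above
    }

    public static void main(String[] args) {
        long index = 0x12345L;
        // page 0x12, offset 0x345, and the round trip restores the original index
        System.out.printf("page=0x%x offset=0x%x roundtrip=0x%x%n",
            pageId(index), pageOffset(index), index(pageId(index), pageOffset(index)));
    }
}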
@ -1,108 +0,0 @@
|
|||
/* ###
|
||||
* IP: GHIDRA
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package ghidra.util.datastruct;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNull;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
import generic.test.AbstractGenericTest;
|
||||
|
||||
public class SoftCacheLongKeyMapTest extends AbstractGenericTest {
|
||||
SoftCacheLongKeyMap cache;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public SoftCacheLongKeyMapTest() {
|
||||
super();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMap() {
|
||||
cache = new SoftCacheLongKeyMap(20);
|
||||
cache.put(0, "aaa");
|
||||
cache.put(1, "bbb");
|
||||
cache.put(2, "ccc");
|
||||
|
||||
assertEquals(3, cache.size());
|
||||
assertEquals("aaa", cache.get(0));
|
||||
assertEquals("bbb", cache.get(1));
|
||||
assertEquals("ccc", cache.get(2));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testlru() {
|
||||
cache = new SoftCacheLongKeyMap(10);
|
||||
cache.put(0, "aaa");
|
||||
cache.put(1, "bbb");
|
||||
cache.put(2, "ccc");
|
||||
cache.put(3, "ddd");
|
||||
cache.put(4, "eee");
|
||||
cache.put(5, "fff");
|
||||
cache.put(6, "ggg");
|
||||
cache.put(7, "hhh");
|
||||
cache.put(8, "iii");
|
||||
cache.put(9, "jjj");
|
||||
|
||||
assertEquals(10, cache.size());
|
||||
cache.put(10, "kkk");
|
||||
assertEquals(10, cache.size());
|
||||
assertNull(cache.get(0));
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testlru2() {
|
||||
cache = new SoftCacheLongKeyMap(10);
|
||||
cache.put(0, "aaa");
|
||||
cache.put(1, "bbb");
|
||||
cache.put(2, "ccc");
|
||||
cache.put(3, "ddd");
|
||||
cache.put(4, "eee");
|
||||
cache.put(5, "fff");
|
||||
cache.put(6, "ggg");
|
||||
cache.put(7, "hhh");
|
||||
cache.put(8, "iii");
|
||||
cache.put(9, "jjj");
|
||||
cache.get(0);
|
||||
assertEquals(10, cache.size());
|
||||
cache.put(10, "kkk");
|
||||
assertEquals(10, cache.size());
|
||||
assertEquals("aaa", cache.get(0));
|
||||
assertNull(cache.get(1));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRemove() {
|
||||
cache = new SoftCacheLongKeyMap(10);
|
||||
cache.put(0, "aaa");
|
||||
cache.put(1, "bbb");
|
||||
cache.put(2, "ccc");
|
||||
cache.put(3, "ddd");
|
||||
cache.remove(1);
|
||||
cache.remove(0);
|
||||
cache.remove(3);
|
||||
cache.remove(2);
|
||||
assertEquals(0, cache.size());
|
||||
cache.put(5, "zzz");
|
||||
assertEquals(1, cache.size());
|
||||
cache.remove(5);
|
||||
assertEquals(0, cache.size());
|
||||
|
||||
}
|
||||
}
|
|
@ -21,7 +21,6 @@ package ghidra.program.database;
|
|||
import java.lang.ref.ReferenceQueue;
|
||||
import java.lang.ref.WeakReference;
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import db.Record;
|
||||
import ghidra.program.model.address.KeyRange;
|
||||
|
@ -199,18 +198,21 @@ public class DBObjectCache<T extends DatabaseObject> {
|
|||
* @param keyRanges key ranges to delete
|
||||
*/
|
||||
private void deleteLargeKeyRanges(List<KeyRange> keyRanges) {
|
||||
map.keySet()
|
||||
.stream()
|
||||
.filter(key -> keyRangesContain(keyRanges, key))
|
||||
.collect(Collectors.toList())
|
||||
.forEach(key -> {
|
||||
KeyedSoftReference ref = map.remove(key);
|
||||
DatabaseObject obj = ref.get();
|
||||
if (obj != null) {
|
||||
obj.setDeleted();
|
||||
ref.clear();
|
||||
}
|
||||
});
|
||||
map.values().removeIf(ref -> checkRef(ref, keyRanges));
|
||||
}
|
||||
|
||||
private boolean checkRef(KeyedSoftReference ref, List<KeyRange> keyRanges) {
|
||||
long key = ref.getKey();
|
||||
if (keyRangesContain(keyRanges, key)) {
|
||||
DatabaseObject obj = ref.get();
|
||||
if (obj != null) {
|
||||
obj.setDeleted();
|
||||
ref.clear();
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -267,27 +269,6 @@ public class DBObjectCache<T extends DatabaseObject> {
|
|||
return invalidateCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalidates a range of objects in the cache.
|
||||
* @param startKey the first key in the range to invalidate.
|
||||
* @param endKey the last key in the range to invalidate.
|
||||
*/
|
||||
public synchronized void invalidate(long startKey, long endKey) {
|
||||
processQueue();
|
||||
if (endKey - startKey < map.size()) {
|
||||
for (long i = startKey; i <= endKey; i++) {
|
||||
doInvalidate(i);
|
||||
}
|
||||
}
|
||||
else {
|
||||
map.keySet()
|
||||
.stream()
|
||||
.filter(key -> (key >= startKey && key <= endKey))
|
||||
.collect(Collectors.toList())
|
||||
.forEach(key -> doInvalidate(key));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the object with the given key from the cache.
|
||||
* @param key the key of the object to remove.
|
||||
|
@ -305,25 +286,6 @@ public class DBObjectCache<T extends DatabaseObject> {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalidates the object with given key.
|
||||
* @param key the key of the object to invalidate.
|
||||
*/
|
||||
public synchronized void invalidate(long key) {
|
||||
processQueue();
|
||||
doInvalidate(key);
|
||||
}
|
||||
|
||||
private void doInvalidate(long key) {
|
||||
KeyedSoftReference ref = map.get(key);
|
||||
if (ref != null) {
|
||||
T obj = ref.get();
|
||||
if (obj != null) {
|
||||
obj.setInvalid();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void addToHardCache(T obj) {
|
||||
hardCache.addLast(obj);
|
||||
if (hardCache.size() > hardCacheSize) {
|
||||
|
|
|
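The deleteLargeKeyRanges() rewrite above folds the mark-deleted, clear-reference, and drop-entry steps into a single values().removeIf pass, with the side effects living inside the predicate (checkRef). A stripped-down sketch of the same shape, using hypothetical stand-ins for KeyedSoftReference and DatabaseObject:

import java.lang.ref.WeakReference;
import java.util.Map;

class RemoveIfSketch {
    static class Obj {
        void setDeleted() { /* mark the object dead */ }
    }

    static class KeyedRef extends WeakReference<Obj> {
        final long key;
        KeyedRef(long key, Obj obj) { super(obj); this.key = key; }
    }

    // Drop every cached entry whose key falls in [start, end], marking any
    // still-reachable object as deleted before its entry goes away.
    static void deleteRange(Map<Long, KeyedRef> map, long start, long end) {
        map.values().removeIf(ref -> {
            if (ref.key < start || ref.key > end) {
                return false;   // keep entries outside the range
            }
            Obj obj = ref.get();
            if (obj != null) {
                obj.setDeleted();
                ref.clear();
            }
            return true;        // removeIf removes the map entry
        });
    }
}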
@@ -16,13 +16,14 @@
package ghidra.program.database;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import db.*;
import ghidra.program.model.address.AddressSpace;
import ghidra.program.model.address.OverlayAddressSpace;
import ghidra.program.model.lang.Language;
import ghidra.program.util.LanguageTranslator;
import ghidra.util.datastruct.LongObjectHashtable;
import ghidra.util.exception.AssertException;
import ghidra.util.exception.DuplicateNameException;

@@ -115,7 +116,7 @@ class OverlaySpaceAdapterDB {
}

void updateOverlaySpaces(ProgramAddressFactory factory) throws IOException {
LongObjectHashtable<OverlayAddressSpace> map = new LongObjectHashtable<>();
Map<Long, OverlayAddressSpace> map = new HashMap<>();
for (AddressSpace space : factory.getAllAddressSpaces()) {
if (space instanceof OverlayAddressSpace) {
OverlayAddressSpace os = (OverlayAddressSpace) space;

@@ -162,11 +163,10 @@ class OverlaySpaceAdapterDB {
}
}
if (map.size() != 0) {
long[] keys = map.getKeys();
for (int i = 0; i < keys.length; i++) {
OverlayAddressSpace space = map.remove(keys[i]);
for (OverlayAddressSpace space : map.values()) {
factory.removeOverlaySpace(space.getName());
}
map.clear();
}
}
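In updateOverlaySpaces() above, the old code removed entries while walking a getKeys() snapshot; with a HashMap, calling map.remove() while iterating keySet() or values() would risk a ConcurrentModificationException, which is why the new code iterates values() read-only and calls clear() once at the end. A tiny sketch of both safe shapes, with illustrative types:

import java.util.Iterator;
import java.util.List;
import java.util.Map;

class DrainSketch {
    // Safe: read-only iteration followed by one bulk clear (as in the diff).
    static void drainAll(Map<Long, String> map, List<String> removedNames) {
        for (String name : map.values()) {
            removedNames.add(name);
        }
        map.clear();
    }

    // Also safe, when entries must be removed selectively: remove through the iterator.
    static void drainSome(Map<Long, String> map, List<String> removedNames) {
        for (Iterator<String> it = map.values().iterator(); it.hasNext();) {
            String name = it.next();
            if (name.startsWith("ov")) {    // illustrative predicate
                removedNames.add(name);
                it.remove();
            }
        }
    }
}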
@@ -39,7 +39,6 @@ import ghidra.program.model.lang.CompilerSpec;
import ghidra.util.*;
import ghidra.util.classfinder.ClassTranslator;
import ghidra.util.datastruct.FixedSizeHashMap;
import ghidra.util.datastruct.LongObjectHashtable;
import ghidra.util.exception.*;
import ghidra.util.task.TaskMonitor;

@@ -3490,15 +3489,15 @@ abstract public class DataTypeManagerDB implements DataTypeManager {
}

class IdsToDataTypeMap {
private Map<UniversalID, LongObjectHashtable<DataType>> map = new HashMap<>();
private Map<UniversalID, Map<Long, DataType>> map = new HashMap<>();

DataType getDataType(UniversalID sourceID, UniversalID dataTypeID) {
if (sourceID == null || sourceID.equals(universalID)) {
sourceID = LOCAL_ARCHIVE_UNIVERSAL_ID;
}
LongObjectHashtable<DataType> idMap = map.get(sourceID);
Map<Long, DataType> idMap = map.get(sourceID);
if (idMap == null) {
idMap = new LongObjectHashtable<>();
idMap = new HashMap<>();
map.put(sourceID, idMap);
}
DataType dt = idMap.get(dataTypeID.getValue());

@@ -3527,7 +3526,7 @@ abstract public class DataTypeManagerDB implements DataTypeManager {
else {
sourceID = sourceArchive.getSourceArchiveID();
}
LongObjectHashtable<DataType> idMap = map.get(sourceID);
Map<Long, DataType> idMap = map.get(sourceID);
if (idMap != null) {
idMap.remove(dataTypeID.getValue());
}
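IdsToDataTypeMap above is now a map of maps (source archive ID to a Map<Long, DataType> of data-type IDs) and creates the inner map with an explicit get/put pair; Map.computeIfAbsent expresses the same lookup-or-create step in one call. A hedged sketch of that alternative, not the commit's code:

import java.util.HashMap;
import java.util.Map;

class TwoLevelMapSketch<K1, K2, V> {
    private final Map<K1, Map<K2, V>> map = new HashMap<>();

    void put(K1 outer, K2 inner, V value) {
        // Creates the inner map on first use of this outer key.
        map.computeIfAbsent(outer, k -> new HashMap<>()).put(inner, value);
    }

    V get(K1 outer, K2 inner) {
        Map<K2, V> innerMap = map.get(outer);
        return innerMap == null ? null : innerMap.get(inner);
    }

    void remove(K1 outer, K2 inner) {
        Map<K2, V> innerMap = map.get(outer);
        if (innerMap != null) {
            innerMap.remove(inner);
        }
    }
}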
@@ -34,7 +34,6 @@ import ghidra.program.model.symbol.*;
import ghidra.program.util.LanguageTranslator;
import ghidra.util.Lock;
import ghidra.util.Msg;
import ghidra.util.datastruct.LongObjectHashtable;
import ghidra.util.exception.*;
import ghidra.util.task.TaskMonitor;

@@ -122,7 +121,7 @@ public class ExternalManagerDB implements ManagerDB, ExternalManager {
monitor.initialize(oldNameAdapter.getRecordCount());
int cnt = 0;

LongObjectHashtable<String> nameMap = new LongObjectHashtable<>();
Map<Long, String> nameMap = new HashMap<>();

RecordIterator iter = oldNameAdapter.getRecords();
while (iter.hasNext()) {
@@ -18,7 +18,8 @@ package ghidra.program.database.map;
import java.util.*;

import ghidra.program.model.address.*;
import ghidra.util.datastruct.*;
import ghidra.util.datastruct.Range;
import ghidra.util.datastruct.SortedRangeList;

/**
* AddressSetView implementation that handles image base changes. NOTE: THIS IMPLEMENTATION

@@ -33,8 +34,7 @@ public class NormalizedAddressSet implements AddressSetView {

private AddressMap addrMap;

private LongObjectHashtable<SortedRangeList> baseLists =
new LongObjectHashtable<SortedRangeList>();
private Map<Long, SortedRangeList> baseLists = new HashMap<>();
private ArrayList<Long> bases = new ArrayList<Long>();

private Comparator<Long> baseComparator = new Comparator<Long>() {

@@ -108,7 +108,7 @@ public class NormalizedAddressSet implements AddressSetView {
* Removes all addresses from this set.
*/
public void clear() {
baseLists = new LongObjectHashtable<SortedRangeList>();
baseLists = new HashMap<>();
bases = new ArrayList<Long>();
}

@@ -251,9 +251,9 @@ public class NormalizedAddressSet implements AddressSetView {
@Override
public int getNumAddressRanges() {
int n = 0;
long[] keys = baseLists.getKeys();
for (int i = 0; i < keys.length; i++) {
SortedRangeList list = baseLists.get(keys[i]);

for (long key : baseLists.keySet()) {
SortedRangeList list = baseLists.get(key);
n += list.getNumRanges();
}
return n;

@@ -286,9 +286,8 @@ public class NormalizedAddressSet implements AddressSetView {
@Override
public long getNumAddresses() {
long n = 0;
long[] keys = baseLists.getKeys();
for (int i = 0; i < keys.length; i++) {
SortedRangeList list = baseLists.get(keys[i]);
for (long key : baseLists.keySet()) {
SortedRangeList list = baseLists.get(key);
n += list.getNumValues();
}
return n;
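getNumAddressRanges() and getNumAddresses() above iterate keySet() and then look each key up again with get(); iterating values() directly (or entrySet() when the key is also needed) avoids that second hash lookup. A small sketch of the alternative, offered as a suggestion rather than what the commit does:

import java.util.List;
import java.util.Map;

class SumSketch {
    static long totalValues(Map<Long, List<Integer>> baseLists) {
        long n = 0;
        for (List<Integer> list : baseLists.values()) { // no per-key get() needed
            n += list.size();
        }
        return n;
    }
}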
@@ -32,9 +32,7 @@ import ghidra.program.model.listing.ProgramContext;
import ghidra.program.util.RangeMapAdapter;
import ghidra.program.util.RegisterValueStore;
import ghidra.util.Lock;
import ghidra.util.datastruct.LongObjectHashtable;
import ghidra.util.exception.CancelledException;
import ghidra.util.exception.VersionException;
import ghidra.util.task.TaskMonitor;

/**

@@ -61,7 +59,7 @@ public class OldProgramContextDB implements ProgramContext, DefaultProgramContex
* address ranges using the PropertyMap utilities.
*/
private HashMap<String, Register> registersMap;
private LongObjectHashtable<AddressRangeMapDB> valueMaps;
private Map<Integer, AddressRangeMapDB> valueMaps;
private Register baseContextRegister;
protected Map<Register, RegisterValueStore> defaultRegisterValueMap;

@@ -88,7 +86,7 @@ public class OldProgramContextDB implements ProgramContext, DefaultProgramContex
defaultRegisterValueMap = new HashMap<Register, RegisterValueStore>();

registersMap = new HashMap<String, Register>();
valueMaps = new LongObjectHashtable<AddressRangeMapDB>();
valueMaps = new HashMap<>();
registerSpaceSize = 0;

for (Register register : registers) {

@@ -350,7 +348,7 @@ public class OldProgramContextDB implements ProgramContext, DefaultProgramContex
public void invalidateCache(boolean all) throws IOException {
lock.acquire();
try {
valueMaps.removeAll();
valueMaps.clear();
}
finally {
lock.release();
@@ -306,29 +306,11 @@ public abstract class DefaultPropertyMap implements PropertyMap {
* @throws ClassNotFoundException if the class for the object being
* read is not in the class path
*/
public void restoreProperties(ObjectInputStream ois) throws IOException, ClassNotFoundException {
public void restoreProperties(ObjectInputStream ois)
throws IOException, ClassNotFoundException {
propertyMgr.restoreProperties(ois);
}

/**
* Write all properties in the map to the given output stream.
* @throws IOException if there is a problem writing to the stream
*/
public void saveAll(ObjectOutputStream out) throws IOException {
propertyMgr.saveAll(out);
}

/**
* Restore properties read from the given input stream.
* @param in input stream
* @throws IOException if there is a problem reading from the stream
* @throws ClassNotFoundException if the class for the object being
* read is not in the class path
*/
public void restoreAll(ObjectInputStream in) throws IOException, ClassNotFoundException {
propertyMgr.restoreAll(in);
}

private class AddressPropertyIterator implements AddressIterator {

private LongIterator iter;

@@ -350,7 +332,8 @@ public abstract class DefaultPropertyMap implements PropertyMap {

AddressPropertyIterator(Address start, Address end, boolean forward) {
iter =
propertyMgr.getPropertyIterator(addrMap.getKey(start), addrMap.getKey(end), forward);
propertyMgr.getPropertyIterator(addrMap.getKey(start), addrMap.getKey(end),
forward);
this.forward = forward;

}