add new map that lets me specify hashCode/equals

This commit is contained in:
Terence Parr 2012-09-09 18:55:47 -07:00
parent 3c5dec1172
commit 89656989e6
3 changed files with 311 additions and 1 deletions

View File

@ -38,6 +38,7 @@ import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.dfa.DFAState;
import org.antlr.v4.runtime.misc.Array2DHashSet;
import org.antlr.v4.runtime.misc.DoubleKeyMap;
import org.antlr.v4.runtime.misc.Interval;
import org.antlr.v4.runtime.misc.IntervalSet;
@ -1339,6 +1340,21 @@ public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
/**
SLL analysis termination.
This function is used just for SLL. SLL can decide to give up at any
point, even immediately, failing over to full LL. To be as efficient
as possible, SLL only fails over when it's positive it can't get
anywhere on more look ahead without seeing a conflict.
Assuming one-stage parsing, an SLL config set with only conflicting
subsets should force failover to full LL, even if the config sets
don't resolve to the same alternative like {1,2} and {3,4}. The only
time SLL keeps going when there exists a conflicting subset, is when
there is a set of nonconflicting conflicts.
SLL stops when it sees only conflicting config subsets
LL keeps going when there is uncertainty
SLL can't evaluate them first because it needs to create the DFA cache.
*/
public boolean needMoreLookaheadSLL(@NotNull ATNConfigSet configs) {
return false;
@ -1448,10 +1464,58 @@ public class ParserATNSimulator<Symbol extends Token> extends ATNSimulator {
=> continue
The function implementation tries to bail out as soon as
possible. That means we can stop as soon as our final alt set gets
possible. That means we can stop as soon as our viable alt set gets
more than a single alternative in it.
// First, map (s,_,x,_) -> altset for all configs
for c in configs:
map[c] U= c.alt # map hash/equals uses s and x, not alt and not pred
// Then, any singleton alternative sets do not conflict.
nonconflicting = set [map[c] for c in configs if len(map[c])=1]
viable_alts = nonconflicting
// All other sets must be in a conflicting subset
viable_alts = set [min(map[c]) for c in configs if len(map[c])>1]
continue if len(viable_alts)>1
or
boolean continue(configs):
for c in configs:
map[c] U= c.alt # map hash/equals uses s and x, not alt and not pred
viable_alts = set()
for e in map.entries:
if len(e.value)==1:
viable_alts.add(e.value)
if len(viable_alts)>1: return true
else:
viable_alts.add(min(e.value))
return len(viable_alts)>1
*/
/** Full-LL analysis termination check (work in progress).
 *
 *  Decides whether the simulator must consume more lookahead before it can
 *  commit to an alternative during full-LL prediction. Currently a stub that
 *  always answers {@code false}; the nested set declares the (s, _, ctx, _)
 *  equivalence used by the algorithm sketched in the comment above.
 *
 *  @param configs the current ATN configuration set; must not be null
 *  @return true if more lookahead is needed (stub: always false)
 */
public boolean needMoreLookaheadLL(@NotNull ATNConfigSet configs) {
	/** Set keyed only by (state, context); alt and semantic predicate are
	 *  deliberately excluded so configs differing only in alt collide.
	 */
	class AltAndContextHashSet extends Array2DHashSet<ATNConfig> {
		public AltAndContextHashSet() {
			super(16,2);
		}
		/** Code is function of (s, _, ctx, _) */
		@Override
		public int hashCode(ATNConfig o) {
			int hashCode = 7;
			hashCode = 31 * hashCode + o.state.stateNumber;
			hashCode = 31 * hashCode + o.context.hashCode();
			return hashCode;
		}
		@Override
		public boolean equals(ATNConfig a, ATNConfig b) {
			if ( a==b ) return true;
			if ( a==null || b==null ) return false;
			if ( hashCode(a) != hashCode(b) ) return false;
			// BUG FIX: original compared b.context with itself (always true),
			// which made any two configs with equal state numbers "equal".
			return a.state.stateNumber==b.state.stateNumber
				&& a.context.equals(b.context);
		}
	}
	return false;
}

View File

@ -0,0 +1,6 @@
package org.antlr.v4.runtime.misc;
import java.util.Map;
/** A {@link Map} whose implementations use a pluggable equivalence relation
 *  (custom {@code hashCode}/{@code equals} over keys) instead of the keys'
 *  own methods. Marker interface combining {@code Map} with
 *  {@code EquivalenceRelation<K>}; adds no members of its own.
 */
public interface EquivalenceMap<K,V> extends Map<K,V>, EquivalenceRelation<K> {
}

View File

@ -0,0 +1,240 @@
package org.antlr.v4.runtime.misc;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
/** A limited map (many unsupported operations) that lets me use
* varying hashCode/equals.
*/
/** A limited map (many unsupported operations) that lets me use
 *  varying hashCode/equals: key hashing and comparison go through the
 *  overridable {@link #hashCode(Object)} / {@link #equals(Object, Object)}
 *  pair rather than the keys' own methods.
 *
 *  Implementation: an array of LinkedList buckets, doubled (power of 2)
 *  when size exceeds capacity * LOAD_FACTOR. Null keys and null values
 *  are not supported.
 */
public class FlexibleHashMap<K,V> implements EquivalenceMap<K,V> {
	public static final int INITAL_CAPACITY = 16; // must be power of 2
	public static final int INITAL_BUCKET_CAPACITY = 8;
	public static final double LOAD_FACTOR = 0.75;

	/** One key/value pair stored in a bucket chain. */
	public class Entry {
		K key;
		V value;
		public Entry(K key, V value) { this.key = key; this.value = value; }
		@Override
		public String toString() {
			return key.toString()+":"+value.toString();
		}
	}

	protected LinkedList<Entry>[] buckets;

	/** How many key/value mappings are in the map. */
	protected int n = 0;

	protected int threshold = (int)(INITAL_CAPACITY * LOAD_FACTOR); // when to expand

	protected int currentPrime = 1; // jump by 4 primes each expand or whatever
	protected int initialBucketCapacity = INITAL_BUCKET_CAPACITY;

	public FlexibleHashMap() {
		this(INITAL_CAPACITY, INITAL_BUCKET_CAPACITY);
	}

	@SuppressWarnings("unchecked") // generic array creation is impossible in Java
	public FlexibleHashMap(int initialCapacity, int initialBucketCapacity) {
		buckets = (LinkedList<Entry>[])new LinkedList[initialCapacity];
		this.initialBucketCapacity = initialBucketCapacity;
	}

	/** Key equivalence; override to change how keys compare. */
	@Override
	public boolean equals(K a, K b) {
		return a.equals(b);
	}

	/** Key hash; override together with {@link #equals(Object, Object)}. */
	@Override
	public int hashCode(K o) {
		return o.hashCode();
	}

	/** Map a key to its bucket index using the pluggable hash. */
	protected int getBucket(K key) {
		int hash = hashCode(key);
		int b = hash & (buckets.length-1); // assumes len is power of 2
		return b;
	}

	/** Return the value mapped to key, or null if absent (or key is null). */
	@Override
	@SuppressWarnings("unchecked")
	public V get(Object o) {
		K key = (K)o;
		if ( key==null ) return null;
		int b = getBucket(key);
		LinkedList<Entry> bucket = buckets[b];
		if ( bucket==null ) return null; // no bucket
		for (Entry e : bucket) {
			if ( equals(e.key, key) ) return e.value; // use special equals
		}
		return null;
	}

	/** Associate value with key; return the previous value or null.
	 *  Null keys are silently ignored.
	 */
	@Override
	public V put(K key, V value) {
		if ( key==null ) return null;
		if ( n > threshold ) expand();
		int b = getBucket(key);
		LinkedList<Entry> bucket = buckets[b];
		if ( bucket==null ) {
			bucket = buckets[b] = new LinkedList<Entry>();
		}
		for (Entry e : bucket) {
			if ( equals(e.key, key) ) {
				V prev = e.value;
				e.value = value;
				// BUG FIX: replacing an existing mapping must NOT grow n
				// (original incremented here and never on insert, so size()
				// counted replacements instead of distinct keys).
				return prev;
			}
		}
		// not there: add new mapping and count it
		bucket.add(new Entry(key, value));
		n++;
		return null;
	}

	@Override
	public V remove(Object key) {
		throw new UnsupportedOperationException();
	}

	@Override
	public void putAll(Map<? extends K, ? extends V> m) {
		throw new UnsupportedOperationException();
	}

	@Override
	public Set<K> keySet() {
		throw new UnsupportedOperationException();
	}

	@Override
	public Collection<V> values() {
		throw new UnsupportedOperationException();
	}

	@Override
	public Set<Map.Entry<K, V>> entrySet() {
		throw new UnsupportedOperationException();
	}

	/** NOTE: cannot distinguish an absent key from a key mapped to null;
	 *  fine here because put() never stores null values.
	 */
	@Override
	public boolean containsKey(Object key) {
		return get(key)!=null;
	}

	@Override
	public boolean containsValue(Object value) {
		throw new UnsupportedOperationException();
	}

	/** Order-independent hash over all keys, using the pluggable key hash. */
	@Override
	public int hashCode() {
		int h = 0;
		for (LinkedList<Entry> bucket : buckets) {
			if ( bucket==null ) continue;
			for (Entry e : bucket) {
				if ( e==null ) break;
				h += hashCode(e.key);
			}
		}
		return h;
	}

	@Override
	public boolean equals(Object o) {
		throw new UnsupportedOperationException();
	}

	/** Double the table and rehash every entry into the new buckets. */
	@SuppressWarnings("unchecked")
	protected void expand() {
		LinkedList<Entry>[] old = buckets;
		currentPrime += 4;
		int newCapacity = buckets.length * 2;
		LinkedList<Entry>[] newTable = (LinkedList<Entry>[])new LinkedList[newCapacity];
		buckets = newTable;
		threshold = (int)(newCapacity * LOAD_FACTOR);
//		System.out.println("new size="+newCapacity+", thres="+threshold);
		// rehash all existing entries; restore n afterwards since put()
		// recounts them as it reinserts
		int oldSize = size();
		n = 0;
		for (LinkedList<Entry> bucket : old) {
			if ( bucket==null ) continue;
			for (Entry e : bucket) {
				if ( e==null ) break;
				put(e.key, e.value);
			}
		}
		n = oldSize;
	}

	@Override
	public int size() {
		return n;
	}

	@Override
	public boolean isEmpty() {
		return n==0;
	}

	@Override
	@SuppressWarnings("unchecked")
	public void clear() {
		buckets = (LinkedList<Entry>[])new LinkedList[INITAL_CAPACITY];
		n = 0;
	}

	@Override
	public String toString() {
		if ( size()==0 ) return "{}";
		StringBuilder buf = new StringBuilder();
		buf.append('{');
		boolean first = true;
		for (LinkedList<Entry> bucket : buckets) {
			if ( bucket==null ) continue;
			for (Entry e : bucket) {
				if ( e==null ) break;
				if ( first ) first=false;
				else buf.append(", ");
				buf.append(e.toString());
			}
		}
		buf.append('}');
		return buf.toString();
	}

	/** Render one line per bucket, for debugging the distribution. */
	public String toTableString() {
		StringBuilder buf = new StringBuilder();
		for (LinkedList<Entry> bucket : buckets) {
			if ( bucket==null ) {
				buf.append("null\n");
				continue;
			}
			buf.append('[');
			boolean first = true;
			for (Entry e : bucket) {
				if ( first ) first=false;
				else buf.append(" ");
				if ( e==null ) buf.append("_");
				else buf.append(e.toString());
			}
			buf.append("]\n");
		}
		return buf.toString();
	}

	public static void main(String[] args) {
		FlexibleHashMap<String,Integer> map = new FlexibleHashMap<String,Integer>();
		map.put("hi", 1);
		map.put("mom", 2);
		map.put("foo", 3);
		map.put("ach", 4);
		map.put("cbba", 5);
		map.put("d", 6);
		map.put("edf", 7);
		map.put("mom", 8);
		map.put("hi", 9);
		System.out.println(map);
		System.out.println(map.toTableString());
	}
}