[Swift] Remove HashMap and fix Parser caches.
Remove HashMap, and replace all uses of it with Swift dictionaries. There is no need for a custom HashMap implementation (mirroring the Java standard library) when Swift's standard Dictionary works just fine.

Fix Parser.bypassAltsAtnCache. This was declared as a Parser instance variable, whereas in the Java runtime it is static (so the cache outlives individual Parser instances). It was also handled in a thread-unsafe manner: the cache was read outside the mutex that was supposed to protect it. Fix both issues by moving the cache and its mutex to file scope in Parser.swift, outside the class, and rewriting getATNWithBypassAlts so that the lookup happens inside the mutex.

Remove Parser.decisionToDFAMutex. The Java code uses a synchronized block on ParserATNSimulator.decisionToDFA, but the Swift translation had put a mutex in Parser. The decisionToDFA value is shared between Parser, ParserATNSimulator, and the generated parser, so a mutex confined to one of those classes cannot block all possible accesses and is therefore useless. Since this is debugging-only code anyway, just remove the mutex and simplify getDFAStrings and dumpDFA.
parent a4a14213f9, commit f8e63d53c5
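The heart of the bypassAltsAtnCache fix is that the cache lookup must happen inside the same critical section as the insert. The following is only a rough sketch of that pattern with hypothetical names, using Foundation's NSLock and a plain Int value in place of the runtime's Mutex and ATN types; it is not the runtime's actual code.

import Foundation

// Sketch only: a file-scope cache guarded by one lock. Performing the lookup
// inside the critical section is what makes the fast path race-free.
private let cacheLock = NSLock()
private var sharedCache = [String: Int]()    // stand-in for [String: ATN]

func cachedValue(for key: String, compute: () -> Int) -> Int {
    cacheLock.lock()
    defer { cacheLock.unlock() }

    if let existing = sharedCache[key] {
        return existing                      // already computed by some thread
    }
    let fresh = compute()                    // expensive work (ATN deserialization in the real code)
    sharedCache[key] = fresh
    return fresh
}

The rewritten getATNWithBypassAlts in the diff below follows the same shape, using the runtime's own Mutex.synchronized helper.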
@@ -8,7 +8,21 @@
 import Foundation
 
+///
+/// This field maps from the serialized ATN string to the deserialized _org.antlr.v4.runtime.atn.ATN_ with
+/// bypass alternatives.
+///
+/// - SeeAlso: org.antlr.v4.runtime.atn.ATNDeserializationOptions#isGenerateRuleBypassTransitions()
+///
+private var bypassAltsAtnCache = [String: ATN]()
+
+///
+/// mutex for bypassAltsAtnCache updates
+///
+private let bypassAltsAtnCacheMutex = Mutex()
+
+
 ///
 /// This is all the parsing support code essentially; most of it is error recovery stuff.
 ///
 open class Parser: Recognizer<ParserATNSimulator> {
@@ -58,25 +72,6 @@ open class Parser: Recognizer<ParserATNSimulator> {
         }
     }
 
-    ///
-    /// mutex for bypassAltsAtnCache updates
-    ///
-    private let bypassAltsAtnCacheMutex = Mutex()
-
-    ///
-    /// mutex for decisionToDFA updates
-    ///
-    private let decisionToDFAMutex = Mutex()
-
-    ///
-    /// This field maps from the serialized ATN string to the deserialized _org.antlr.v4.runtime.atn.ATN_ with
-    /// bypass alternatives.
-    ///
-    /// - SeeAlso: org.antlr.v4.runtime.atn.ATNDeserializationOptions#isGenerateRuleBypassTransitions()
-    ///
-    private let bypassAltsAtnCache: HashMap<String, ATN> = HashMap<String, ATN>()
-
-
     ///
     /// The error handling strategy for the parser. The default value is a new
     /// instance of _org.antlr.v4.runtime.DefaultErrorStrategy_.
@@ -417,23 +412,21 @@ open class Parser: Recognizer<ParserATNSimulator> {
     ///
     /// The ATN with bypass alternatives is expensive to create so we create it
    /// lazily.
     ///
     /// - Throws: _ANTLRError.unsupportedOperation_ if the current parser does not
     /// implement the _#getSerializedATN()_ method.
     ///
     ///
     public func getATNWithBypassAlts() -> ATN {
         let serializedAtn = getSerializedATN()
 
-        var result = bypassAltsAtnCache[serializedAtn]
-        bypassAltsAtnCacheMutex.synchronized { [unowned self] in
-            if result == nil {
-                let deserializationOptions = ATNDeserializationOptions()
-                try! deserializationOptions.setGenerateRuleBypassTransitions(true)
-                result = try! ATNDeserializer(deserializationOptions).deserialize(Array(serializedAtn))
-                self.bypassAltsAtnCache[serializedAtn] = result!
-            }
-        }
-        return result!
+        return bypassAltsAtnCacheMutex.synchronized {
+            if let cachedResult = bypassAltsAtnCache[serializedAtn] {
+                return cachedResult
+            }
+
+            let deserializationOptions = ATNDeserializationOptions()
+            try! deserializationOptions.setGenerateRuleBypassTransitions(true)
+            let result = try! ATNDeserializer(deserializationOptions).deserialize(Array(serializedAtn))
+            bypassAltsAtnCache[serializedAtn] = result
+            return result
+        }
     }
 
     ///
@@ -965,18 +958,13 @@ open class Parser: Recognizer<ParserATNSimulator> {
 
     /// For debugging and other purposes.
     public func getDFAStrings() -> [String] {
-        var s = [String]()
-        guard let _interp = _interp else {
-            return s
-        }
-        decisionToDFAMutex.synchronized { [unowned self] in
-            for d in 0..<_interp.decisionToDFA.count {
-                let dfa = _interp.decisionToDFA[d]
-                s.append(dfa.toString(self.getVocabulary()))
-            }
-        }
-        return s
+        guard let _interp = _interp else {
+            return []
+        }
+
+        let vocab = getVocabulary()
+        return _interp.decisionToDFA.map {
+            $0.toString(vocab)
+        }
     }
 
     /// For debugging and other purposes.
@ -984,19 +972,16 @@ open class Parser: Recognizer<ParserATNSimulator> {
|
|||
guard let _interp = _interp else {
|
||||
return
|
||||
}
|
||||
decisionToDFAMutex.synchronized { [unowned self] in
|
||||
var seenOne = false
|
||||
|
||||
for dfa in _interp.decisionToDFA {
|
||||
if !dfa.states.isEmpty {
|
||||
if seenOne {
|
||||
print("")
|
||||
}
|
||||
print("Decision \(dfa.decision):")
|
||||
|
||||
print(dfa.toString(self.getVocabulary()), terminator: "")
|
||||
seenOne = true
|
||||
var seenOne = false
|
||||
let vocab = getVocabulary()
|
||||
for dfa in _interp.decisionToDFA {
|
||||
if !dfa.states.isEmpty {
|
||||
if seenOne {
|
||||
print("")
|
||||
}
|
||||
print("Decision \(dfa.decision):")
|
||||
print(dfa.toString(vocab), terminator: "")
|
||||
seenOne = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -303,7 +303,7 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
 
     public final func getConflictingAltSubsets() -> [BitSet] {
         let length = configs.count
-        let configToAlts = HashMap<Int, BitSet>(count: length)
+        var configToAlts = [Int: BitSet]()
 
         for i in 0..<length {
             let hash = configHash(configs[i].state.stateNumber, configs[i].context)
@@ -318,12 +318,12 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
             try! alts.set(configs[i].alt)
         }
 
-        return configToAlts.values
+        return Array(configToAlts.values)
     }
 
-    public final func getStateToAltMap() -> HashMap<ATNState, BitSet> {
+    public final func getStateToAltMap() -> [ATNState: BitSet] {
         let length = configs.count
-        let m = HashMap<ATNState, BitSet>(count: length)
+        var m = [ATNState: BitSet]()
 
         for i in 0..<length {
             var alts: BitSet
@@ -412,7 +412,7 @@ public class ATNConfigSet: Hashable, CustomStringConvertible {
     public final func applyPrecedenceFilter(_ mergeCache: inout DoubleKeyMap<PredictionContext, PredictionContext, PredictionContext>?,_ parser: Parser,_ _outerContext: ParserRuleContext!) throws -> ATNConfigSet {
 
         let configSet = ATNConfigSet(fullCtx)
-        let statesFromAlt1 = HashMap<Int, PredictionContext>(count: configs.count)
+        var statesFromAlt1 = [Int: PredictionContext]()
         for config in configs {
             // handle alt 1 first
             if config.alt != 1 {
@@ -75,11 +75,10 @@ open class ATNSimulator {
     open func getCachedContext(_ context: PredictionContext) -> PredictionContext {
         //TODO: synced (sharedContextCache!)
         //synced (sharedContextCache!) {
-        let visited = HashMap<PredictionContext, PredictionContext>()
-
+        var visited = [PredictionContext: PredictionContext]()
         return PredictionContext.getCachedContext(context,
                                                   sharedContextCache,
-                                                  visited)
+                                                  &visited)
     }
 
     public static func edgeFactory(_ atn: ATN,
@@ -711,7 +711,7 @@ open class LexerATNSimulator: ATNSimulator {
 
         return dfaStatesMutex.synchronized {
             if let existing = dfa.states[proposed] {
-                return existing!
+                return existing
             }
 
             let newState = proposed
@@ -19,10 +19,9 @@ public enum LookupDictionaryType: Int {
 }
 
 public struct LookupDictionary {
-    private var type: LookupDictionaryType
-    // private var cache: HashMap<Int, [ATNConfig]> = HashMap<Int, [ATNConfig]>()
-    //
-    private var cache: HashMap<Int, ATNConfig> = HashMap<Int, ATNConfig>()
+    private let type: LookupDictionaryType
+    private var cache = [Int: ATNConfig]()
 
     public init(type: LookupDictionaryType = LookupDictionaryType.lookup) {
         self.type = type
     }
@@ -48,82 +47,41 @@ public struct LookupDictionary {
             return true
         }
 
-
-            let same: Bool =
-                lhs.state.stateNumber == rhs.state.stateNumber &&
+            return
+                lhs.state.stateNumber == rhs.state.stateNumber &&
                 lhs.alt == rhs.alt &&
                 lhs.semanticContext == rhs.semanticContext
-
-            return same
-
-        } else {
+        }
+        else {
             //Ordered
             return lhs == rhs
         }
     }
 
-    //    public mutating func getOrAdd(config: ATNConfig) -> ATNConfig {
-    //
-    //        let h = hash(config)
-    //
-    //        if let configList = cache[h] {
-    //            let length = configList.count
-    //            for i in 0..<length {
-    //                if equal(configList[i], config) {
-    //                    return configList[i]
-    //                }
-    //            }
-    //            cache[h]!.append(config)
-    //        } else {
-    //            cache[h] = [config]
-    //        }
-    //
-    //        return config
-    //
-    //    }
-    public mutating func getOrAdd(_ config: ATNConfig) -> ATNConfig {
-
-        let h = hash(config)
-
-        if let configList = cache[h] {
-            return configList
-        } else {
-            cache[h] = config
-        }
-
-        return config
+    public mutating func getOrAdd(_ config: ATNConfig) -> ATNConfig {
+        let h = hash(config)
+
+        if let configList = cache[h] {
+            return configList
+        }
+        else {
+            cache[h] = config
+        }
+
+        return config
     }
 
     public var isEmpty: Bool {
         return cache.isEmpty
     }
 
-    //    public func contains(config: ATNConfig) -> Bool {
-    //
-    //        let h = hash(config)
-    //        if let configList = cache[h] {
-    //            for c in configList {
-    //                if equal(c, config) {
-    //                    return true
-    //                }
-    //            }
-    //        }
-    //
-    //        return false
-    //
-    //    }
     public func contains(_ config: ATNConfig) -> Bool {
-
         let h = hash(config)
-        if let _ = cache[h] {
-            return true
-        }
-
-        return false
-
+        return cache[h] != nil
     }
 
     public mutating func removeAll() {
-        cache.clear()
+        cache.removeAll()
     }
 
 }
@@ -255,7 +255,7 @@ open class ParserATNSimulator: ATNSimulator {
 
     public final var decisionToDFA: [DFA]
 
     ///
     ///
     /// SLL, LL, or LL + exact ambig detection?
     ///
 
@@ -2013,7 +2013,7 @@ open class ParserATNSimulator: ATNSimulator {
 
         return dfaStatesMutex.synchronized {
             if let existing = dfa.states[D] {
-                return existing!
+                return existing
             }
 
             D.stateNumber = dfa.states.count
@@ -580,71 +580,67 @@ public class PredictionContext: Hashable, CustomStringConvertible {
     public static func getCachedContext(
         _ context: PredictionContext,
         _ contextCache: PredictionContextCache,
-        _ visited: HashMap<PredictionContext, PredictionContext>) -> PredictionContext {
-        if context.isEmpty() {
-            return context
-        }
-
-        var existing = visited[context]
-        if existing != nil {
-            return existing!
-        }
-
-        existing = contextCache.get(context)
-        if existing != nil {
-            visited[context] = existing!
-            return existing!
-        }
-
-        var changed = false
-        var parents = [PredictionContext?](repeating: nil, count: context.size())
-        let length = parents.count
-        for i in 0..<length {
-            //added by janyou
-            if context.getParent(i) == nil {
-                return context
-            }
-
-            let parent = getCachedContext(context.getParent(i)!, contextCache, visited)
-            //modified by janyou != !==
-            if changed || parent !== context.getParent(i) {
-                if !changed {
-                    parents = [PredictionContext?](repeating: nil, count: context.size())
-
-                    for j in 0..<context.size() {
-                        parents[j] = context.getParent(j)
-                    }
-
-                    changed = true
-                }
-
-                parents[i] = parent
-            }
-        }
-
-        if !changed {
-            contextCache.add(context)
-            visited[context] = context
-            return context
-        }
-
-        let updated: PredictionContext
-        if parents.isEmpty {
-            updated = EMPTY
-        }
-        else if parents.count == 1 {
-            updated = SingletonPredictionContext.create(parents[0], context.getReturnState(0))
-        }
-        else {
-            let arrayPredictionContext = context as! ArrayPredictionContext
-            updated = ArrayPredictionContext(parents, arrayPredictionContext.returnStates)
-        }
-
-        contextCache.add(updated)
-        visited[updated] = updated
-        visited[context] = updated
-
-        return updated
+        _ visited: inout [PredictionContext: PredictionContext]) -> PredictionContext {
+        if context.isEmpty() {
+            return context
+        }
+
+        if let visitedContext = visited[context] {
+            return visitedContext
+        }
+
+        if let cachedContext = contextCache.get(context) {
+            visited[context] = cachedContext
+            return cachedContext
+        }
+
+        var changed = false
+        var parents = [PredictionContext?](repeating: nil, count: context.size())
+        let length = parents.count
+        for i in 0..<length {
+            if context.getParent(i) == nil {
+                return context
+            }
+
+            let parent = getCachedContext(context.getParent(i)!, contextCache, &visited)
+            if changed || parent !== context.getParent(i) {
+                if !changed {
+                    parents = [PredictionContext?](repeating: nil, count: context.size())
+                    for j in 0..<context.size() {
+                        parents[j] = context.getParent(j)
+                    }
+
+                    changed = true
+                }
+
+                parents[i] = parent
+            }
+        }
+
+        if !changed {
+            contextCache.add(context)
+            visited[context] = context
+            return context
+        }
+
+        let updated: PredictionContext
+        if parents.isEmpty {
+            updated = EMPTY
+        }
+        else if parents.count == 1 {
+            updated = SingletonPredictionContext.create(parents[0], context.getReturnState(0))
+        }
+        else {
+            let arrayPredictionContext = context as! ArrayPredictionContext
+            updated = ArrayPredictionContext(parents, arrayPredictionContext.returnStates)
+        }
+
+        contextCache.add(updated)
+        visited[updated] = updated
+        visited[context] = updated
+
+        return updated
     }
 
 
@@ -652,14 +648,14 @@ public class PredictionContext: Hashable, CustomStringConvertible {
     // ter's recursive version of Sam's getAllNodes()
     public static func getAllContextNodes(_ context: PredictionContext) -> [PredictionContext] {
         var nodes = [PredictionContext]()
-        let visited = HashMap<PredictionContext, PredictionContext>()
-        getAllContextNodes_(context, &nodes, visited)
+        var visited = [PredictionContext: PredictionContext]()
+        getAllContextNodes_(context, &nodes, &visited)
         return nodes
     }
 
-    public static func getAllContextNodes_(_ context: PredictionContext?,
-                                           _ nodes: inout [PredictionContext],
-                                           _ visited: HashMap<PredictionContext, PredictionContext>) {
+    private static func getAllContextNodes_(_ context: PredictionContext?,
+                                            _ nodes: inout [PredictionContext],
+                                            _ visited: inout [PredictionContext: PredictionContext]) {
         guard let context = context, visited[context] == nil else {
             return
         }
@@ -667,7 +663,7 @@ public class PredictionContext: Hashable, CustomStringConvertible {
         nodes.append(context)
         let length = context.size()
         for i in 0..<length {
-            getAllContextNodes_(context.getParent(i), &nodes, visited)
+            getAllContextNodes_(context.getParent(i), &nodes, &visited)
         }
     }
 
@@ -12,12 +12,12 @@
 ///
 
 public final class PredictionContextCache {
-    //internal final var
-    var cache: HashMap<PredictionContext, PredictionContext> =
-        HashMap<PredictionContext, PredictionContext>()
+    private var cache = [PredictionContext: PredictionContext]()
 
     public init() {
     }
+
     ///
     /// Add a context to the cache and return it. If the context already exists,
     /// return that one instead and do not add a new context to the cache.
     /// Protect shared cache from unsafe thread access.
@@ -27,10 +27,9 @@ public final class PredictionContextCache {
         if ctx === PredictionContext.EMPTY {
             return PredictionContext.EMPTY
         }
-        let existing: PredictionContext? = cache[ctx]
-        if existing != nil {
+        if let existing = cache[ctx] {
             // print(name+" reuses "+existing);
-            return existing!
+            return existing
         }
         cache[ctx] = ctx
         return ctx
@@ -493,7 +493,7 @@ public enum PredictionMode {
     /// map[c._org.antlr.v4.runtime.atn.ATNConfig#state state_] U= c._org.antlr.v4.runtime.atn.ATNConfig#alt alt_
     ///
     ///
-    public static func getStateToAltMap(_ configs: ATNConfigSet) -> HashMap<ATNState, BitSet> {
+    public static func getStateToAltMap(_ configs: ATNConfigSet) -> [ATNState: BitSet] {
         return configs.getStateToAltMap()
     }
 
@@ -7,20 +7,17 @@
 
 public class DFA: CustomStringConvertible {
     ///
-    /// A set of all DFA states. Use _java.util.Map_ so we can get old state back
-    /// (_java.util.Set_ only allows you to see if it's there).
+    /// A set of all DFA states.
     ///
+    public var states = [DFAState: DFAState]()
 
-    public final var states: HashMap<DFAState, DFAState?> = HashMap<DFAState, DFAState?>()
-
-    public /*volatile*/ var s0: DFAState?
+    public var s0: DFAState?
 
     public final var decision: Int
 
     ///
     /// From which ATN state did we create this DFA?
     ///
     public let atnStartState: DecisionState
 
     ///
@@ -11,34 +11,36 @@
 /// This nested hash table saves creating a single key each time we access
 /// map; avoids mem creation.
 ///
-public struct DoubleKeyMap<Key1:Hashable, Key2:Hashable, Value> {
-    private var data: HashMap<Key1, HashMap<Key2, Value>> = HashMap<Key1, HashMap<Key2, Value>>()
+public struct DoubleKeyMap<Key1: Hashable, Key2: Hashable, Value> {
+    private var data = [Key1: [Key2: Value]]()
 
     @discardableResult
     public mutating func put(_ k1: Key1, _ k2: Key2, _ v: Value) -> Value? {
-
-        var data2 = data[k1]
-        var prev: Value? = nil
-        if data2 == nil {
-            data2 = HashMap<Key2, Value>()
-
-        } else {
-            prev = data2![k2]
+        let prev: Value?
+        if var data2 = data[k1] {
+            prev = data2[k2]
+            data2[k2] = v
+            data[k1] = data2
+        }
+        else {
+            prev = nil
+            let data2 = [
+                k2: v
+            ]
+            data[k1] = data2
         }
-        data2![k2] = v
-        data[k1] = data2
         return prev
     }
 
     public func get(_ k1: Key1, _ k2: Key2) -> Value? {
-
         if let data2 = data[k1] {
             return data2[k2]
         }
-
         return nil
     }
 
-    public func get(_ k1: Key1) -> HashMap<Key2, Value>? {
+    public func get(_ k1: Key1) -> [Key2: Value]? {
         return data[k1]
     }
 }
@@ -1,475 +0,0 @@
-///
-/// Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
-/// Use of this file is governed by the BSD 3-clause license that
-/// can be found in the LICENSE.txt file in the project root.
-///
-
-final class Entry<K: Hashable,V>: CustomStringConvertible {
-    final var key: K
-    final var value: V
-    final var next: Entry<K,V>!
-    final var hash: Int
-
-    ///
-    /// Creates new entry.
-    ///
-    init(_ h: Int, _ k: K, _ v: V, _ n: Entry<K,V>!) {
-        value = v
-        next = n
-        key = k
-        hash = h
-    }
-
-    final func getKey() -> K {
-        return key
-    }
-
-    final func getValue() -> V {
-        return value
-    }
-
-    final func setValue(_ newValue: V) -> V {
-        let oldValue: V = value
-        value = newValue
-        return oldValue
-    }
-
-    final var hashValue: Int {
-        return key.hashValue
-    }
-
-    var description: String { return "\(getKey())=\(getValue())" }
-
-}
-func == <K, V: Equatable>(lhs: Entry<K,V>, rhs: Entry<K,V>) -> Bool {
-    if lhs === rhs {
-        return true
-    }
-    if lhs.key == rhs.key {
-        if lhs.value == rhs.value {
-            return true
-        }
-    }
-    return false
-}
-func == <K, V: Equatable>(lhs: Entry<K,V?>, rhs: Entry<K,V?>) -> Bool {
-    if lhs === rhs {
-        return true
-    }
-    if lhs.key == rhs.key {
-        if lhs.value == nil && rhs.value == nil {
-            return true
-        } else if lhs.value != nil && rhs.value != nil && lhs.value! == rhs.value! {
-            return true
-        }
-    }
-    return false
-}
-
-
-
-public final class HashMap<K: Hashable,V>: Sequence
-{
-
-    ///
-    /// The default initial capacity - MUST be a power of two.
-    ///
-    private let DEFAULT_INITIAL_CAPACITY: Int = 16
-
-    ///
-    /// The maximum capacity, used if a higher value is implicitly specified
-    /// by either of the constructors with arguments.
-    /// MUST be a power of two <= 1<<30.
-    ///
-    private let MAXIMUM_CAPACITY: Int = 1 << 30
-
-    ///
-    /// The load factor used when none specified in constructor.
-    ///
-    private let DEFAULT_LOAD_FACTOR: Float = 0.75
-
-    ///
-    /// The table, resized as necessary. Length MUST Always be a power of two.
-    ///
-    var table: [Entry<K,V>?]
-
-    ///
-    /// The number of key-value mappings contained in this map.
-    ///
-    var size: Int = 0
-
-    ///
-    /// The next size value at which to resize (capacity * load factor).
-    /// -
-    ///
-    var threshold: Int = 0
-
-    ///
-    /// The load factor for the hash table.
-    ///
-    /// -
-    ///
-    var loadFactor: Float = 0
-
-    ///
-    /// The number of times this HashMap has been structurally modified
-    /// Structural modifications are those that change the number of mappings in
-    /// the HashMap or otherwise modify its internal structure (e.g.,
-    /// rehash). This field is used to make iterators on Collection-views of
-    /// the HashMap fail-fast. (See ConcurrentModificationException).
-    ///
-    var modCount: Int = 0
-
-    public init(count: Int) {
-        var initialCapacity = count
-        if (count < 0)
-        {
-            initialCapacity = DEFAULT_INITIAL_CAPACITY
-        }
-        else if (count > MAXIMUM_CAPACITY)
-        {
-            initialCapacity = MAXIMUM_CAPACITY
-        } else {
-            // Find a power of 2 >= initialCapacity
-            initialCapacity = 1
-            while initialCapacity < count
-            {
-                initialCapacity <<= 1
-            }
-        }
-
-        self.loadFactor = DEFAULT_LOAD_FACTOR
-        threshold = Int(Float(initialCapacity) * loadFactor)
-        table = [Entry<K,V>?](repeating: nil, count: initialCapacity)
-    }
-    public init() {
-        self.loadFactor = DEFAULT_LOAD_FACTOR
-        threshold = Int(Float(DEFAULT_INITIAL_CAPACITY) * DEFAULT_LOAD_FACTOR)
-        table = [Entry<K,V>?](repeating: nil, count: DEFAULT_INITIAL_CAPACITY)
-    }
-
-    static func hash(_ h: Int) -> Int {
-        var h = h
-        // This function ensures that hashCodes that differ only by
-        // constant multiples at each bit position have a bounded
-        // number of collisions (approximately 8 at default load factor).
-        h ^= (h >>> 20) ^ (h >>> 12)
-        return h ^ (h >>> 7) ^ (h >>> 4)
-    }
-
-    ///
-    /// Returns index for hash code h.
-    ///
-    static func indexFor(_ h: Int, _ length: Int) -> Int {
-        return h & (length-1)
-    }
-
-    ///
-    /// Returns <tt>true</tt> if this map contains no key-value mappings.
-    ///
-    /// - returns: <tt>true</tt> if this map contains no key-value mappings
-    ///
-    public final var isEmpty: Bool {
-        return size == 0
-    }
-    public final subscript(key: K) -> V? {
-        get {
-            return get(key)
-        }
-        set {
-            if newValue == nil {
-                remove(key)
-            }else{
-                put(key,newValue!)
-            }
-        }
-    }
-
-    public final var count: Int {
-        return size
-    }
-    ///
-    /// Returns the value to which the specified key is mapped,
-    /// or `null` if this map contains no mapping for the key.
-    ///
-    /// More formally, if this map contains a mapping from a key
-    /// `k` to a value `v` such that `(key==null ? k==null :
-    /// key.equals(k))`, then this method returns `v`; otherwise
-    /// it returns `null`. (There can be at most one such mapping.)
-    ///
-    /// A return value of `null` does not necessarily
-    /// indicate that the map contains no mapping for the key; it's also
-    /// possible that the map explicitly maps the key to `null`.
-    /// The _#containsKey containsKey_ operation may be used to
-    /// distinguish these two cases.
-    ///
-    /// - seealso: #put(Object, Object)
-    ///
-    public final func get(_ key: K) -> V? {
-        let hash: Int = HashMap.hash(key.hashValue)
-        var e = table[HashMap.indexFor(hash, table.count)]
-        while let eWrap = e {
-            if eWrap.hash == hash && eWrap.key == key
-            {
-                return eWrap.value
-            }
-            e = eWrap.next
-        }
-
-        return nil
-    }
-    ///
-    /// Returns <tt>true</tt> if this map contains a mapping for the
-    /// specified key.
-    ///
-    /// - parameter key: The key whose presence in this map is to be tested
-    /// - returns: <tt>true</tt> if this map contains a mapping for the specified
-    /// key.
-    ///
-    public final func containsKey(_ key: K) -> Bool {
-        return getEntry(key) != nil
-    }
-
-    ///
-    /// Returns the entry associated with the specified key in the
-    /// HashMap. Returns null if the HashMap contains no mapping
-    /// for the key.
-    ///
-    final func getEntry(_ key: K) -> Entry<K,V>! {
-        let hash: Int = HashMap.hash(key.hashValue)
-        var e = table[HashMap.indexFor(hash, table.count)]
-        while let eWrap = e {
-            if eWrap.hash == hash && eWrap.key == key
-            {
-                return eWrap
-            }
-            e = eWrap.next
-        }
-
-        return nil
-    }
-
-
-    ///
-    /// Associates the specified value with the specified key in this map.
-    /// If the map previously contained a mapping for the key, the old
-    /// value is replaced.
-    ///
-    /// - parameter key: key with which the specified value is to be associated
-    /// - parameter value: value to be associated with the specified key
-    /// - returns: the previous value associated with <tt>key</tt>, or
-    /// <tt>null</tt> if there was no mapping for <tt>key</tt>.
-    /// (A <tt>null</tt> return can also indicate that the map
-    /// previously associated <tt>null</tt> with <tt>key</tt>.)
-    ///
-    @discardableResult
-    public final func put(_ key: K, _ value: V) -> V? {
-
-        let hash: Int = HashMap.hash(key.hashValue)
-        let i: Int = HashMap.indexFor(hash, table.count)
-        var e = table[i]
-        while let eWrap = e {
-            if eWrap.hash == hash && eWrap.key == key {
-                let oldValue = eWrap.value
-                eWrap.value = value
-                return oldValue
-            }
-            e = eWrap.next
-        }
-
-
-        modCount += 1
-        addEntry(hash, key, value, i)
-        return nil
-    }
-
-    ///
-    /// Adds a new entry with the specified key, value and hash code to
-    /// the specified bucket. It is the responsibility of this
-    /// method to resize the table if appropriate.
-    ///
-    /// Subclass overrides this to alter the behavior of put method.
-    ///
-    final func addEntry(_ hash: Int, _ key: K, _ value: V, _ bucketIndex: Int) {
-        let e = table[bucketIndex]
-        table[bucketIndex] = Entry<K,V>(hash, key, value, e)
-        let oldSize = size
-        size += 1
-        if oldSize >= threshold {
-            resize(2 * table.count)
-        }
-    }
-    ///
-    /// Rehashes the contents of this map into a new array with a
-    /// larger capacity. This method is called automatically when the
-    /// number of keys in this map reaches its threshold.
-    ///
-    /// If current capacity is MAXIMUM_CAPACITY, this method does not
-    /// resize the map, but sets threshold to Integer.MAX_VALUE.
-    /// This has the effect of preventing future calls.
-    ///
-    /// - parameter newCapacity: the new capacity, MUST be a power of two;
-    /// must be greater than current capacity unless current
-    /// capacity is MAXIMUM_CAPACITY (in which case value
-    /// is irrelevant).
-    ///
-    final func resize(_ newCapacity: Int) {
-        let oldCapacity: Int = table.count
-        if oldCapacity == MAXIMUM_CAPACITY {
-            threshold = Int.max
-            return
-        }
-
-        var newTable = [Entry<K,V>?](repeating: nil, count: newCapacity)
-        transfer(&newTable)
-        table = newTable
-        threshold = Int(Float(newCapacity) * loadFactor)
-    }
-
-    ///
-    /// Transfers all entries from current table to newTable.
-    ///
-    final func transfer(_ newTable: inout [Entry<K,V>?]) {
-
-        let newCapacity: Int = newTable.count
-        let length = table.count
-        for j in 0..<length {
-            if let e = table[j] {
-                table[j] = nil
-                var eOption: Entry<K,V>? = e
-                while let e = eOption {
-                    let next = e.next
-                    let i: Int = HashMap.indexFor(e.hash, newCapacity)
-                    e.next = newTable[i]
-                    newTable[i] = e
-                    eOption = next
-                }
-            }
-        }
-    }
-    ///
-    /// Removes all of the mappings from this map.
-    /// The map will be empty after this call returns.
-    ///
-    public final func clear() {
-        modCount += 1
-        let length = table.count
-        for i in 0..<length {
-            table[i] = nil
-        }
-        size = 0
-    }
-    @discardableResult
-    public func remove(_ key: K) -> V? {
-        if let e = removeEntryForKey(key) {
-            return e.value
-        }
-        return nil
-    }
-
-
-    final func removeEntryForKey(_ key: K) -> Entry<K,V>? {
-        let hash: Int = HashMap.hash(Int(key.hashValue))
-        let i = Int(HashMap.indexFor(hash, Int(table.count)))
-        var prev = table[i]
-        var e = prev
-
-        while let eWrap = e {
-            let next = eWrap.next
-            var _: AnyObject
-            if eWrap.hash == hash && eWrap.key == key{
-                modCount += 1
-                size -= 1
-                if prev === eWrap
-                {table[i] = next}
-                else
-                {prev?.next = next}
-                return eWrap
-            }
-            prev = eWrap
-            e = next
-        }
-
-        return e
-    }
-
-    public final var values: [V]{
-        var valueList: [V] = [V]()
-        let length = table.count
-        for j in 0..<length {
-            if let e = table[j] {
-                valueList.append(e.value)
-                var eOption: Entry<K,V>? = e
-                while let e = eOption {
-                    let next = e.next
-                    eOption = next
-                    if let eOption = eOption {
-                        valueList.append(eOption.value)
-                    }
-
-                }
-            }
-        }
-        return valueList
-    }
-
-    public final var keys: [K]{
-        var keyList: [K] = [K]()
-        let length = table.count
-        for j in 0..<length {
-            if let e = table[j] {
-                keyList.append(e.key)
-                var eOption: Entry<K,V>? = e
-                while let e = eOption {
-                    let next = e.next
-                    eOption = next
-                    if let eOption = eOption {
-                        keyList.append(eOption.key )
-                    }
-
-                }
-            }
-        }
-        return keyList
-    }
-
-
-    public func makeIterator() -> AnyIterator<(K,V)> {
-        var _next: Entry<K,V>? // next entry to return
-        let expectedModCount: Int = modCount // For fast-fail
-        var index: Int = 0 // current slot
-        //var current: HashMapEntry<K,V> // current entry
-        if size > 0{ // advance to first entry
-
-            while index < table.count && _next == nil
-            {
-                _next = table[index]
-                index += 1
-            }
-        }
-
-        return AnyIterator {
-            if self.modCount != expectedModCount
-            {
-                fatalError("\(#function) ConcurrentModificationException")
-            }
-            if let e = _next {
-                _next = e.next
-                if _next == nil{
-                    while index < self.table.count && _next == nil
-                    {
-                        _next = self.table[index]
-                        index += 1
-                    }
-                }
-                //current = e
-                return (e.getKey(),e.getValue())
-            } else {
-                return nil
-            }
-
-        }
-
-    }
-
-}