From 1b8e1a4d4ea4a80070be4739a8fc449b08901d2e Mon Sep 17 00:00:00 2001
From: Justin King
Date: Tue, 8 Mar 2022 12:08:09 -0800
Subject: [PATCH] [Go] Port locking algorithm from C++ to Go

---
 .../v4/test/runtime/templates/Go.test.stg     |   4 +-
 runtime/Go/antlr/atn.go                       |  21 +-
 runtime/Go/antlr/atn_state.go                 |   6 +
 runtime/Go/antlr/dfa.go                       |  34 +--
 runtime/Go/antlr/dfa_state.go                 |  14 +-
 runtime/Go/antlr/lexer_atn_simulator.go       |  50 +++--
 runtime/Go/antlr/parser_atn_simulator.go      |  34 +--
 .../antlr/v4/tool/templates/codegen/Go/Go.stg | 206 ++++++++++--------
 8 files changed, 198 insertions(+), 171 deletions(-)

diff --git a/runtime-testsuite/resources/org/antlr/v4/test/runtime/templates/Go.test.stg b/runtime-testsuite/resources/org/antlr/v4/test/runtime/templates/Go.test.stg
index eb8def1959..8ab158d8c5 100644
--- a/runtime-testsuite/resources/org/antlr/v4/test/runtime/templates/Go.test.stg
+++ b/runtime-testsuite/resources/org/antlr/v4/test/runtime/templates/Go.test.stg
@@ -103,9 +103,7 @@ PositionAdjustingLexerDef() ::= ""
 PositionAdjustingLexer() ::= <<
 func (p *PositionAdjustingLexer) NextToken() antlr.Token {
 	if _, ok := p.Interpreter.(*PositionAdjustingLexerATNSimulator); !ok {
-		lexerDeserializer := antlr.NewATNDeserializer(nil)
-		lexerAtn := lexerDeserializer.DeserializeFromUInt16(serializedLexerAtn)
-		p.Interpreter = NewPositionAdjustingLexerATNSimulator(p, lexerAtn, p.Interpreter.DecisionToDFA(), p.Interpreter.SharedContextCache())
+		p.Interpreter = NewPositionAdjustingLexerATNSimulator(p, p.Interpreter.ATN(), p.Interpreter.DecisionToDFA(), p.Interpreter.SharedContextCache())
 		p.Virt = p
 	}
 
diff --git a/runtime/Go/antlr/atn.go b/runtime/Go/antlr/atn.go
index 1592212e14..a4e2079e65 100644
--- a/runtime/Go/antlr/atn.go
+++ b/runtime/Go/antlr/atn.go
@@ -4,6 +4,8 @@
 
 package antlr
 
+import "sync"
+
 var ATNInvalidAltNumber int
 
 type ATN struct {
@@ -37,6 +39,10 @@ type ATN struct {
 	ruleToTokenType []int
 
 	states []ATNState
+
+	mu sync.Mutex
+	stateMu sync.RWMutex
+	edgeMu sync.RWMutex
 }
 
 func NewATN(grammarType int, maxTokenType int) *ATN {
@@ -59,14 +65,15 @@ func (a *ATN) NextTokensInContext(s ATNState, ctx RuleContext) *IntervalSet {
 // in s and staying in same rule. Token.EPSILON is in set if we reach end of
 // rule.
 func (a *ATN) NextTokensNoContext(s ATNState) *IntervalSet {
-	if s.GetNextTokenWithinRule() != nil {
-		return s.GetNextTokenWithinRule()
+	a.mu.Lock()
+	defer a.mu.Unlock()
+	iset := s.GetNextTokenWithinRule()
+	if iset == nil {
+		iset = a.NextTokensInContext(s, nil)
+		iset.readOnly = true
+		s.SetNextTokenWithinRule(iset)
 	}
-
-	s.SetNextTokenWithinRule(a.NextTokensInContext(s, nil))
-	s.GetNextTokenWithinRule().readOnly = true
-
-	return s.GetNextTokenWithinRule()
+	return iset
 }
 
 func (a *ATN) NextTokens(s ATNState, ctx RuleContext) *IntervalSet {
diff --git a/runtime/Go/antlr/atn_state.go b/runtime/Go/antlr/atn_state.go
index 563d5db38d..3835bb2e93 100644
--- a/runtime/Go/antlr/atn_state.go
+++ b/runtime/Go/antlr/atn_state.go
@@ -243,6 +243,8 @@ func NewBasicBlockStartState() *BasicBlockStartState {
 	return &BasicBlockStartState{BaseBlockStartState: b}
 }
 
+var _ BlockStartState = &BasicBlockStartState{}
+
 // BlockEndState is a terminal node of a simple (a|b|c) block.
 type BlockEndState struct {
 	*BaseATNState
@@ -318,6 +320,8 @@ func NewPlusBlockStartState() *PlusBlockStartState {
 	return &PlusBlockStartState{BaseBlockStartState: b}
 }
 
+var _ BlockStartState = &PlusBlockStartState{}
+
 // StarBlockStartState is the block that begins a closure loop.
 type StarBlockStartState struct {
 	*BaseBlockStartState
@@ -331,6 +335,8 @@ func NewStarBlockStartState() *StarBlockStartState {
 	return &StarBlockStartState{BaseBlockStartState: b}
 }
 
+var _ BlockStartState = &StarBlockStartState{}
+
 type StarLoopbackState struct {
 	*BaseATNState
 }
diff --git a/runtime/Go/antlr/dfa.go b/runtime/Go/antlr/dfa.go
index 4f14f82360..d55a2a87d5 100644
--- a/runtime/Go/antlr/dfa.go
+++ b/runtime/Go/antlr/dfa.go
@@ -6,9 +6,6 @@ package antlr
 
 import (
 	"sort"
-	"sync"
-	"sync/atomic"
-	"unsafe"
 )
 
 type DFA struct {
@@ -19,23 +16,28 @@ type DFA struct {
 	// states is all the DFA states. Use Map to get the old state back; Set can only
 	// indicate whether it is there.
-	states map[int]*DFAState
-	statesMu sync.RWMutex
+	states map[int]*DFAState
 
 	s0 *DFAState
 
 	// precedenceDfa is the backing field for isPrecedenceDfa and setPrecedenceDfa.
 	// True if the DFA is for a precedence decision and false otherwise.
-	precedenceDfa bool
-	precedenceDfaMu sync.RWMutex
+	precedenceDfa bool
 }
 
 func NewDFA(atnStartState DecisionState, decision int) *DFA {
-	return &DFA{
+	dfa := &DFA{
 		atnStartState: atnStartState,
 		decision: decision,
 		states: make(map[int]*DFAState),
 	}
+	if s, ok := atnStartState.(*StarLoopEntryState); ok && s.precedenceRuleDecision {
+		dfa.precedenceDfa = true
+		dfa.s0 = NewDFAState(-1, NewBaseATNConfigSet(false))
+		dfa.s0.isAcceptState = false
+		dfa.s0.requiresFullContext = false
+	}
+	return dfa
 }
 
 // getPrecedenceStartState gets the start state for the current precedence and
@@ -80,8 +82,6 @@ func (d *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
 }
 
 func (d *DFA) getPrecedenceDfa() bool {
-	d.precedenceDfaMu.RLock()
-	defer d.precedenceDfaMu.RUnlock()
 	return d.precedenceDfa
 }
 
@@ -105,42 +105,32 @@ func (d *DFA) setPrecedenceDfa(precedenceDfa bool) {
 			d.setS0(nil)
 		}
 
-		d.precedenceDfaMu.Lock()
-		defer d.precedenceDfaMu.Unlock()
 		d.precedenceDfa = precedenceDfa
 	}
 }
 
 func (d *DFA) getS0() *DFAState {
-	return (*DFAState)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(&d.s0))))
+	return d.s0
 }
 
 func (d *DFA) setS0(s *DFAState) {
-	atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(&d.s0)), unsafe.Pointer(s))
+	d.s0 = s
 }
 
 func (d *DFA) getState(hash int) (*DFAState, bool) {
-	d.statesMu.RLock()
-	defer d.statesMu.RUnlock()
 	s, ok := d.states[hash]
 	return s, ok
 }
 
 func (d *DFA) setStates(states map[int]*DFAState) {
-	d.statesMu.Lock()
-	defer d.statesMu.Unlock()
 	d.states = states
 }
 
 func (d *DFA) setState(hash int, state *DFAState) {
-	d.statesMu.Lock()
-	defer d.statesMu.Unlock()
 	d.states[hash] = state
 }
 
 func (d *DFA) numStates() int {
-	d.statesMu.RLock()
-	defer d.statesMu.RUnlock()
 	return len(d.states)
 }
diff --git a/runtime/Go/antlr/dfa_state.go b/runtime/Go/antlr/dfa_state.go
index 3a81706adf..dc8a37b15e 100644
--- a/runtime/Go/antlr/dfa_state.go
+++ b/runtime/Go/antlr/dfa_state.go
@@ -6,7 +6,6 @@ package antlr
 
 import (
 	"fmt"
-	"sync"
 )
 
 // PredPrediction maps a predicate to a predicted alternative.
@@ -50,8 +49,7 @@ type DFAState struct {
 	// edges elements point to the target of the symbol. Shift up by 1 so (-1)
 	// Token.EOF maps to the first element.
-	edges []*DFAState
-	edgesMu sync.RWMutex
+	edges []*DFAState
 
 	isAcceptState bool
 
@@ -109,32 +107,22 @@ func (d *DFAState) GetAltSet() Set {
 }
 
 func (d *DFAState) getEdges() []*DFAState {
-	d.edgesMu.RLock()
-	defer d.edgesMu.RUnlock()
 	return d.edges
 }
 
 func (d *DFAState) numEdges() int {
-	d.edgesMu.RLock()
-	defer d.edgesMu.RUnlock()
 	return len(d.edges)
 }
 
 func (d *DFAState) getIthEdge(i int) *DFAState {
-	d.edgesMu.RLock()
-	defer d.edgesMu.RUnlock()
 	return d.edges[i]
 }
 
 func (d *DFAState) setEdges(newEdges []*DFAState) {
-	d.edgesMu.Lock()
-	defer d.edgesMu.Unlock()
 	d.edges = newEdges
 }
 
 func (d *DFAState) setIthEdge(i int, edge *DFAState) {
-	d.edgesMu.Lock()
-	defer d.edgesMu.Unlock()
 	d.edges[i] = edge
 }
 
diff --git a/runtime/Go/antlr/lexer_atn_simulator.go b/runtime/Go/antlr/lexer_atn_simulator.go
index 730bea47aa..dc05153ea4 100644
--- a/runtime/Go/antlr/lexer_atn_simulator.go
+++ b/runtime/Go/antlr/lexer_atn_simulator.go
@@ -91,11 +91,16 @@ func (l *LexerATNSimulator) Match(input CharStream, mode int) int {
 
 	dfa := l.decisionToDFA[mode]
 
-	if dfa.getS0() == nil {
+	var s0 *DFAState
+	l.atn.stateMu.RLock()
+	s0 = dfa.getS0()
+	l.atn.stateMu.RUnlock()
+
+	if s0 == nil {
 		return l.MatchATN(input)
 	}
 
-	return l.execATN(input, dfa.getS0())
+	return l.execATN(input, s0)
 }
 
 func (l *LexerATNSimulator) reset() {
@@ -117,11 +122,7 @@ func (l *LexerATNSimulator) MatchATN(input CharStream) int {
 	suppressEdge := s0Closure.hasSemanticContext
 	s0Closure.hasSemanticContext = false
 
-	next := l.addDFAState(s0Closure)
-
-	if !suppressEdge {
-		l.decisionToDFA[l.mode].setS0(next)
-	}
+	next := l.addDFAState(s0Closure, suppressEdge)
 
 	predict := l.execATN(input, next)
 
@@ -203,10 +204,15 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
 // {@code t}, or {@code nil} if the target state for l edge is not
 // already cached
 func (l *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState {
-	if s.getEdges() == nil || t < LexerATNSimulatorMinDFAEdge || t > LexerATNSimulatorMaxDFAEdge {
+	if t < LexerATNSimulatorMinDFAEdge || t > LexerATNSimulatorMaxDFAEdge {
 		return nil
 	}
 
+	l.atn.edgeMu.RLock()
+	defer l.atn.edgeMu.RUnlock()
+	if s.getEdges() == nil {
+		return nil
+	}
 	target := s.getIthEdge(t - LexerATNSimulatorMinDFAEdge)
 	if LexerATNSimulatorDebug && target != nil {
 		fmt.Println("reuse state " + strconv.Itoa(s.stateNumber) + " edge to " + strconv.Itoa(target.stateNumber))
@@ -537,7 +543,7 @@ func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfg
 		suppressEdge := cfgs.HasSemanticContext()
 		cfgs.SetHasSemanticContext(false)
 
-		to = l.addDFAState(cfgs)
+		to = l.addDFAState(cfgs, true)
 
 		if suppressEdge {
 			return to
@@ -551,6 +557,8 @@ func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfg
 	if LexerATNSimulatorDebug {
 		fmt.Println("EDGE " + from.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
 	}
+	l.atn.edgeMu.Lock()
+	defer l.atn.edgeMu.Unlock()
 	if from.getEdges() == nil {
 		// make room for tokens 1..n and -1 masquerading as index 0
 		from.setEdges(make([]*DFAState, LexerATNSimulatorMaxDFAEdge-LexerATNSimulatorMinDFAEdge+1))
@@ -564,7 +572,7 @@ func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfg
 // configurations already. This method also detects the first
 // configuration containing an ATN rule stop state. Later, when
 // traversing the DFA, we will know which rule to accept.
-func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
+func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet, suppressEdge bool) *DFAState {
 	proposed := NewDFAState(-1, configs)
 	var firstConfigWithRuleStopState ATNConfig
 
@@ -585,16 +593,22 @@ func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
 	}
 	hash := proposed.hash()
 	dfa := l.decisionToDFA[l.mode]
+
+	l.atn.stateMu.Lock()
+	defer l.atn.stateMu.Unlock()
 	existing, ok := dfa.getState(hash)
 	if ok {
-		return existing
-	}
-	newState := proposed
-	newState.stateNumber = dfa.numStates()
-	configs.SetReadOnly(true)
-	newState.configs = configs
-	dfa.setState(hash, newState)
-	return newState
+		proposed = existing
+	} else {
+		proposed.stateNumber = dfa.numStates()
+		configs.SetReadOnly(true)
+		proposed.configs = configs
+		dfa.setState(hash, proposed)
+	}
+	if !suppressEdge {
+		dfa.setS0(proposed)
+	}
+	return proposed
 }
 
 func (l *LexerATNSimulator) getDFA(mode int) *DFA {
diff --git a/runtime/Go/antlr/parser_atn_simulator.go b/runtime/Go/antlr/parser_atn_simulator.go
index d7a3665bec..9a41d82845 100644
--- a/runtime/Go/antlr/parser_atn_simulator.go
+++ b/runtime/Go/antlr/parser_atn_simulator.go
@@ -96,14 +96,18 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
 	// Now we are certain to have a specific decision's DFA
 	// But, do we still need an initial state?
 	var s0 *DFAState
+	p.atn.stateMu.RLock()
 	if dfa.getPrecedenceDfa() {
+		p.atn.edgeMu.RLock()
 		// the start state for a precedence DFA depends on the current
 		// parser precedence, and is provided by a DFA method.
 		s0 = dfa.getPrecedenceStartState(p.parser.GetPrecedence())
+		p.atn.edgeMu.RUnlock()
 	} else {
 		// the start state for a "regular" DFA is just s0
 		s0 = dfa.getS0()
 	}
+	p.atn.stateMu.RUnlock()
 
 	if s0 == nil {
 		if outerContext == nil {
@@ -114,21 +118,10 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
 				" exec LA(1)==" + p.getLookaheadName(input) +
 				", outerContext=" + outerContext.String(p.parser.GetRuleNames(), nil))
 		}
-		// If p is not a precedence DFA, we check the ATN start state
-		// to determine if p ATN start state is the decision for the
-		// closure block that determines whether a precedence rule
-		// should continue or complete.
-
-		t2 := dfa.atnStartState
-		t, ok := t2.(*StarLoopEntryState)
-		if !dfa.getPrecedenceDfa() && ok {
-			if t.precedenceRuleDecision {
-				dfa.setPrecedenceDfa(true)
-			}
-		}
 		fullCtx := false
 		s0Closure := p.computeStartState(dfa.atnStartState, RuleContextEmpty, fullCtx)
+		p.atn.stateMu.Lock()
 		if dfa.getPrecedenceDfa() {
 			// If p is a precedence DFA, we use applyPrecedenceFilter
 			// to convert the computed start state to a precedence start
 			// state. We then use DFA.setPrecedenceStartState to set the
@@ -139,12 +132,16 @@
 			dfa.s0.configs = s0Closure
 			s0Closure = p.applyPrecedenceFilter(s0Closure)
 			s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
+			p.atn.edgeMu.Lock()
 			dfa.setPrecedenceStartState(p.parser.GetPrecedence(), s0)
+			p.atn.edgeMu.Unlock()
 		} else {
 			s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
 			dfa.setS0(s0)
 		}
+		p.atn.stateMu.Unlock()
 	}
+
 	alt := p.execATN(dfa, s0, input, index, outerContext)
 	if ParserATNSimulatorDebug {
 		fmt.Println("DFA after predictATN: " + dfa.String(p.parser.GetLiteralNames(), nil))
@@ -295,11 +292,16 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream,
 // already cached
 func (p *ParserATNSimulator) getExistingTargetState(previousD *DFAState, t int) *DFAState {
-	edges := previousD.getEdges()
-	if edges == nil || t+1 < 0 || t+1 >= len(edges) {
+	if t+1 < 0 {
 		return nil
 	}
 
+	p.atn.edgeMu.RLock()
+	defer p.atn.edgeMu.RUnlock()
+	edges := previousD.getEdges()
+	if edges == nil || t+1 >= len(edges) {
+		return nil
+	}
 	return previousD.getIthEdge(t + 1)
 }
 
@@ -1446,14 +1448,18 @@ func (p *ParserATNSimulator) addDFAEdge(dfa *DFA, from *DFAState, t int, to *DFA
 	if to == nil {
 		return nil
 	}
+	p.atn.stateMu.Lock()
 	to = p.addDFAState(dfa, to) // used existing if possible not incoming
+	p.atn.stateMu.Unlock()
 	if from == nil || t < -1 || t > p.atn.maxTokenType {
 		return to
 	}
+	p.atn.edgeMu.Lock()
 	if from.getEdges() == nil {
 		from.setEdges(make([]*DFAState, p.atn.maxTokenType+1+1))
 	}
 	from.setIthEdge(t+1, to) // connect
+	p.atn.edgeMu.Unlock()
 
 	if ParserATNSimulatorDebug {
 		var names []string
diff --git a/tool/resources/org/antlr/v4/tool/templates/codegen/Go/Go.stg b/tool/resources/org/antlr/v4/tool/templates/codegen/Go/Go.stg
index a700b8f3af..f95fc8e34f 100644
--- a/tool/resources/org/antlr/v4/tool/templates/codegen/Go/Go.stg
+++ b/tool/resources/org/antlr/v4/tool/templates/codegen/Go/Go.stg
@@ -14,6 +14,7 @@ package parser //
 import (
 	"fmt"
 	"strconv"
+	"sync"
 
 	"github.com/antlr/antlr4/runtime/Go/antlr"
 )
@@ -26,6 +27,7 @@ import (
 // Suppress unused import errors
 var _ = fmt.Printf
 var _ = strconv.Itoa
+var _ = sync.Once{}
 
@@ -143,61 +145,69 @@ func (v *BaseVisitor) Visit(ctx *>
 
 Parser(parser, funcs, atn, sempredFuncs, superClass) ::= <<
-
-var parserATN = 
-
-var parserATN []uint16
-
+type struct {
+
+}
-
-var literalNames = []string{
-	,
+var ParserStaticData struct {
+	once sync.Once
+	serializedATN []uint16
+	literalNames []string
+	symbolicNames []string
+	ruleNames []string
+	predictionContextCache *antlr.PredictionContextCache
+	atn *antlr.ATN
+	decisionToDFA []*antlr.DFA
 }
-
-var literalNames []string
-
+func ParserInit() {
+	staticData := &ParserStaticData
+
+	staticData.literalNames = []string{
+		,
+	}
+
-var symbolicNames = []string{
-	,
-}
-
-var symbolicNames []string
+	staticData.symbolicNames = []string{
+		,
+	}
-
-
-var ruleNames = []string{
-	"}; separator=", ", wrap>,
+	staticData.ruleNames = []string{
+		"}; separator=", ", wrap>,
+	}
+
+	staticData.predictionContextCache = antlr.NewPredictionContextCache()
+	staticData.serializedATN = 
+	deserializer := antlr.NewATNDeserializer(nil)
+	staticData.atn = deserializer.DeserializeFromUInt16(staticData.serializedATN)
+	atn := staticData.atn
+	staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
+	decisionToDFA := staticData.decisionToDFA
+	for index, state := range atn.DecisionToState {
+		decisionToDFA[index] = antlr.NewDFA(state, index)
+	}
 }
-
-var ruleNames []string
-
-type struct {
-
+// Init initializes any static state used to implement . By default the
+// static state used to implement the parser is lazily initialized during the first call to
+// New(). You can call this function if you wish to initialize the static state ahead
+// of time.
+func Init() {
+	staticData := &ParserStaticData
+	staticData.once.Do(ParserInit)
 }
 
 // New produces a new parser instance for the optional input antlr.TokenStream.
-//
-// The * instance produced may be reused by calling the SetInputStream method.
-// The initial parser configuration is expensive to construct, and the object is not thread-safe;
-// however, if used within a Golang sync.Pool, the construction cost amortizes well and the
-// objects can be used in a thread-safe manner.
 func New(input antlr.TokenStream) * {
+	Init()
 	this := new()
-	deserializer := antlr.NewATNDeserializer(nil)
-	deserializedATN := deserializer.DeserializeFromUInt16(parserATN)
-	decisionToDFA := make([]*antlr.DFA, len(deserializedATN.DecisionToState))
-	for index, ds := range deserializedATN.DecisionToState {
-		decisionToDFA[index] = antlr.NewDFA(ds, index)
-	}
 	this.BaseParser = antlr.NewBaseParser(input)
-
-	this.Interpreter = antlr.NewParserATNSimulator(this, deserializedATN, decisionToDFA, antlr.NewPredictionContextCache())
-	this.RuleNames = ruleNames
-	this.LiteralNames = literalNames
-	this.SymbolicNames = symbolicNames
+	staticData := &ParserStaticData
+	this.Interpreter = antlr.NewParserATNSimulator(this, staticData.atn, staticData.decisionToDFA, staticData.predictionContextCache)
+	this.RuleNames = staticData.ruleNames
+	this.LiteralNames = staticData.literalNames
+	this.SymbolicNames = staticData.symbolicNames
 	this.GrammarFileName = ""
 
 	return this
@@ -1382,6 +1392,7 @@ package parser
 import (
 	"fmt"
+	"sync"
 	"unicode"
 
 	"github.com/antlr/antlr4/runtime/Go/antlr"
 )
@@ -1393,6 +1404,7 @@ import (
 
 // Suppress unused import error
 var _ = fmt.Printf
+var _ = sync.Once{}
 var _ = unicode.IsLetter
 
@@ -1403,76 +1415,82 @@ var _ = unicode.IsLetter
 >>
 
 Lexer(lexer, atn, actionFuncs, sempredFuncs, superClass) ::= <<
-
-var serializedLexerAtn = 
-
-var serializedLexerAtn []uint16
-
-
-var lexerChannelNames = []string{
-	"DEFAULT_TOKEN_CHANNEL", "HIDDEN", "}; separator=", ", wrap>,
+type struct {
+	*antlr.BaseLexer
+	channelNames []string
+	modeNames []string
+	// TODO: EOF string
 }
 
-var lexerModeNames = []string{
-	"}; separator=", ", wrap>,
+var LexerStaticData struct {
+	once sync.Once
+	serializedATN []uint16
+	channelNames []string
+	modeNames []string
+	literalNames []string
+	symbolicNames []string
+	ruleNames []string
+	predictionContextCache *antlr.PredictionContextCache
+	atn *antlr.ATN
+	decisionToDFA []*antlr.DFA
 }
 
+func LexerInit() {
+	staticData := &LexerStaticData
+	staticData.channelNames = []string{
+		"DEFAULT_TOKEN_CHANNEL", "HIDDEN", "}; separator=", ", wrap>,
+	}
+	staticData.modeNames = []string{
+		"}; separator=", ", wrap>,
+	}
-var lexerLiteralNames = []string{
-	,
-}
-
-var lexerLiteralNames []string
+	staticData.literalNames = []string{
+		,
+	}
-
-
-var lexerSymbolicNames = []string{
-	,
-}
-
-var lexerSymbolicNames []string
+	staticData.symbolicNames = []string{
+		,
+	}
-
-
-var lexerRuleNames = []string{
-	"}; separator=", ", wrap>,
+	staticData.ruleNames = []string{
+		"}; separator=", ", wrap>,
+	}
+
+	staticData.predictionContextCache = antlr.NewPredictionContextCache()
+	staticData.serializedATN = 
+	deserializer := antlr.NewATNDeserializer(nil)
+	staticData.atn = deserializer.DeserializeFromUInt16(staticData.serializedATN)
+	atn := staticData.atn
+	staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
+	decisionToDFA := staticData.decisionToDFA
+	for index, state := range atn.DecisionToState {
+		decisionToDFA[index] = antlr.NewDFA(state, index)
+	}
 }
-
-var lexerRuleNames []string
-
-type struct {
-	*antlr.BaseLexer
-	channelNames []string
-	modeNames []string
-	// TODO: EOF string
+// Init initializes any static state used to implement . By default the
+// static state used to implement the lexer is lazily initialized during the first call to
+// New(). You can call this function if you wish to initialize the static state ahead
+// of time.
+func Init() {
+	staticData := &LexerStaticData
+	staticData.once.Do(LexerInit)
 }
 
 // New produces a new lexer instance for the optional input antlr.CharStream.
-//
-// The * instance produced may be reused by calling the SetInputStream method.
-// The initial lexer configuration is expensive to construct, and the object is not thread-safe;
-// however, if used within a Golang sync.Pool, the construction cost amortizes well and the
-// objects can be used in a thread-safe manner.
 func New(input antlr.CharStream) * {
+	Init()
 	l := new()
-	lexerDeserializer := antlr.NewATNDeserializer(nil)
-	lexerAtn := lexerDeserializer.DeserializeFromUInt16(serializedLexerAtn)
-	lexerDecisionToDFA := make([]*antlr.DFA, len(lexerAtn.DecisionToState))
-	for index, ds := range lexerAtn.DecisionToState {
-		lexerDecisionToDFA[index] = antlr.NewDFA(ds, index)
-	}
 	l.BaseLexer = antlr.NewBaseLexer(input)
-	l.Interpreter = antlr.NewLexerATNSimulator(l, lexerAtn, lexerDecisionToDFA, antlr.NewPredictionContextCache())
-
-	l.channelNames = lexerChannelNames
-	l.modeNames = lexerModeNames
-	l.RuleNames = lexerRuleNames
-	l.LiteralNames = lexerLiteralNames
-	l.SymbolicNames = lexerSymbolicNames
+	staticData := &LexerStaticData
+	l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.predictionContextCache)
+	l.channelNames = staticData.channelNames
+	l.modeNames = staticData.modeNames
+	l.RuleNames = staticData.ruleNames
+	l.LiteralNames = staticData.literalNames
+	l.SymbolicNames = staticData.symbolicNames
 	l.GrammarFileName = ""
 	// TODO: l.EOF = antlr.TokenEOF