File: peek.go

Package: golang-github-alecthomas-participle-v2 2.1.4-2

package lexer

// PeekingLexer supports arbitrary lookahead as well as cloning.
type PeekingLexer struct {
	Checkpoint
	tokens []Token
	elide  map[TokenType]bool
}

// RawCursor is an index into the raw token stream, including elided tokens.
type RawCursor int

// Checkpoint wraps the mutable state of the PeekingLexer.
//
// Copying and restoring just this state is a bit faster than copying the entire PeekingLexer.
type Checkpoint struct {
	rawCursor  RawCursor // The raw position of the next possibly elided token
	nextCursor RawCursor // The raw position of the next non-elided token
	cursor     int       // Index of the next non-elided token among other non-elided tokens
}
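
// For example, for a token stream [WS, Ident, WS, EOF] with WS elided, a
// freshly upgraded PeekingLexer has rawCursor=0 (the leading WS),
// nextCursor=1 (the Ident) and cursor=0 (no non-elided token consumed yet).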

// Upgrade a Lexer to a PeekingLexer with arbitrary lookahead.
//
// The underlying Lexer is consumed eagerly, up to and including its EOF token.
// "elide" is a slice of token types to elide from processing.
func Upgrade(lex Lexer, elide ...TokenType) (*PeekingLexer, error) {
	r := &PeekingLexer{
		elide: make(map[TokenType]bool, len(elide)),
	}
	for _, rn := range elide {
		r.elide[rn] = true
	}
	for {
		t, err := lex.Next()
		if err != nil {
			return r, err
		}
		r.tokens = append(r.tokens, t)
		if t.EOF() {
			break
		}
	}
	r.advanceToNonElided()
	return r, nil
}
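
// A usage sketch, not part of the original file (myLexer and wsType are
// assumed stand-ins for a concrete Lexer and an elidable token type):
//
//	peeker, err := Upgrade(myLexer, wsType)
//	if err != nil {
//		return err // Upgrade returns the tokens read so far alongside the error
//	}
//	for tok := peeker.Next(); !tok.EOF(); tok = peeker.Next() {
//		// tok is never a wsType token here.
//	}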

// Range returns the slice of tokens between the two cursor points.
func (p *PeekingLexer) Range(rawStart, rawEnd RawCursor) []Token {
	return p.tokens[rawStart:rawEnd]
}
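
// For example, p.Range(start, p.RawCursor()) yields every raw token, elided
// ones included, consumed since the RawCursor value start was captured.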

// Cursor position in tokens, excluding elided tokens.
func (c Checkpoint) Cursor() int {
	return c.cursor
}

// RawCursor position in tokens, including elided tokens.
func (c Checkpoint) RawCursor() RawCursor {
	return c.rawCursor
}

// Next consumes and returns the next non-elided token.
//
// Once EOF is reached it is returned on every subsequent call without
// advancing the cursors.
func (p *PeekingLexer) Next() *Token {
	t := &p.tokens[p.nextCursor]
	if t.EOF() {
		return t
	}
	p.nextCursor++
	p.rawCursor = p.nextCursor
	p.cursor++
	p.advanceToNonElided()
	return t
}

// Peek ahead at the next non-elided token.
func (p *PeekingLexer) Peek() *Token {
	return &p.tokens[p.nextCursor]
}

// RawPeek peeks ahead at the next raw token.
//
// Unlike Peek, this will include elided tokens.
func (p *PeekingLexer) RawPeek() *Token {
	return &p.tokens[p.rawCursor]
}
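
// For example, with a pending elided WS token followed by an Ident, Peek
// returns the Ident while RawPeek returns the WS: rawCursor trails behind
// nextCursor until Next or FastForward re-synchronises them.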

// advanceToNonElided advances nextCursor to the closest non-elided or EOF token.
func (p *PeekingLexer) advanceToNonElided() {
	for ; ; p.nextCursor++ {
		t := &p.tokens[p.nextCursor]
		if t.EOF() || !p.elide[t.Type] {
			return
		}
	}
}

// PeekAny peeks forward over elided and non-elided tokens.
//
// Elided tokens will be returned if they match, otherwise the next
// non-elided token will be returned.
//
// The returned RawCursor position is the location of the returned token.
// Use FastForward to move the internal cursors forward.
func (p *PeekingLexer) PeekAny(match func(Token) bool) (t Token, rawCursor RawCursor) {
	for i := p.rawCursor; ; i++ {
		t = p.tokens[i]
		if t.EOF() || match(t) || !p.elide[t.Type] {
			return t, i
		}
	}
}

// FastForward the internal cursors to this RawCursor position.
func (p *PeekingLexer) FastForward(rawCursor RawCursor) {
	for ; p.rawCursor <= rawCursor; p.rawCursor++ {
		t := &p.tokens[p.rawCursor]
		if t.EOF() {
			break
		}
		if !p.elide[t.Type] {
			p.cursor++
		}
	}
	p.nextCursor = p.rawCursor
	p.advanceToNonElided()
}
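
// A sketch of consuming an elided token on demand with PeekAny plus
// FastForward (commentType is an assumed, illustrative token type):
//
//	t, raw := p.PeekAny(func(t Token) bool { return t.Type == commentType })
//	if t.Type == commentType {
//		p.FastForward(raw) // consume through the comment, updating all cursors
//	}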

// MakeCheckpoint returns the current cursor state so it can be restored later.
func (p *PeekingLexer) MakeCheckpoint() Checkpoint {
	return p.Checkpoint
}

// LoadCheckpoint restores previously saved cursor state.
func (p *PeekingLexer) LoadCheckpoint(checkpoint Checkpoint) {
	p.Checkpoint = checkpoint
}
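
// A minimal backtracking sketch (tryAlternative is hypothetical):
//
//	cp := p.MakeCheckpoint()
//	if !tryAlternative(p) {
//		p.LoadCheckpoint(cp) // rewind rawCursor, nextCursor and cursor
//	}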