chore: upgrade coredns version (#550)
vendor/github.com/expr-lang/expr/parser/lexer/lexer.go (new file, generated, vendored, 230 lines)
@@ -0,0 +1,230 @@
package lexer

import (
	"fmt"
	"strings"

	"github.com/expr-lang/expr/file"
)

func Lex(source file.Source) ([]Token, error) {
	l := &lexer{
		source: source,
		tokens: make([]Token, 0),
		start:  0,
		end:    0,
	}
	l.commit()

	for state := root; state != nil; {
		state = state(l)
	}

	if l.err != nil {
		return nil, l.err.Bind(source)
	}

	return l.tokens, nil
}

type lexer struct {
	source     file.Source
	tokens     []Token
	start, end int
	err        *file.Error
}

const eof rune = -1

func (l *lexer) commit() {
	l.start = l.end
}

func (l *lexer) next() rune {
	if l.end >= len(l.source) {
		l.end++
		return eof
	}
	r := l.source[l.end]
	l.end++
	return r
}

func (l *lexer) peek() rune {
	r := l.next()
	l.backup()
	return r
}

func (l *lexer) backup() {
	l.end--
}

func (l *lexer) emit(t Kind) {
	l.emitValue(t, l.word())
}

func (l *lexer) emitValue(t Kind, value string) {
	l.tokens = append(l.tokens, Token{
		Location: file.Location{From: l.start, To: l.end},
		Kind:     t,
		Value:    value,
	})
	l.commit()
}

func (l *lexer) emitEOF() {
	from := l.end - 2
	if from < 0 {
		from = 0
	}
	to := l.end - 1
	if to < 0 {
		to = 0
	}
	l.tokens = append(l.tokens, Token{
		Location: file.Location{From: from, To: to},
		Kind:     EOF,
	})
	l.commit()
}

func (l *lexer) skip() {
	l.commit()
}

func (l *lexer) word() string {
	// TODO: boundary check is NOT needed here, but for some reason CI fuzz tests are failing.
	if l.start > len(l.source) || l.end > len(l.source) {
		return "__invalid__"
	}
	return string(l.source[l.start:l.end])
}

func (l *lexer) accept(valid string) bool {
	if strings.ContainsRune(valid, l.next()) {
		return true
	}
	l.backup()
	return false
}

func (l *lexer) acceptRun(valid string) {
	for strings.ContainsRune(valid, l.next()) {
	}
	l.backup()
}

func (l *lexer) skipSpaces() {
	r := l.peek()
	for ; r == ' '; r = l.peek() {
		l.next()
	}
	l.skip()
}

func (l *lexer) acceptWord(word string) bool {
	pos := l.end

	l.skipSpaces()

	for _, ch := range word {
		if l.next() != ch {
			l.end = pos
			return false
		}
	}
	if r := l.peek(); r != ' ' && r != eof {
		l.end = pos
		return false
	}

	return true
}

func (l *lexer) error(format string, args ...any) stateFn {
	if l.err == nil { // show first error
		l.err = &file.Error{
			Location: file.Location{
				From: l.end - 1,
				To:   l.end,
			},
			Message: fmt.Sprintf(format, args...),
		}
	}
	return nil
}

func digitVal(ch rune) int {
	switch {
	case '0' <= ch && ch <= '9':
		return int(ch - '0')
	case 'a' <= lower(ch) && lower(ch) <= 'f':
		return int(lower(ch) - 'a' + 10)
	}
	return 16 // larger than any legal digit val
}

func lower(ch rune) rune { return ('a' - 'A') | ch } // returns lower-case ch iff ch is ASCII letter

func (l *lexer) scanDigits(ch rune, base, n int) rune {
	for n > 0 && digitVal(ch) < base {
		ch = l.next()
		n--
	}
	if n > 0 {
		l.error("invalid char escape")
	}
	return ch
}

func (l *lexer) scanEscape(quote rune) rune {
	ch := l.next() // read character after '/'
	switch ch {
	case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', quote:
		// nothing to do
		ch = l.next()
	case '0', '1', '2', '3', '4', '5', '6', '7':
		ch = l.scanDigits(ch, 8, 3)
	case 'x':
		ch = l.scanDigits(l.next(), 16, 2)
	case 'u':
		ch = l.scanDigits(l.next(), 16, 4)
	case 'U':
		ch = l.scanDigits(l.next(), 16, 8)
	default:
		l.error("invalid char escape")
	}
	return ch
}

func (l *lexer) scanString(quote rune) (n int) {
	ch := l.next() // read character after quote
	for ch != quote {
		if ch == '\n' || ch == eof {
			l.error("literal not terminated")
			return
		}
		if ch == '\\' {
			ch = l.scanEscape(quote)
		} else {
			ch = l.next()
		}
		n++
	}
	return
}

func (l *lexer) scanRawString(quote rune) (n int) {
	ch := l.next() // read character after back tick
	for ch != quote {
		if ch == eof {
			l.error("literal not terminated")
			return
		}
		ch = l.next()
		n++
	}
	l.emitValue(String, string(l.source[l.start+1:l.end-1]))
	return
}
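Lex is the only entry point this file exposes: it runs the state machine defined in state.go below over a file.Source and returns the token slice, binding the first error (if any) back to the source. A minimal sketch of driving it directly, assuming the vendored module path above; the expression string is only an example, and real callers normally go through parser.Parse further down.

package main

import (
	"fmt"

	"github.com/expr-lang/expr/file"
	"github.com/expr-lang/expr/parser/lexer"
)

func main() {
	// Mirror what ParseWithConfig does: wrap the input in a file.Source,
	// then hand it to the lexer.
	source := file.NewSource(`user.Age >= 18 and user.Name startsWith "A"`)
	tokens, err := lexer.Lex(source)
	if err != nil {
		panic(err)
	}
	for _, t := range tokens {
		fmt.Println(t) // roughly: Identifier("user"), Operator("."), ..., EOF
	}
}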
vendor/github.com/expr-lang/expr/parser/lexer/state.go (new file, generated, vendored, 226 lines)
@@ -0,0 +1,226 @@
package lexer

import (
	"strings"

	"github.com/expr-lang/expr/parser/utils"
)

type stateFn func(*lexer) stateFn

func root(l *lexer) stateFn {
	switch r := l.next(); {
	case r == eof:
		l.emitEOF()
		return nil
	case utils.IsSpace(r):
		l.skip()
		return root
	case r == '\'' || r == '"':
		l.scanString(r)
		str, err := unescape(l.word())
		if err != nil {
			l.error("%v", err)
		}
		l.emitValue(String, str)
	case r == '`':
		l.scanRawString(r)
	case '0' <= r && r <= '9':
		l.backup()
		return number
	case r == '?':
		return questionMark
	case r == '/':
		return slash
	case r == '#':
		return pointer
	case r == '|':
		l.accept("|")
		l.emit(Operator)
	case r == ':':
		l.accept(":")
		l.emit(Operator)
	case strings.ContainsRune("([{", r):
		l.emit(Bracket)
	case strings.ContainsRune(")]}", r):
		l.emit(Bracket)
	case strings.ContainsRune(",;%+-^", r): // single rune operator
		l.emit(Operator)
	case strings.ContainsRune("&!=*<>", r): // possible double rune operator
		l.accept("&=*")
		l.emit(Operator)
	case r == '.':
		l.backup()
		return dot
	case utils.IsAlphaNumeric(r):
		l.backup()
		return identifier
	default:
		return l.error("unrecognized character: %#U", r)
	}
	return root
}

func number(l *lexer) stateFn {
	if !l.scanNumber() {
		return l.error("bad number syntax: %q", l.word())
	}
	l.emit(Number)
	return root
}

func (l *lexer) scanNumber() bool {
	digits := "0123456789_"
	// Is it hex?
	if l.accept("0") {
		// Note: Leading 0 does not mean octal in floats.
		if l.accept("xX") {
			digits = "0123456789abcdefABCDEF_"
		} else if l.accept("oO") {
			digits = "01234567_"
		} else if l.accept("bB") {
			digits = "01_"
		}
	}
	l.acceptRun(digits)
	end := l.end
	if l.accept(".") {
		// Lookup for .. operator: if after dot there is another dot (1..2), it maybe a range operator.
		if l.peek() == '.' {
			// We can't backup() here, as it would require two backups,
			// and backup() func supports only one for now. So, save and
			// restore it here.
			l.end = end
			return true
		}
		l.acceptRun(digits)
	}
	if l.accept("eE") {
		l.accept("+-")
		l.acceptRun(digits)
	}
	// Next thing mustn't be alphanumeric.
	if utils.IsAlphaNumeric(l.peek()) {
		l.next()
		return false
	}
	return true
}

func dot(l *lexer) stateFn {
	l.next()
	if l.accept("0123456789") {
		l.backup()
		return number
	}
	l.accept(".")
	l.emit(Operator)
	return root
}

func identifier(l *lexer) stateFn {
loop:
	for {
		switch r := l.next(); {
		case utils.IsAlphaNumeric(r):
			// absorb
		default:
			l.backup()
			switch l.word() {
			case "not":
				return not
			case "in", "or", "and", "matches", "contains", "startsWith", "endsWith", "let", "if", "else":
				l.emit(Operator)
			default:
				l.emit(Identifier)
			}
			break loop
		}
	}
	return root
}

func not(l *lexer) stateFn {
	l.emit(Operator)

	l.skipSpaces()

	end := l.end

	// Get the next word.
	for {
		r := l.next()
		if utils.IsAlphaNumeric(r) {
			// absorb
		} else {
			l.backup()
			break
		}
	}

	switch l.word() {
	case "in", "matches", "contains", "startsWith", "endsWith":
		l.emit(Operator)
	default:
		l.end = end
	}
	return root
}

func questionMark(l *lexer) stateFn {
	l.accept(".?")
	l.emit(Operator)
	return root
}

func slash(l *lexer) stateFn {
	if l.accept("/") {
		return singleLineComment
	}
	if l.accept("*") {
		return multiLineComment
	}
	l.emit(Operator)
	return root
}

func singleLineComment(l *lexer) stateFn {
	for {
		r := l.next()
		if r == eof || r == '\n' {
			break
		}
	}
	l.skip()
	return root
}

func multiLineComment(l *lexer) stateFn {
	for {
		r := l.next()
		if r == eof {
			return l.error("unclosed comment")
		}
		if r == '*' && l.accept("/") {
			break
		}
	}
	l.skip()
	return root
}

func pointer(l *lexer) stateFn {
	l.accept("#")
	l.emit(Operator)
	for {
		switch r := l.next(); {
		case utils.IsAlphaNumeric(r): // absorb
		default:
			l.backup()
			if l.word() != "" {
				l.emit(Identifier)
			}
			return root
		}
	}
}
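state.go follows the functional state-machine lexer style: each state is a function that consumes runes and returns the next state, and the loop in Lex keeps calling states until one returns nil. A self-contained toy in the same style, purely illustrative and not part of the vendored code:

package main

import "fmt"

// machine is a toy driver in the same shape as the lexer above:
// a state consumes input and returns the next state, nil to stop.
type machine struct {
	input string
	pos   int
	out   []string
}

type state func(*machine) state

func digits(m *machine) state {
	start := m.pos
	for m.pos < len(m.input) && m.input[m.pos] >= '0' && m.input[m.pos] <= '9' {
		m.pos++
	}
	if m.pos > start {
		m.out = append(m.out, "Number("+m.input[start:m.pos]+")")
	}
	if m.pos >= len(m.input) {
		return nil
	}
	return other
}

func other(m *machine) state {
	m.out = append(m.out, "Char("+string(m.input[m.pos])+")")
	m.pos++
	if m.pos >= len(m.input) {
		return nil
	}
	return digits
}

func main() {
	m := &machine{input: "12+34"}
	for s := digits; s != nil; {
		s = s(m)
	}
	fmt.Println(m.out) // [Number(12) Char(+) Number(34)]
}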
vendor/github.com/expr-lang/expr/parser/lexer/token.go (new file, generated, vendored, 47 lines)
@@ -0,0 +1,47 @@
package lexer

import (
	"fmt"

	"github.com/expr-lang/expr/file"
)

type Kind string

const (
	Identifier Kind = "Identifier"
	Number     Kind = "Number"
	String     Kind = "String"
	Operator   Kind = "Operator"
	Bracket    Kind = "Bracket"
	EOF        Kind = "EOF"
)

type Token struct {
	file.Location
	Kind  Kind
	Value string
}

func (t Token) String() string {
	if t.Value == "" {
		return string(t.Kind)
	}
	return fmt.Sprintf("%s(%#v)", t.Kind, t.Value)
}

func (t Token) Is(kind Kind, values ...string) bool {
	if len(values) == 0 {
		return kind == t.Kind
	}

	for _, v := range values {
		if v == t.Value {
			goto found
		}
	}
	return false

found:
	return kind == t.Kind
}
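Token.Is is the lookahead helper the parser relies on: with no values it matches on kind alone, with values the token text must also equal one of them. A small usage sketch; the token literal is illustrative:

package main

import (
	"fmt"

	"github.com/expr-lang/expr/parser/lexer"
)

func main() {
	tok := lexer.Token{Kind: lexer.Operator, Value: "not"}
	fmt.Println(tok.Is(lexer.Operator))              // true: kind matches
	fmt.Println(tok.Is(lexer.Operator, "in", "not")) // true: kind and one value match
	fmt.Println(tok.Is(lexer.Identifier, "not"))     // false: kind differs
}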
vendor/github.com/expr-lang/expr/parser/lexer/utils.go (new file, generated, vendored, 186 lines)
@@ -0,0 +1,186 @@
package lexer

import (
	"fmt"
	"math"
	"strings"
	"unicode/utf8"
)

var (
	newlineNormalizer = strings.NewReplacer("\r\n", "\n", "\r", "\n")
)

// Unescape takes a quoted string, unquotes, and unescapes it.
func unescape(value string) (string, error) {
	// All strings normalize newlines to the \n representation.
	value = newlineNormalizer.Replace(value)
	n := len(value)

	// Nothing to unescape / decode.
	if n < 2 {
		return value, fmt.Errorf("unable to unescape string")
	}

	// Quoted string of some form, must have same first and last char.
	if value[0] != value[n-1] || (value[0] != '"' && value[0] != '\'') {
		return value, fmt.Errorf("unable to unescape string")
	}

	value = value[1 : n-1]

	// The string contains escape characters.
	// The following logic is adapted from `strconv/quote.go`
	var runeTmp [utf8.UTFMax]byte
	size := 3 * uint64(n) / 2
	if size >= math.MaxInt {
		return "", fmt.Errorf("too large string")
	}
	buf := make([]byte, 0, size)
	for len(value) > 0 {
		c, multibyte, rest, err := unescapeChar(value)
		if err != nil {
			return "", err
		}
		value = rest
		if c < utf8.RuneSelf || !multibyte {
			buf = append(buf, byte(c))
		} else {
			n := utf8.EncodeRune(runeTmp[:], c)
			buf = append(buf, runeTmp[:n]...)
		}
	}
	return string(buf), nil
}

// unescapeChar takes a string input and returns the following info:
//
//	value - the escaped unicode rune at the front of the string.
//	multibyte - whether the rune value might require multiple bytes to represent.
//	tail - the remainder of the input string.
//	err - error value, if the character could not be unescaped.
//
// When multibyte is true the return value may still fit within a single byte,
// but a multibyte conversion is attempted which is more expensive than when the
// value is known to fit within one byte.
func unescapeChar(s string) (value rune, multibyte bool, tail string, err error) {
	// 1. Character is not an escape sequence.
	switch c := s[0]; {
	case c >= utf8.RuneSelf:
		r, size := utf8.DecodeRuneInString(s)
		return r, true, s[size:], nil
	case c != '\\':
		return rune(s[0]), false, s[1:], nil
	}

	// 2. Last character is the start of an escape sequence.
	if len(s) <= 1 {
		err = fmt.Errorf("unable to unescape string, found '\\' as last character")
		return
	}

	c := s[1]
	s = s[2:]
	// 3. Common escape sequences shared with Google SQL
	switch c {
	case 'a':
		value = '\a'
	case 'b':
		value = '\b'
	case 'f':
		value = '\f'
	case 'n':
		value = '\n'
	case 'r':
		value = '\r'
	case 't':
		value = '\t'
	case 'v':
		value = '\v'
	case '\\':
		value = '\\'
	case '\'':
		value = '\''
	case '"':
		value = '"'
	case '`':
		value = '`'
	case '?':
		value = '?'

	// 4. Unicode escape sequences, reproduced from `strconv/quote.go`
	case 'x', 'X', 'u', 'U':
		n := 0
		switch c {
		case 'x', 'X':
			n = 2
		case 'u':
			n = 4
		case 'U':
			n = 8
		}
		var v rune
		if len(s) < n {
			err = fmt.Errorf("unable to unescape string")
			return
		}
		for j := 0; j < n; j++ {
			x, ok := unhex(s[j])
			if !ok {
				err = fmt.Errorf("unable to unescape string")
				return
			}
			v = v<<4 | x
		}
		s = s[n:]
		if v > utf8.MaxRune {
			err = fmt.Errorf("unable to unescape string")
			return
		}
		value = v
		multibyte = true

	// 5. Octal escape sequences, must be three digits \[0-3][0-7][0-7]
	case '0', '1', '2', '3':
		if len(s) < 2 {
			err = fmt.Errorf("unable to unescape octal sequence in string")
			return
		}
		v := rune(c - '0')
		for j := 0; j < 2; j++ {
			x := s[j]
			if x < '0' || x > '7' {
				err = fmt.Errorf("unable to unescape octal sequence in string")
				return
			}
			v = v*8 + rune(x-'0')
		}
		if v > utf8.MaxRune {
			err = fmt.Errorf("unable to unescape string")
			return
		}
		value = v
		s = s[2:]
		multibyte = true

	// Unknown escape sequence.
	default:
		err = fmt.Errorf("unable to unescape string")
	}

	tail = s
	return
}

func unhex(b byte) (rune, bool) {
	c := rune(b)
	switch {
	case '0' <= c && c <= '9':
		return c - '0', true
	case 'a' <= c && c <= 'f':
		return c - 'a' + 10, true
	case 'A' <= c && c <= 'F':
		return c - 'A' + 10, true
	}
	return 0, false
}
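unescape and unescapeChar are unexported, so the natural way to exercise them is a test inside package lexer. A minimal table-test sketch; the file name and cases are illustrative, chosen to match the escape rules above:

package lexer

import "testing"

func TestUnescapeSketch(t *testing.T) {
	// input strings still carry their surrounding quotes, as l.word() would.
	cases := map[string]string{
		`"hello"`:      "hello",  // plain quoted string
		`"a\nb"`:       "a\nb",   // common escape
		`"\x41\u0042"`: "AB",     // hex and unicode escapes
		`'it\'s'`:      "it's",   // single-quoted form
	}
	for in, want := range cases {
		got, err := unescape(in)
		if err != nil || got != want {
			t.Errorf("unescape(%q) = %q, %v; want %q", in, got, err, want)
		}
	}
}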
vendor/github.com/expr-lang/expr/parser/operator/operator.go (new file, generated, vendored, 69 lines)
@@ -0,0 +1,69 @@
package operator

type Associativity int

const (
	Left Associativity = iota + 1
	Right
)

type Operator struct {
	Precedence    int
	Associativity Associativity
}

func Less(a, b string) bool {
	return Binary[a].Precedence < Binary[b].Precedence
}

func IsBoolean(op string) bool {
	return op == "and" || op == "or" || op == "&&" || op == "||"
}

func AllowedNegateSuffix(op string) bool {
	switch op {
	case "contains", "matches", "startsWith", "endsWith", "in":
		return true
	default:
		return false
	}
}

var Unary = map[string]Operator{
	"not": {50, Left},
	"!":   {50, Left},
	"-":   {90, Left},
	"+":   {90, Left},
}

var Binary = map[string]Operator{
	"|":          {0, Left},
	"or":         {10, Left},
	"||":         {10, Left},
	"and":        {15, Left},
	"&&":         {15, Left},
	"==":         {20, Left},
	"!=":         {20, Left},
	"<":          {20, Left},
	">":          {20, Left},
	">=":         {20, Left},
	"<=":         {20, Left},
	"in":         {20, Left},
	"matches":    {20, Left},
	"contains":   {20, Left},
	"startsWith": {20, Left},
	"endsWith":   {20, Left},
	"..":         {25, Left},
	"+":          {30, Left},
	"-":          {30, Left},
	"*":          {60, Left},
	"/":          {60, Left},
	"%":          {60, Left},
	"**":         {100, Right},
	"^":          {100, Right},
	"??":         {500, Left},
}

func IsComparison(op string) bool {
	return op == "<" || op == ">" || op == ">=" || op == "<="
}
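These tables drive the precedence climbing in parser.go below: parseExpression only folds an operator whose precedence is at least the current level, and the helper functions are thin lookups over the same data. A quick illustration reading values straight from the tables above:

package main

import (
	"fmt"

	"github.com/expr-lang/expr/parser/operator"
)

func main() {
	fmt.Println(operator.Binary["+"].Precedence) // 30
	fmt.Println(operator.Binary["*"].Precedence) // 60
	fmt.Println(operator.Less("+", "*"))         // true: "+" binds weaker than "*"
	fmt.Println(operator.Binary["**"].Associativity == operator.Right) // true: exponentiation is right-associative
	fmt.Println(operator.IsComparison(">="))     // true
}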
vendor/github.com/expr-lang/expr/parser/parser.go (new file, generated, vendored, 891 lines)
@@ -0,0 +1,891 @@
package parser

import (
	"fmt"
	"math"
	"strconv"
	"strings"

	. "github.com/expr-lang/expr/ast"
	"github.com/expr-lang/expr/builtin"
	"github.com/expr-lang/expr/conf"
	"github.com/expr-lang/expr/file"
	. "github.com/expr-lang/expr/parser/lexer"
	"github.com/expr-lang/expr/parser/operator"
	"github.com/expr-lang/expr/parser/utils"
)

type arg byte

const (
	expr arg = 1 << iota
	predicate
)

const optional arg = 1 << 7

var predicates = map[string]struct {
	args []arg
}{
	"all":           {[]arg{expr, predicate}},
	"none":          {[]arg{expr, predicate}},
	"any":           {[]arg{expr, predicate}},
	"one":           {[]arg{expr, predicate}},
	"filter":        {[]arg{expr, predicate}},
	"map":           {[]arg{expr, predicate}},
	"count":         {[]arg{expr, predicate | optional}},
	"sum":           {[]arg{expr, predicate | optional}},
	"find":          {[]arg{expr, predicate}},
	"findIndex":     {[]arg{expr, predicate}},
	"findLast":      {[]arg{expr, predicate}},
	"findLastIndex": {[]arg{expr, predicate}},
	"groupBy":       {[]arg{expr, predicate}},
	"sortBy":        {[]arg{expr, predicate, expr | optional}},
	"reduce":        {[]arg{expr, predicate, expr | optional}},
}

type parser struct {
	tokens    []Token
	current   Token
	pos       int
	err       *file.Error
	config    *conf.Config
	depth     int  // predicate call depth
	nodeCount uint // tracks number of AST nodes created
}

func (p *parser) checkNodeLimit() error {
	p.nodeCount++
	if p.config == nil {
		if p.nodeCount > conf.DefaultMaxNodes {
			p.error("compilation failed: expression exceeds maximum allowed nodes")
			return nil
		}
		return nil
	}
	if p.config.MaxNodes > 0 && p.nodeCount > p.config.MaxNodes {
		p.error("compilation failed: expression exceeds maximum allowed nodes")
		return nil
	}
	return nil
}

func (p *parser) createNode(n Node, loc file.Location) Node {
	if err := p.checkNodeLimit(); err != nil {
		return nil
	}
	if n == nil || p.err != nil {
		return nil
	}
	n.SetLocation(loc)
	return n
}

func (p *parser) createMemberNode(n *MemberNode, loc file.Location) *MemberNode {
	if err := p.checkNodeLimit(); err != nil {
		return nil
	}
	if n == nil || p.err != nil {
		return nil
	}
	n.SetLocation(loc)
	return n
}

type Tree struct {
	Node   Node
	Source file.Source
}

func Parse(input string) (*Tree, error) {
	return ParseWithConfig(input, nil)
}

func ParseWithConfig(input string, config *conf.Config) (*Tree, error) {
	source := file.NewSource(input)

	tokens, err := Lex(source)
	if err != nil {
		return nil, err
	}

	p := &parser{
		tokens:  tokens,
		current: tokens[0],
		config:  config,
	}

	node := p.parseSequenceExpression()

	if !p.current.Is(EOF) {
		p.error("unexpected token %v", p.current)
	}

	tree := &Tree{
		Node:   node,
		Source: source,
	}

	if p.err != nil {
		return tree, p.err.Bind(source)
	}

	return tree, nil
}

func (p *parser) error(format string, args ...any) {
	p.errorAt(p.current, format, args...)
}

func (p *parser) errorAt(token Token, format string, args ...any) {
	if p.err == nil { // show first error
		p.err = &file.Error{
			Location: token.Location,
			Message:  fmt.Sprintf(format, args...),
		}
	}
}

func (p *parser) next() {
	p.pos++
	if p.pos >= len(p.tokens) {
		p.error("unexpected end of expression")
		return
	}
	p.current = p.tokens[p.pos]
}

func (p *parser) expect(kind Kind, values ...string) {
	if p.current.Is(kind, values...) {
		p.next()
		return
	}
	p.error("unexpected token %v", p.current)
}

// parse functions

func (p *parser) parseSequenceExpression() Node {
	nodes := []Node{p.parseExpression(0)}

	for p.current.Is(Operator, ";") && p.err == nil {
		p.next()
		// If a trailing semicolon is present, break out.
		if p.current.Is(EOF) {
			break
		}
		nodes = append(nodes, p.parseExpression(0))
	}

	if len(nodes) == 1 {
		return nodes[0]
	}

	return p.createNode(&SequenceNode{
		Nodes: nodes,
	}, nodes[0].Location())
}

func (p *parser) parseExpression(precedence int) Node {
	if p.err != nil {
		return nil
	}

	if precedence == 0 && p.current.Is(Operator, "let") {
		return p.parseVariableDeclaration()
	}

	if precedence == 0 && p.current.Is(Operator, "if") {
		return p.parseConditionalIf()
	}

	nodeLeft := p.parsePrimary()

	prevOperator := ""
	opToken := p.current
	for opToken.Is(Operator) && p.err == nil {
		negate := opToken.Is(Operator, "not")
		var notToken Token

		// Handle "not *" operator, like "not in" or "not contains".
		if negate {
			currentPos := p.pos
			p.next()
			if operator.AllowedNegateSuffix(p.current.Value) {
				if op, ok := operator.Binary[p.current.Value]; ok && op.Precedence >= precedence {
					notToken = p.current
					opToken = p.current
				} else {
					p.pos = currentPos
					p.current = opToken
					break
				}
			} else {
				p.error("unexpected token %v", p.current)
				break
			}
		}

		if op, ok := operator.Binary[opToken.Value]; ok && op.Precedence >= precedence {
			p.next()

			if opToken.Value == "|" {
				identToken := p.current
				p.expect(Identifier)
				nodeLeft = p.parseCall(identToken, []Node{nodeLeft}, true)
				goto next
			}

			if prevOperator == "??" && opToken.Value != "??" && !opToken.Is(Bracket, "(") {
				p.errorAt(opToken, "Operator (%v) and coalesce expressions (??) cannot be mixed. Wrap either by parentheses.", opToken.Value)
				break
			}

			if operator.IsComparison(opToken.Value) {
				nodeLeft = p.parseComparison(nodeLeft, opToken, op.Precedence)
				goto next
			}

			var nodeRight Node
			if op.Associativity == operator.Left {
				nodeRight = p.parseExpression(op.Precedence + 1)
			} else {
				nodeRight = p.parseExpression(op.Precedence)
			}

			nodeLeft = p.createNode(&BinaryNode{
				Operator: opToken.Value,
				Left:     nodeLeft,
				Right:    nodeRight,
			}, opToken.Location)
			if nodeLeft == nil {
				return nil
			}

			if negate {
				nodeLeft = p.createNode(&UnaryNode{
					Operator: "not",
					Node:     nodeLeft,
				}, notToken.Location)
				if nodeLeft == nil {
					return nil
				}
			}

			goto next
		}
		break

	next:
		prevOperator = opToken.Value
		opToken = p.current
	}

	if precedence == 0 {
		nodeLeft = p.parseConditional(nodeLeft)
	}

	return nodeLeft
}

func (p *parser) parseVariableDeclaration() Node {
	p.expect(Operator, "let")
	variableName := p.current
	p.expect(Identifier)
	p.expect(Operator, "=")
	value := p.parseExpression(0)
	p.expect(Operator, ";")
	node := p.parseSequenceExpression()
	return p.createNode(&VariableDeclaratorNode{
		Name:  variableName.Value,
		Value: value,
		Expr:  node,
	}, variableName.Location)
}

func (p *parser) parseConditionalIf() Node {
	p.next()
	nodeCondition := p.parseExpression(0)
	p.expect(Bracket, "{")
	expr1 := p.parseSequenceExpression()
	p.expect(Bracket, "}")
	p.expect(Operator, "else")
	p.expect(Bracket, "{")
	expr2 := p.parseSequenceExpression()
	p.expect(Bracket, "}")

	return &ConditionalNode{
		Cond: nodeCondition,
		Exp1: expr1,
		Exp2: expr2,
	}

}

func (p *parser) parseConditional(node Node) Node {
	var expr1, expr2 Node
	for p.current.Is(Operator, "?") && p.err == nil {
		p.next()

		if !p.current.Is(Operator, ":") {
			expr1 = p.parseExpression(0)
			p.expect(Operator, ":")
			expr2 = p.parseExpression(0)
		} else {
			p.next()
			expr1 = node
			expr2 = p.parseExpression(0)
		}

		node = p.createNode(&ConditionalNode{
			Cond: node,
			Exp1: expr1,
			Exp2: expr2,
		}, p.current.Location)
		if node == nil {
			return nil
		}
	}
	return node
}

func (p *parser) parsePrimary() Node {
	token := p.current

	if token.Is(Operator) {
		if op, ok := operator.Unary[token.Value]; ok {
			p.next()
			expr := p.parseExpression(op.Precedence)
			node := p.createNode(&UnaryNode{
				Operator: token.Value,
				Node:     expr,
			}, token.Location)
			if node == nil {
				return nil
			}
			return p.parsePostfixExpression(node)
		}
	}

	if token.Is(Bracket, "(") {
		p.next()
		expr := p.parseSequenceExpression()
		p.expect(Bracket, ")") // "an opened parenthesis is not properly closed"
		return p.parsePostfixExpression(expr)
	}

	if p.depth > 0 {
		if token.Is(Operator, "#") || token.Is(Operator, ".") {
			name := ""
			if token.Is(Operator, "#") {
				p.next()
				if p.current.Is(Identifier) {
					name = p.current.Value
					p.next()
				}
			}
			node := p.createNode(&PointerNode{Name: name}, token.Location)
			if node == nil {
				return nil
			}
			return p.parsePostfixExpression(node)
		}
	}

	if token.Is(Operator, "::") {
		p.next()
		token = p.current
		p.expect(Identifier)
		return p.parsePostfixExpression(p.parseCall(token, []Node{}, false))
	}

	return p.parseSecondary()
}

func (p *parser) parseSecondary() Node {
	var node Node
	token := p.current

	switch token.Kind {

	case Identifier:
		p.next()
		switch token.Value {
		case "true":
			node = p.createNode(&BoolNode{Value: true}, token.Location)
			if node == nil {
				return nil
			}
			return node
		case "false":
			node = p.createNode(&BoolNode{Value: false}, token.Location)
			if node == nil {
				return nil
			}
			return node
		case "nil":
			node = p.createNode(&NilNode{}, token.Location)
			if node == nil {
				return nil
			}
			return node
		default:
			if p.current.Is(Bracket, "(") {
				node = p.parseCall(token, []Node{}, true)
			} else {
				node = p.createNode(&IdentifierNode{Value: token.Value}, token.Location)
				if node == nil {
					return nil
				}
			}
		}

	case Number:
		p.next()
		value := strings.Replace(token.Value, "_", "", -1)
		var node Node
		valueLower := strings.ToLower(value)
		switch {
		case strings.HasPrefix(valueLower, "0x"):
			number, err := strconv.ParseInt(value, 0, 64)
			if err != nil {
				p.error("invalid hex literal: %v", err)
			}
			node = p.toIntegerNode(number)
		case strings.ContainsAny(valueLower, ".e"):
			number, err := strconv.ParseFloat(value, 64)
			if err != nil {
				p.error("invalid float literal: %v", err)
			}
			node = p.toFloatNode(number)
		case strings.HasPrefix(valueLower, "0b"):
			number, err := strconv.ParseInt(value, 0, 64)
			if err != nil {
				p.error("invalid binary literal: %v", err)
			}
			node = p.toIntegerNode(number)
		case strings.HasPrefix(valueLower, "0o"):
			number, err := strconv.ParseInt(value, 0, 64)
			if err != nil {
				p.error("invalid octal literal: %v", err)
			}
			node = p.toIntegerNode(number)
		default:
			number, err := strconv.ParseInt(value, 10, 64)
			if err != nil {
				p.error("invalid integer literal: %v", err)
			}
			node = p.toIntegerNode(number)
		}
		if node != nil {
			node.SetLocation(token.Location)
		}
		return node
	case String:
		p.next()
		node = p.createNode(&StringNode{Value: token.Value}, token.Location)
		if node == nil {
			return nil
		}

	default:
		if token.Is(Bracket, "[") {
			node = p.parseArrayExpression(token)
		} else if token.Is(Bracket, "{") {
			node = p.parseMapExpression(token)
		} else {
			p.error("unexpected token %v", token)
		}
	}

	return p.parsePostfixExpression(node)
}

func (p *parser) toIntegerNode(number int64) Node {
	if number > math.MaxInt {
		p.error("integer literal is too large")
		return nil
	}
	return p.createNode(&IntegerNode{Value: int(number)}, p.current.Location)
}

func (p *parser) toFloatNode(number float64) Node {
	if number > math.MaxFloat64 {
		p.error("float literal is too large")
		return nil
	}
	return p.createNode(&FloatNode{Value: number}, p.current.Location)
}

func (p *parser) parseCall(token Token, arguments []Node, checkOverrides bool) Node {
	var node Node

	isOverridden := false
	if p.config != nil {
		isOverridden = p.config.IsOverridden(token.Value)
	}
	isOverridden = isOverridden && checkOverrides

	if b, ok := predicates[token.Value]; ok && !isOverridden {
		p.expect(Bracket, "(")

		// In case of the pipe operator, the first argument is the left-hand side
		// of the operator, so we do not parse it as an argument inside brackets.
		args := b.args[len(arguments):]

		for i, arg := range args {
			if arg&optional == optional {
				if p.current.Is(Bracket, ")") {
					break
				}
			} else {
				if p.current.Is(Bracket, ")") {
					p.error("expected at least %d arguments", len(args))
				}
			}

			if i > 0 {
				p.expect(Operator, ",")
			}
			var node Node
			switch {
			case arg&expr == expr:
				node = p.parseExpression(0)
			case arg&predicate == predicate:
				node = p.parsePredicate()
			}
			arguments = append(arguments, node)
		}

		// skip last comma
		if p.current.Is(Operator, ",") {
			p.next()
		}
		p.expect(Bracket, ")")

		node = p.createNode(&BuiltinNode{
			Name:      token.Value,
			Arguments: arguments,
		}, token.Location)
		if node == nil {
			return nil
		}
	} else if _, ok := builtin.Index[token.Value]; ok && (p.config == nil || !p.config.Disabled[token.Value]) && !isOverridden {
		node = p.createNode(&BuiltinNode{
			Name:      token.Value,
			Arguments: p.parseArguments(arguments),
		}, token.Location)
		if node == nil {
			return nil
		}

	} else {
		callee := p.createNode(&IdentifierNode{Value: token.Value}, token.Location)
		if callee == nil {
			return nil
		}
		node = p.createNode(&CallNode{
			Callee:    callee,
			Arguments: p.parseArguments(arguments),
		}, token.Location)
		if node == nil {
			return nil
		}
	}
	return node
}

func (p *parser) parseArguments(arguments []Node) []Node {
	// If pipe operator is used, the first argument is the left-hand side
	// of the operator, so we do not parse it as an argument inside brackets.
	offset := len(arguments)

	p.expect(Bracket, "(")
	for !p.current.Is(Bracket, ")") && p.err == nil {
		if len(arguments) > offset {
			p.expect(Operator, ",")
		}
		if p.current.Is(Bracket, ")") {
			break
		}
		node := p.parseExpression(0)
		arguments = append(arguments, node)
	}
	p.expect(Bracket, ")")

	return arguments
}

func (p *parser) parsePredicate() Node {
	startToken := p.current
	withBrackets := false
	if p.current.Is(Bracket, "{") {
		p.next()
		withBrackets = true
	}

	p.depth++
	var node Node
	if withBrackets {
		node = p.parseSequenceExpression()
	} else {
		node = p.parseExpression(0)
		if p.current.Is(Operator, ";") {
			p.error("wrap predicate with brackets { and }")
		}
	}
	p.depth--

	if withBrackets {
		p.expect(Bracket, "}")
	}
	predicateNode := p.createNode(&PredicateNode{
		Node: node,
	}, startToken.Location)
	if predicateNode == nil {
		return nil
	}
	return predicateNode
}

func (p *parser) parseArrayExpression(token Token) Node {
	nodes := make([]Node, 0)

	p.expect(Bracket, "[")
	for !p.current.Is(Bracket, "]") && p.err == nil {
		if len(nodes) > 0 {
			p.expect(Operator, ",")
			if p.current.Is(Bracket, "]") {
				goto end
			}
		}
		node := p.parseExpression(0)
		nodes = append(nodes, node)
	}
end:
	p.expect(Bracket, "]")

	node := p.createNode(&ArrayNode{Nodes: nodes}, token.Location)
	if node == nil {
		return nil
	}
	return node
}

func (p *parser) parseMapExpression(token Token) Node {
	p.expect(Bracket, "{")

	nodes := make([]Node, 0)
	for !p.current.Is(Bracket, "}") && p.err == nil {
		if len(nodes) > 0 {
			p.expect(Operator, ",")
			if p.current.Is(Bracket, "}") {
				goto end
			}
			if p.current.Is(Operator, ",") {
				p.error("unexpected token %v", p.current)
			}
		}

		var key Node
		// Map key can be one of:
		//  * number
		//  * string
		//  * identifier, which is equivalent to a string
		//  * expression, which must be enclosed in parentheses -- (1 + 2)
		if p.current.Is(Number) || p.current.Is(String) || p.current.Is(Identifier) {
			key = p.createNode(&StringNode{Value: p.current.Value}, p.current.Location)
			if key == nil {
				return nil
			}
			p.next()
		} else if p.current.Is(Bracket, "(") {
			key = p.parseExpression(0)
		} else {
			p.error("a map key must be a quoted string, a number, a identifier, or an expression enclosed in parentheses (unexpected token %v)", p.current)
		}

		p.expect(Operator, ":")

		node := p.parseExpression(0)
		pair := p.createNode(&PairNode{Key: key, Value: node}, token.Location)
		if pair == nil {
			return nil
		}
		nodes = append(nodes, pair)
	}

end:
	p.expect(Bracket, "}")

	node := p.createNode(&MapNode{Pairs: nodes}, token.Location)
	if node == nil {
		return nil
	}
	return node
}

func (p *parser) parsePostfixExpression(node Node) Node {
	postfixToken := p.current
	for (postfixToken.Is(Operator) || postfixToken.Is(Bracket)) && p.err == nil {
		optional := postfixToken.Value == "?."
	parseToken:
		if postfixToken.Value == "." || postfixToken.Value == "?." {
			p.next()

			propertyToken := p.current
			if optional && propertyToken.Is(Bracket, "[") {
				postfixToken = propertyToken
				goto parseToken
			}
			p.next()

			if propertyToken.Kind != Identifier &&
				// Operators like "not" and "matches" are valid methods or property names.
				(propertyToken.Kind != Operator || !utils.IsValidIdentifier(propertyToken.Value)) {
				p.error("expected name")
			}

			property := p.createNode(&StringNode{Value: propertyToken.Value}, propertyToken.Location)
			if property == nil {
				return nil
			}

			chainNode, isChain := node.(*ChainNode)
			optional := postfixToken.Value == "?."

			if isChain {
				node = chainNode.Node
			}

			memberNode := p.createMemberNode(&MemberNode{
				Node:     node,
				Property: property,
				Optional: optional,
			}, propertyToken.Location)
			if memberNode == nil {
				return nil
			}

			if p.current.Is(Bracket, "(") {
				memberNode.Method = true
				node = p.createNode(&CallNode{
					Callee:    memberNode,
					Arguments: p.parseArguments([]Node{}),
				}, propertyToken.Location)
				if node == nil {
					return nil
				}
			} else {
				node = memberNode
			}

			if isChain || optional {
				node = p.createNode(&ChainNode{Node: node}, propertyToken.Location)
				if node == nil {
					return nil
				}
			}

		} else if postfixToken.Value == "[" {
			p.next()
			var from, to Node

			if p.current.Is(Operator, ":") { // slice without from [:1]
				p.next()

				if !p.current.Is(Bracket, "]") { // slice without from and to [:]
					to = p.parseExpression(0)
				}

				node = p.createNode(&SliceNode{
					Node: node,
					To:   to,
				}, postfixToken.Location)
				if node == nil {
					return nil
				}
				p.expect(Bracket, "]")

			} else {

				from = p.parseExpression(0)

				if p.current.Is(Operator, ":") {
					p.next()

					if !p.current.Is(Bracket, "]") { // slice without to [1:]
						to = p.parseExpression(0)
					}

					node = p.createNode(&SliceNode{
						Node: node,
						From: from,
						To:   to,
					}, postfixToken.Location)
					if node == nil {
						return nil
					}
					p.expect(Bracket, "]")

				} else {
					// Slice operator [:] was not found,
					// it should be just an index node.
					node = p.createNode(&MemberNode{
						Node:     node,
						Property: from,
						Optional: optional,
					}, postfixToken.Location)
					if node == nil {
						return nil
					}
					if optional {
						node = p.createNode(&ChainNode{Node: node}, postfixToken.Location)
						if node == nil {
							return nil
						}
					}
					p.expect(Bracket, "]")
				}
			}
		} else {
			break
		}
		postfixToken = p.current
	}
	return node
}
func (p *parser) parseComparison(left Node, token Token, precedence int) Node {
	var rootNode Node
	for {
		comparator := p.parseExpression(precedence + 1)
		cmpNode := p.createNode(&BinaryNode{
			Operator: token.Value,
			Left:     left,
			Right:    comparator,
		}, token.Location)
		if cmpNode == nil {
			return nil
		}
		if rootNode == nil {
			rootNode = cmpNode
		} else {
			rootNode = p.createNode(&BinaryNode{
				Operator: "&&",
				Left:     rootNode,
				Right:    cmpNode,
			}, token.Location)
			if rootNode == nil {
				return nil
			}
		}

		left = comparator
		token = p.current
		if !(token.Is(Operator) && operator.IsComparison(token.Value) && p.err == nil) {
			break
		}
		p.next()
	}
	return rootNode
}
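Parse is the public entry point of this package: it lexes the input, runs the recursive-descent functions above, and returns a Tree whose Node is the AST root, with any error bound to the source for positioned messages. A minimal usage sketch; the expression is only an example:

package main

import (
	"fmt"

	"github.com/expr-lang/expr/parser"
)

func main() {
	tree, err := parser.Parse(`filter(items, # > 2)`)
	if err != nil {
		panic(err)
	}
	// tree.Node is the ast.Node root (here a BuiltinNode for "filter");
	// printing it is a quick way to inspect the parsed structure.
	fmt.Println(tree.Node)
}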
vendor/github.com/expr-lang/expr/parser/utils/utils.go (new file, generated, vendored, 34 lines)
@@ -0,0 +1,34 @@
package utils

import (
	"unicode"
	"unicode/utf8"
)

func IsValidIdentifier(str string) bool {
	if len(str) == 0 {
		return false
	}
	h, w := utf8.DecodeRuneInString(str)
	if !IsAlphabetic(h) {
		return false
	}
	for _, r := range str[w:] {
		if !IsAlphaNumeric(r) {
			return false
		}
	}
	return true
}

func IsSpace(r rune) bool {
	return unicode.IsSpace(r)
}

func IsAlphaNumeric(r rune) bool {
	return IsAlphabetic(r) || unicode.IsDigit(r)
}

func IsAlphabetic(r rune) bool {
	return r == '_' || r == '$' || unicode.IsLetter(r)
}
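These helpers define what counts as an identifier throughout the lexer and parser: '_' and '$' are accepted alongside Unicode letters, and digits are allowed after the first rune. A small illustration:

package main

import (
	"fmt"

	"github.com/expr-lang/expr/parser/utils"
)

func main() {
	fmt.Println(utils.IsValidIdentifier("$user_1")) // true
	fmt.Println(utils.IsValidIdentifier("1user"))   // false: must not start with a digit
	fmt.Println(utils.IsValidIdentifier(""))        // false
}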