Mirror of https://github.com/datarhei/core.git
Add process id and reference glob pattern matching
For the API endpoint /v3/process, two new query parameters are introduced to list only processes whose ID or reference matches a glob pattern: idpattern and refpattern. If patterns are given for both, the results are intersected. Other query parameters such as id or reference are applied after the pattern matching.
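As an illustration (not part of the commit), a request that combines both patterns could look like the following Go sketch; the host, port, and bearer token are assumptions:

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	// List only processes whose ID matches "cam-*" AND whose reference
	// matches "studio-*"; the two glob matches are intersected.
	q := url.Values{}
	q.Set("idpattern", "cam-*")
	q.Set("refpattern", "studio-*")

	req, err := http.NewRequest("GET", "http://localhost:8080/api/v3/process?"+q.Encode(), nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer <token>") // ApiKeyAuth; the token is a placeholder

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer res.Body.Close()

	body, _ := io.ReadAll(res.Body)
	fmt.Println(string(body)) // JSON array of api.Process objects
}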
glob/glob.go (new file, 14 lines)
@@ -0,0 +1,14 @@
+package glob
+
+import (
+	"github.com/gobwas/glob"
+)
+
+func Match(pattern, name string, separators ...rune) (bool, error) {
+	g, err := glob.Compile(pattern, separators...)
+	if err != nil {
+		return false, err
+	}
+
+	return g.Match(name), nil
+}
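Not part of the diff, but as a usage note: the wrapper keeps the gobwas/glob semantics, where a separator rune limits what '*' may cross while '**' crosses it. A minimal sketch (the file names are made up):

package main

import (
	"fmt"

	"github.com/datarhei/core/v16/glob"
)

func main() {
	// '*' does not cross the '/' separator, '**' does.
	ok, _ := glob.Match("data/*.ts", "data/chunk_001.ts", '/')
	fmt.Println(ok) // true
	ok, _ = glob.Match("data/*.ts", "data/live/chunk_001.ts", '/')
	fmt.Println(ok) // false
	ok, _ = glob.Match("data/**.ts", "data/live/chunk_001.ts", '/')
	fmt.Println(ok) // true
}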
go.mod (1 addition)
@@ -8,6 +8,7 @@ require (
 	github.com/datarhei/gosrt v0.1.2
 	github.com/datarhei/joy4 v0.0.0-20220728180719-f752080f4a36
 	github.com/go-playground/validator/v10 v10.11.0
+	github.com/gobwas/glob v0.2.3
 	github.com/golang-jwt/jwt/v4 v4.4.2
 	github.com/google/uuid v1.3.0
 	github.com/invopop/jsonschema v0.4.0
go.sum (2 additions)
@@ -123,6 +123,8 @@ github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl
 github.com/go-playground/validator/v10 v10.11.0 h1:0W+xRM511GY47Yy3bZUbJVitCNg2BOGlCyvTqsp/xIw=
 github.com/go-playground/validator/v10 v10.11.0/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU=
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
+github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
+github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
 github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
 github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
 github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
@@ -10,7 +10,7 @@ import (
 )
 
 func (r *queryResolver) Processes(ctx context.Context) ([]*models.Process, error) {
-	ids := r.Restream.GetProcessIDs()
+	ids := r.Restream.GetProcessIDs("", "")
 
 	procs := []*models.Process{}
 
@@ -70,9 +70,11 @@ func (h *RestreamHandler) Add(c echo.Context) error {
 // @Description List all known processes. Use the query parameter to filter the listed processes.
 // @ID process-3-get-all
 // @Produce json
-// @Param filter query string false "Comma separated list of fields (config, state, report, metadata) that will be part of the output. If empty, all fields will be part of the output"
-// @Param reference query string false "Return only these process that have this reference value. Overrides a list of IDs. If empty, the reference will be ignored"
-// @Param id query string false "Comma separated list of process ids to list"
+// @Param filter query string false "Comma separated list of fields (config, state, report, metadata) that will be part of the output. If empty, all fields will be part of the output."
+// @Param reference query string false "Return only these process that have this reference value. If empty, the reference will be ignored."
+// @Param id query string false "Comma separated list of process ids to list. Overrides the reference. If empty all IDs will be returned."
+// @Param idpattern query string false "Glob pattern for process IDs. If empty all IDs will be returned. Intersected with results from refpattern."
+// @Param refpattern query string false "Glob pattern for process references. If empty all IDs will be returned. Intersected with results from idpattern."
 // @Success 200 {array} api.Process
 // @Security ApiKeyAuth
 // @Router /api/v3/process [get]
@@ -82,8 +84,10 @@ func (h *RestreamHandler) GetAll(c echo.Context) error {
 	wantids := strings.FieldsFunc(util.DefaultQuery(c, "id", ""), func(r rune) bool {
 		return r == rune(',')
 	})
+	idpattern := util.DefaultQuery(c, "idpattern", "")
+	refpattern := util.DefaultQuery(c, "refpattern", "")
 
-	ids := h.restream.GetProcessIDs()
+	ids := h.restream.GetProcessIDs(idpattern, refpattern)
 
 	processes := []api.Process{}
 
@@ -8,6 +8,7 @@ import (
 	"strings"
 	"time"
 
+	"github.com/datarhei/core/v16/glob"
 	"github.com/datarhei/core/v16/log"
 )
 
@@ -296,7 +297,7 @@ func (fs *diskFilesystem) List(pattern string) []FileInfo {
 	}
 
 	if len(pattern) != 0 {
-		if ok, _ := filepath.Match(pattern, name); !ok {
+		if ok, _ := glob.Match(pattern, name, '/'); !ok {
 			return
 		}
 	}
@@ -4,11 +4,11 @@ import (
 	"bytes"
 	"fmt"
 	"io"
-	"path/filepath"
 	"sort"
 	"sync"
 	"time"
 
+	"github.com/datarhei/core/v16/glob"
 	"github.com/datarhei/core/v16/log"
 )
 
@@ -427,7 +427,7 @@ func (fs *memFilesystem) List(pattern string) []FileInfo {
 
 	for _, file := range fs.files {
 		if len(pattern) != 0 {
-			if ok, _ := filepath.Match(pattern, file.name); !ok {
+			if ok, _ := glob.Match(pattern, file.name, '/'); !ok {
 				continue
 			}
 		}
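Worth noting (not part of the diff): switching the filesystem listings from filepath.Match to glob.Match with '/' as separator changes the matching semantics for nested paths, because gobwas/glob supports '**'. An illustrative sketch with made-up file names:

package main

import (
	"fmt"
	"path/filepath"

	"github.com/datarhei/core/v16/glob"
)

func main() {
	name := "memfs/live/segment_42.ts"

	// filepath.Match: '*' never crosses '/', and there is no '**'.
	m1, _ := filepath.Match("memfs/*.ts", name)

	// glob.Match with '/' as separator: '*' still stops at '/', but '**' crosses it.
	m2, _ := glob.Match("memfs/*.ts", name, '/')
	m3, _ := glob.Match("memfs/**.ts", name, '/')

	fmt.Println(m1, m2, m3) // false false true
}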
@@ -57,7 +57,7 @@ func (c *restreamCollector) Collect() metric.Metrics {
 		"starting": 0,
 	}
 
-	ids := c.r.GetProcessIDs()
+	ids := c.r.GetProcessIDs("", "")
 
 	for _, id := range ids {
 		state, _ := c.r.GetProcessState(id)
@@ -88,31 +88,37 @@ type Status struct {
 // States
 //
 // finished - Process has been stopped
-// starting - if process has been actively started or has been waiting for reconnect (order=start, reconnect=any)
-// finished - if process shall not reconnect (order=stop, reconnect=any)
+//
+//	starting - if process has been actively started or has been waiting for reconnect (order=start, reconnect=any)
+//	finished - if process shall not reconnect (order=stop, reconnect=any)
 //
 // starting - Process is about to start
-// finishing - if process should be immediately stopped (order=stop, reconnect=any)
-// running - if process could be started (order=start, reconnect=any)
-// failed - if process couldn't be started (e.g. binary not found) (order=start, reconnect=any)
+//
+//	finishing - if process should be immediately stopped (order=stop, reconnect=any)
+//	running - if process could be started (order=start, reconnect=any)
+//	failed - if process couldn't be started (e.g. binary not found) (order=start, reconnect=any)
 //
 // running - Process is running
-// finished - if process exited normally (order=any, reconnect=any)
-// finishing - if process has been actively stopped (order=stop, reconnect=any)
-// failed - if process exited abnormally (order=any, reconnect=any)
-// killed - if process has been actively killed with SIGKILL (order=any, reconnect=any)
+//
+//	finished - if process exited normally (order=any, reconnect=any)
+//	finishing - if process has been actively stopped (order=stop, reconnect=any)
+//	failed - if process exited abnormally (order=any, reconnect=any)
+//	killed - if process has been actively killed with SIGKILL (order=any, reconnect=any)
 //
 // finishing - Process has been actively stopped and will be killed
-// finished - if process has been actively killed with SIGINT and ffmpeg exited normally (order=stop, reconnect=any)
-// killed - if process has been actively killed with SIGKILL (order=stop, reconnect=any)
+//
+//	finished - if process has been actively killed with SIGINT and ffmpeg exited normally (order=stop, reconnect=any)
+//	killed - if process has been actively killed with SIGKILL (order=stop, reconnect=any)
 //
 // failed - Process has been failed either by starting or during running
-// starting - if process has been waiting for reconnect (order=start, reconnect=true)
-// failed - if process shall not reconnect (order=any, reconnect=false)
+//
+//	starting - if process has been waiting for reconnect (order=start, reconnect=true)
+//	failed - if process shall not reconnect (order=any, reconnect=false)
 //
 // killed - Process has been stopped
-// starting - if process has been waiting for reconnect (order=start, reconnect=true)
-// killed - if process shall not reconnect (order=start, reconnect=false)
+//
+//	starting - if process has been waiting for reconnect (order=start, reconnect=true)
+//	killed - if process shall not reconnect (order=start, reconnect=false)
 type stateType string
 
 const (
@@ -14,6 +14,7 @@ import (
 	"github.com/datarhei/core/v16/ffmpeg"
 	"github.com/datarhei/core/v16/ffmpeg/parse"
 	"github.com/datarhei/core/v16/ffmpeg/skills"
+	"github.com/datarhei/core/v16/glob"
 	"github.com/datarhei/core/v16/io/fs"
 	"github.com/datarhei/core/v16/log"
 	"github.com/datarhei/core/v16/net"
@@ -33,7 +34,7 @@ type Restreamer interface {
 	Start() // start all processes that have a "start" order
 	Stop()  // stop all running process but keep their "start" order
 	AddProcess(config *app.Config) error // add a new process
-	GetProcessIDs() []string // get a list of all process IDs
+	GetProcessIDs(idpattern, refpattern string) []string // get a list of process IDs based on patterns for ID and reference
 	DeleteProcess(id string) error // delete a process
 	UpdateProcess(id string, config *app.Config) error // update a process
 	StartProcess(id string) error // start a process
@@ -808,14 +809,64 @@ func (r *restream) UpdateProcess(id string, config *app.Config) error {
 	return nil
 }
 
-func (r *restream) GetProcessIDs() []string {
+func (r *restream) GetProcessIDs(idpattern, refpattern string) []string {
 	r.lock.RLock()
 	defer r.lock.RUnlock()
 
-	ids := make([]string, len(r.tasks))
+	if len(idpattern) == 0 && len(refpattern) == 0 {
+		ids := make([]string, len(r.tasks))
+		i := 0
+
+		for id := range r.tasks {
+			ids[i] = id
+			i++
+		}
+
+		return ids
+	}
+
+	idmap := map[string]int{}
+
+	if len(idpattern) != 0 {
+		for id := range r.tasks {
+			match, err := glob.Match(idpattern, id)
+			if err != nil {
+				return nil
+			}
+
+			if !match {
+				continue
+			}
+
+			idmap[id] = 1
+		}
+	}
+
+	if len(refpattern) != 0 {
+		for _, t := range r.tasks {
+			match, err := glob.Match(refpattern, t.reference)
+			if err != nil {
+				return nil
+			}
+
+			if !match {
+				continue
+			}
+
+			if _, ok := idmap[t.id]; ok {
+				idmap[t.id]++
+			}
+		}
+	}
+
+	ids := make([]string, len(idmap))
 	i := 0
 
-	for id := range r.tasks {
+	for id, count := range idmap {
+		if count == 1 {
+			continue
+		}
+
 		ids[i] = id
 		i++
 	}
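The intersection is implemented by counting: an ID is noted with a count of 1 when it matches idpattern, the count is incremented when the task's reference also matches refpattern, and only entries that reached a count of 2 are kept. A self-contained sketch of that counting idea (the process map and patterns are made up; this is not the committed function):

package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

// intersectByCount returns the keys of procs whose key matches idpattern
// and whose value (the reference) matches refpattern, using the same
// counting idea as GetProcessIDs above.
func intersectByCount(procs map[string]string, idpattern, refpattern string) []string {
	count := map[string]int{}

	idGlob := glob.MustCompile(idpattern)
	refGlob := glob.MustCompile(refpattern)

	for id := range procs {
		if idGlob.Match(id) {
			count[id] = 1
		}
	}

	for id, ref := range procs {
		if refGlob.Match(ref) {
			if _, ok := count[id]; ok {
				count[id]++
			}
		}
	}

	ids := []string{}
	for id, c := range count {
		if c == 2 {
			ids = append(ids, id)
		}
	}

	return ids
}

func main() {
	// id -> reference
	procs := map[string]string{
		"cam-entrance": "studio-a",
		"cam-stage":    "studio-b",
		"mic-stage":    "studio-b",
	}

	fmt.Println(intersectByCount(procs, "cam-*", "studio-b")) // [cam-stage]
}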
@@ -206,7 +206,7 @@ func TestGetProcess(t *testing.T) {
 	_, err = rs.GetProcess(process.ID)
 	require.Equal(t, nil, err, "Process not found (%s)", process.ID)
 
-	list := rs.GetProcessIDs()
+	list := rs.GetProcessIDs("", "")
 	require.Len(t, list, 1, "expected 1 process")
 	require.Equal(t, process.ID, list[0], "expected same process ID")
 }
8
vendor/github.com/gobwas/glob/.gitignore
generated
vendored
Normal file
8
vendor/github.com/gobwas/glob/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
glob.iml
|
||||
.idea
|
||||
*.cpu
|
||||
*.mem
|
||||
*.test
|
||||
*.dot
|
||||
*.png
|
||||
*.svg
|
9
vendor/github.com/gobwas/glob/.travis.yml
generated
vendored
Normal file
9
vendor/github.com/gobwas/glob/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
sudo: false
|
||||
|
||||
language: go
|
||||
|
||||
go:
|
||||
- 1.5.3
|
||||
|
||||
script:
|
||||
- go test -v ./...
|
21
vendor/github.com/gobwas/glob/LICENSE
generated
vendored
Normal file
21
vendor/github.com/gobwas/glob/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 Sergey Kamardin
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
26
vendor/github.com/gobwas/glob/bench.sh
generated
vendored
Normal file
26
vendor/github.com/gobwas/glob/bench.sh
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
#! /bin/bash
|
||||
|
||||
bench() {
|
||||
filename="/tmp/$1-$2.bench"
|
||||
if test -e "${filename}";
|
||||
then
|
||||
echo "Already exists ${filename}"
|
||||
else
|
||||
backup=`git rev-parse --abbrev-ref HEAD`
|
||||
git checkout $1
|
||||
echo -n "Creating ${filename}... "
|
||||
go test ./... -run=NONE -bench=$2 > "${filename}" -benchmem
|
||||
echo "OK"
|
||||
git checkout ${backup}
|
||||
sleep 5
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
to=$1
|
||||
current=`git rev-parse --abbrev-ref HEAD`
|
||||
|
||||
bench ${to} $2
|
||||
bench ${current} $2
|
||||
|
||||
benchcmp $3 "/tmp/${to}-$2.bench" "/tmp/${current}-$2.bench"
|
525
vendor/github.com/gobwas/glob/compiler/compiler.go
generated
vendored
Normal file
525
vendor/github.com/gobwas/glob/compiler/compiler.go
generated
vendored
Normal file
@@ -0,0 +1,525 @@
|
||||
package compiler
|
||||
|
||||
// TODO use constructor with all matchers, and to their structs private
|
||||
// TODO glue multiple Text nodes (like after QuoteMeta)
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
|
||||
"github.com/gobwas/glob/match"
|
||||
"github.com/gobwas/glob/syntax/ast"
|
||||
"github.com/gobwas/glob/util/runes"
|
||||
)
|
||||
|
||||
func optimizeMatcher(matcher match.Matcher) match.Matcher {
|
||||
switch m := matcher.(type) {
|
||||
|
||||
case match.Any:
|
||||
if len(m.Separators) == 0 {
|
||||
return match.NewSuper()
|
||||
}
|
||||
|
||||
case match.AnyOf:
|
||||
if len(m.Matchers) == 1 {
|
||||
return m.Matchers[0]
|
||||
}
|
||||
|
||||
return m
|
||||
|
||||
case match.List:
|
||||
if m.Not == false && len(m.List) == 1 {
|
||||
return match.NewText(string(m.List))
|
||||
}
|
||||
|
||||
return m
|
||||
|
||||
case match.BTree:
|
||||
m.Left = optimizeMatcher(m.Left)
|
||||
m.Right = optimizeMatcher(m.Right)
|
||||
|
||||
r, ok := m.Value.(match.Text)
|
||||
if !ok {
|
||||
return m
|
||||
}
|
||||
|
||||
var (
|
||||
leftNil = m.Left == nil
|
||||
rightNil = m.Right == nil
|
||||
)
|
||||
if leftNil && rightNil {
|
||||
return match.NewText(r.Str)
|
||||
}
|
||||
|
||||
_, leftSuper := m.Left.(match.Super)
|
||||
lp, leftPrefix := m.Left.(match.Prefix)
|
||||
la, leftAny := m.Left.(match.Any)
|
||||
|
||||
_, rightSuper := m.Right.(match.Super)
|
||||
rs, rightSuffix := m.Right.(match.Suffix)
|
||||
ra, rightAny := m.Right.(match.Any)
|
||||
|
||||
switch {
|
||||
case leftSuper && rightSuper:
|
||||
return match.NewContains(r.Str, false)
|
||||
|
||||
case leftSuper && rightNil:
|
||||
return match.NewSuffix(r.Str)
|
||||
|
||||
case rightSuper && leftNil:
|
||||
return match.NewPrefix(r.Str)
|
||||
|
||||
case leftNil && rightSuffix:
|
||||
return match.NewPrefixSuffix(r.Str, rs.Suffix)
|
||||
|
||||
case rightNil && leftPrefix:
|
||||
return match.NewPrefixSuffix(lp.Prefix, r.Str)
|
||||
|
||||
case rightNil && leftAny:
|
||||
return match.NewSuffixAny(r.Str, la.Separators)
|
||||
|
||||
case leftNil && rightAny:
|
||||
return match.NewPrefixAny(r.Str, ra.Separators)
|
||||
}
|
||||
|
||||
return m
|
||||
}
|
||||
|
||||
return matcher
|
||||
}
|
||||
|
||||
func compileMatchers(matchers []match.Matcher) (match.Matcher, error) {
|
||||
if len(matchers) == 0 {
|
||||
return nil, fmt.Errorf("compile error: need at least one matcher")
|
||||
}
|
||||
if len(matchers) == 1 {
|
||||
return matchers[0], nil
|
||||
}
|
||||
if m := glueMatchers(matchers); m != nil {
|
||||
return m, nil
|
||||
}
|
||||
|
||||
idx := -1
|
||||
maxLen := -1
|
||||
var val match.Matcher
|
||||
for i, matcher := range matchers {
|
||||
if l := matcher.Len(); l != -1 && l >= maxLen {
|
||||
maxLen = l
|
||||
idx = i
|
||||
val = matcher
|
||||
}
|
||||
}
|
||||
|
||||
if val == nil { // not found matcher with static length
|
||||
r, err := compileMatchers(matchers[1:])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return match.NewBTree(matchers[0], nil, r), nil
|
||||
}
|
||||
|
||||
left := matchers[:idx]
|
||||
var right []match.Matcher
|
||||
if len(matchers) > idx+1 {
|
||||
right = matchers[idx+1:]
|
||||
}
|
||||
|
||||
var l, r match.Matcher
|
||||
var err error
|
||||
if len(left) > 0 {
|
||||
l, err = compileMatchers(left)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if len(right) > 0 {
|
||||
r, err = compileMatchers(right)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return match.NewBTree(val, l, r), nil
|
||||
}
|
||||
|
||||
func glueMatchers(matchers []match.Matcher) match.Matcher {
|
||||
if m := glueMatchersAsEvery(matchers); m != nil {
|
||||
return m
|
||||
}
|
||||
if m := glueMatchersAsRow(matchers); m != nil {
|
||||
return m
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func glueMatchersAsRow(matchers []match.Matcher) match.Matcher {
|
||||
if len(matchers) <= 1 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var (
|
||||
c []match.Matcher
|
||||
l int
|
||||
)
|
||||
for _, matcher := range matchers {
|
||||
if ml := matcher.Len(); ml == -1 {
|
||||
return nil
|
||||
} else {
|
||||
c = append(c, matcher)
|
||||
l += ml
|
||||
}
|
||||
}
|
||||
return match.NewRow(l, c...)
|
||||
}
|
||||
|
||||
func glueMatchersAsEvery(matchers []match.Matcher) match.Matcher {
|
||||
if len(matchers) <= 1 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var (
|
||||
hasAny bool
|
||||
hasSuper bool
|
||||
hasSingle bool
|
||||
min int
|
||||
separator []rune
|
||||
)
|
||||
|
||||
for i, matcher := range matchers {
|
||||
var sep []rune
|
||||
|
||||
switch m := matcher.(type) {
|
||||
case match.Super:
|
||||
sep = []rune{}
|
||||
hasSuper = true
|
||||
|
||||
case match.Any:
|
||||
sep = m.Separators
|
||||
hasAny = true
|
||||
|
||||
case match.Single:
|
||||
sep = m.Separators
|
||||
hasSingle = true
|
||||
min++
|
||||
|
||||
case match.List:
|
||||
if !m.Not {
|
||||
return nil
|
||||
}
|
||||
sep = m.List
|
||||
hasSingle = true
|
||||
min++
|
||||
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
|
||||
// initialize
|
||||
if i == 0 {
|
||||
separator = sep
|
||||
}
|
||||
|
||||
if runes.Equal(sep, separator) {
|
||||
continue
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
if hasSuper && !hasAny && !hasSingle {
|
||||
return match.NewSuper()
|
||||
}
|
||||
|
||||
if hasAny && !hasSuper && !hasSingle {
|
||||
return match.NewAny(separator)
|
||||
}
|
||||
|
||||
if (hasAny || hasSuper) && min > 0 && len(separator) == 0 {
|
||||
return match.NewMin(min)
|
||||
}
|
||||
|
||||
every := match.NewEveryOf()
|
||||
|
||||
if min > 0 {
|
||||
every.Add(match.NewMin(min))
|
||||
|
||||
if !hasAny && !hasSuper {
|
||||
every.Add(match.NewMax(min))
|
||||
}
|
||||
}
|
||||
|
||||
if len(separator) > 0 {
|
||||
every.Add(match.NewContains(string(separator), true))
|
||||
}
|
||||
|
||||
return every
|
||||
}
|
||||
|
||||
func minimizeMatchers(matchers []match.Matcher) []match.Matcher {
|
||||
var done match.Matcher
|
||||
var left, right, count int
|
||||
|
||||
for l := 0; l < len(matchers); l++ {
|
||||
for r := len(matchers); r > l; r-- {
|
||||
if glued := glueMatchers(matchers[l:r]); glued != nil {
|
||||
var swap bool
|
||||
|
||||
if done == nil {
|
||||
swap = true
|
||||
} else {
|
||||
cl, gl := done.Len(), glued.Len()
|
||||
swap = cl > -1 && gl > -1 && gl > cl
|
||||
swap = swap || count < r-l
|
||||
}
|
||||
|
||||
if swap {
|
||||
done = glued
|
||||
left = l
|
||||
right = r
|
||||
count = r - l
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if done == nil {
|
||||
return matchers
|
||||
}
|
||||
|
||||
next := append(append([]match.Matcher{}, matchers[:left]...), done)
|
||||
if right < len(matchers) {
|
||||
next = append(next, matchers[right:]...)
|
||||
}
|
||||
|
||||
if len(next) == len(matchers) {
|
||||
return next
|
||||
}
|
||||
|
||||
return minimizeMatchers(next)
|
||||
}
|
||||
|
||||
// minimizeAnyOf tries to apply some heuristics to minimize number of nodes in given tree
|
||||
func minimizeTree(tree *ast.Node) *ast.Node {
|
||||
switch tree.Kind {
|
||||
case ast.KindAnyOf:
|
||||
return minimizeTreeAnyOf(tree)
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// minimizeAnyOf tries to find common children of given node of AnyOf pattern
|
||||
// it searches for common children from left and from right
|
||||
// if any common children are found – then it returns new optimized ast tree
|
||||
// else it returns nil
|
||||
func minimizeTreeAnyOf(tree *ast.Node) *ast.Node {
|
||||
if !areOfSameKind(tree.Children, ast.KindPattern) {
|
||||
return nil
|
||||
}
|
||||
|
||||
commonLeft, commonRight := commonChildren(tree.Children)
|
||||
commonLeftCount, commonRightCount := len(commonLeft), len(commonRight)
|
||||
if commonLeftCount == 0 && commonRightCount == 0 { // there are no common parts
|
||||
return nil
|
||||
}
|
||||
|
||||
var result []*ast.Node
|
||||
if commonLeftCount > 0 {
|
||||
result = append(result, ast.NewNode(ast.KindPattern, nil, commonLeft...))
|
||||
}
|
||||
|
||||
var anyOf []*ast.Node
|
||||
for _, child := range tree.Children {
|
||||
reuse := child.Children[commonLeftCount : len(child.Children)-commonRightCount]
|
||||
var node *ast.Node
|
||||
if len(reuse) == 0 {
|
||||
// this pattern is completely reduced by commonLeft and commonRight patterns
|
||||
// so it become nothing
|
||||
node = ast.NewNode(ast.KindNothing, nil)
|
||||
} else {
|
||||
node = ast.NewNode(ast.KindPattern, nil, reuse...)
|
||||
}
|
||||
anyOf = appendIfUnique(anyOf, node)
|
||||
}
|
||||
switch {
|
||||
case len(anyOf) == 1 && anyOf[0].Kind != ast.KindNothing:
|
||||
result = append(result, anyOf[0])
|
||||
case len(anyOf) > 1:
|
||||
result = append(result, ast.NewNode(ast.KindAnyOf, nil, anyOf...))
|
||||
}
|
||||
|
||||
if commonRightCount > 0 {
|
||||
result = append(result, ast.NewNode(ast.KindPattern, nil, commonRight...))
|
||||
}
|
||||
|
||||
return ast.NewNode(ast.KindPattern, nil, result...)
|
||||
}
|
||||
|
||||
func commonChildren(nodes []*ast.Node) (commonLeft, commonRight []*ast.Node) {
|
||||
if len(nodes) <= 1 {
|
||||
return
|
||||
}
|
||||
|
||||
// find node that has least number of children
|
||||
idx := leastChildren(nodes)
|
||||
if idx == -1 {
|
||||
return
|
||||
}
|
||||
tree := nodes[idx]
|
||||
treeLength := len(tree.Children)
|
||||
|
||||
// allocate max able size for rightCommon slice
|
||||
// to get ability insert elements in reverse order (from end to start)
|
||||
// without sorting
|
||||
commonRight = make([]*ast.Node, treeLength)
|
||||
lastRight := treeLength // will use this to get results as commonRight[lastRight:]
|
||||
|
||||
var (
|
||||
breakLeft bool
|
||||
breakRight bool
|
||||
commonTotal int
|
||||
)
|
||||
for i, j := 0, treeLength-1; commonTotal < treeLength && j >= 0 && !(breakLeft && breakRight); i, j = i+1, j-1 {
|
||||
treeLeft := tree.Children[i]
|
||||
treeRight := tree.Children[j]
|
||||
|
||||
for k := 0; k < len(nodes) && !(breakLeft && breakRight); k++ {
|
||||
// skip least children node
|
||||
if k == idx {
|
||||
continue
|
||||
}
|
||||
|
||||
restLeft := nodes[k].Children[i]
|
||||
restRight := nodes[k].Children[j+len(nodes[k].Children)-treeLength]
|
||||
|
||||
breakLeft = breakLeft || !treeLeft.Equal(restLeft)
|
||||
|
||||
// disable searching for right common parts, if left part is already overlapping
|
||||
breakRight = breakRight || (!breakLeft && j <= i)
|
||||
breakRight = breakRight || !treeRight.Equal(restRight)
|
||||
}
|
||||
|
||||
if !breakLeft {
|
||||
commonTotal++
|
||||
commonLeft = append(commonLeft, treeLeft)
|
||||
}
|
||||
if !breakRight {
|
||||
commonTotal++
|
||||
lastRight = j
|
||||
commonRight[j] = treeRight
|
||||
}
|
||||
}
|
||||
|
||||
commonRight = commonRight[lastRight:]
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func appendIfUnique(target []*ast.Node, val *ast.Node) []*ast.Node {
|
||||
for _, n := range target {
|
||||
if reflect.DeepEqual(n, val) {
|
||||
return target
|
||||
}
|
||||
}
|
||||
return append(target, val)
|
||||
}
|
||||
|
||||
func areOfSameKind(nodes []*ast.Node, kind ast.Kind) bool {
|
||||
for _, n := range nodes {
|
||||
if n.Kind != kind {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func leastChildren(nodes []*ast.Node) int {
|
||||
min := -1
|
||||
idx := -1
|
||||
for i, n := range nodes {
|
||||
if idx == -1 || (len(n.Children) < min) {
|
||||
min = len(n.Children)
|
||||
idx = i
|
||||
}
|
||||
}
|
||||
return idx
|
||||
}
|
||||
|
||||
func compileTreeChildren(tree *ast.Node, sep []rune) ([]match.Matcher, error) {
|
||||
var matchers []match.Matcher
|
||||
for _, desc := range tree.Children {
|
||||
m, err := compile(desc, sep)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
matchers = append(matchers, optimizeMatcher(m))
|
||||
}
|
||||
return matchers, nil
|
||||
}
|
||||
|
||||
func compile(tree *ast.Node, sep []rune) (m match.Matcher, err error) {
|
||||
switch tree.Kind {
|
||||
case ast.KindAnyOf:
|
||||
// todo this could be faster on pattern_alternatives_combine_lite (see glob_test.go)
|
||||
if n := minimizeTree(tree); n != nil {
|
||||
return compile(n, sep)
|
||||
}
|
||||
matchers, err := compileTreeChildren(tree, sep)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return match.NewAnyOf(matchers...), nil
|
||||
|
||||
case ast.KindPattern:
|
||||
if len(tree.Children) == 0 {
|
||||
return match.NewNothing(), nil
|
||||
}
|
||||
matchers, err := compileTreeChildren(tree, sep)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
m, err = compileMatchers(minimizeMatchers(matchers))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
case ast.KindAny:
|
||||
m = match.NewAny(sep)
|
||||
|
||||
case ast.KindSuper:
|
||||
m = match.NewSuper()
|
||||
|
||||
case ast.KindSingle:
|
||||
m = match.NewSingle(sep)
|
||||
|
||||
case ast.KindNothing:
|
||||
m = match.NewNothing()
|
||||
|
||||
case ast.KindList:
|
||||
l := tree.Value.(ast.List)
|
||||
m = match.NewList([]rune(l.Chars), l.Not)
|
||||
|
||||
case ast.KindRange:
|
||||
r := tree.Value.(ast.Range)
|
||||
m = match.NewRange(r.Lo, r.Hi, r.Not)
|
||||
|
||||
case ast.KindText:
|
||||
t := tree.Value.(ast.Text)
|
||||
m = match.NewText(t.Text)
|
||||
|
||||
default:
|
||||
return nil, fmt.Errorf("could not compile tree: unknown node type")
|
||||
}
|
||||
|
||||
return optimizeMatcher(m), nil
|
||||
}
|
||||
|
||||
func Compile(tree *ast.Node, sep []rune) (match.Matcher, error) {
|
||||
m, err := compile(tree, sep)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return m, nil
|
||||
}
|
80
vendor/github.com/gobwas/glob/glob.go
generated
vendored
Normal file
80
vendor/github.com/gobwas/glob/glob.go
generated
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
package glob
|
||||
|
||||
import (
|
||||
"github.com/gobwas/glob/compiler"
|
||||
"github.com/gobwas/glob/syntax"
|
||||
)
|
||||
|
||||
// Glob represents compiled glob pattern.
|
||||
type Glob interface {
|
||||
Match(string) bool
|
||||
}
|
||||
|
||||
// Compile creates Glob for given pattern and strings (if any present after pattern) as separators.
|
||||
// The pattern syntax is:
|
||||
//
|
||||
// pattern:
|
||||
// { term }
|
||||
//
|
||||
// term:
|
||||
// `*` matches any sequence of non-separator characters
|
||||
// `**` matches any sequence of characters
|
||||
// `?` matches any single non-separator character
|
||||
// `[` [ `!` ] { character-range } `]`
|
||||
// character class (must be non-empty)
|
||||
// `{` pattern-list `}`
|
||||
// pattern alternatives
|
||||
// c matches character c (c != `*`, `**`, `?`, `\`, `[`, `{`, `}`)
|
||||
// `\` c matches character c
|
||||
//
|
||||
// character-range:
|
||||
// c matches character c (c != `\\`, `-`, `]`)
|
||||
// `\` c matches character c
|
||||
// lo `-` hi matches character c for lo <= c <= hi
|
||||
//
|
||||
// pattern-list:
|
||||
// pattern { `,` pattern }
|
||||
// comma-separated (without spaces) patterns
|
||||
//
|
||||
func Compile(pattern string, separators ...rune) (Glob, error) {
|
||||
ast, err := syntax.Parse(pattern)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
matcher, err := compiler.Compile(ast, separators)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return matcher, nil
|
||||
}
|
||||
|
||||
// MustCompile is the same as Compile, except that if Compile returns error, this will panic
|
||||
func MustCompile(pattern string, separators ...rune) Glob {
|
||||
g, err := Compile(pattern, separators...)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return g
|
||||
}
|
||||
|
||||
// QuoteMeta returns a string that quotes all glob pattern meta characters
|
||||
// inside the argument text; For example, QuoteMeta(`{foo*}`) returns `\[foo\*\]`.
|
||||
func QuoteMeta(s string) string {
|
||||
b := make([]byte, 2*len(s))
|
||||
|
||||
// a byte loop is correct because all meta characters are ASCII
|
||||
j := 0
|
||||
for i := 0; i < len(s); i++ {
|
||||
if syntax.Special(s[i]) {
|
||||
b[j] = '\\'
|
||||
j++
|
||||
}
|
||||
b[j] = s[i]
|
||||
j++
|
||||
}
|
||||
|
||||
return string(b[0:j])
|
||||
}
|
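For reference (not part of the vendored file), the pattern syntax documented above can be exercised like this; the patterns and inputs are made up:

package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// '*' stops at the '.' separator, '{...}' lists alternatives.
	g := glob.MustCompile("*.{jpg,png}", '.')

	fmt.Println(g.Match("photo.jpg"))   // true
	fmt.Println(g.Match("photo.jpeg"))  // false
	fmt.Println(g.Match("a.photo.jpg")) // false: '*' does not cross the '.' separator
}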
45
vendor/github.com/gobwas/glob/match/any.go
generated
vendored
Normal file
45
vendor/github.com/gobwas/glob/match/any.go
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/gobwas/glob/util/strings"
|
||||
)
|
||||
|
||||
type Any struct {
|
||||
Separators []rune
|
||||
}
|
||||
|
||||
func NewAny(s []rune) Any {
|
||||
return Any{s}
|
||||
}
|
||||
|
||||
func (self Any) Match(s string) bool {
|
||||
return strings.IndexAnyRunes(s, self.Separators) == -1
|
||||
}
|
||||
|
||||
func (self Any) Index(s string) (int, []int) {
|
||||
found := strings.IndexAnyRunes(s, self.Separators)
|
||||
switch found {
|
||||
case -1:
|
||||
case 0:
|
||||
return 0, segments0
|
||||
default:
|
||||
s = s[:found]
|
||||
}
|
||||
|
||||
segments := acquireSegments(len(s))
|
||||
for i := range s {
|
||||
segments = append(segments, i)
|
||||
}
|
||||
segments = append(segments, len(s))
|
||||
|
||||
return 0, segments
|
||||
}
|
||||
|
||||
func (self Any) Len() int {
|
||||
return lenNo
|
||||
}
|
||||
|
||||
func (self Any) String() string {
|
||||
return fmt.Sprintf("<any:![%s]>", string(self.Separators))
|
||||
}
|
82
vendor/github.com/gobwas/glob/match/any_of.go
generated
vendored
Normal file
82
vendor/github.com/gobwas/glob/match/any_of.go
generated
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
package match
|
||||
|
||||
import "fmt"
|
||||
|
||||
type AnyOf struct {
|
||||
Matchers Matchers
|
||||
}
|
||||
|
||||
func NewAnyOf(m ...Matcher) AnyOf {
|
||||
return AnyOf{Matchers(m)}
|
||||
}
|
||||
|
||||
func (self *AnyOf) Add(m Matcher) error {
|
||||
self.Matchers = append(self.Matchers, m)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (self AnyOf) Match(s string) bool {
|
||||
for _, m := range self.Matchers {
|
||||
if m.Match(s) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (self AnyOf) Index(s string) (int, []int) {
|
||||
index := -1
|
||||
|
||||
segments := acquireSegments(len(s))
|
||||
for _, m := range self.Matchers {
|
||||
idx, seg := m.Index(s)
|
||||
if idx == -1 {
|
||||
continue
|
||||
}
|
||||
|
||||
if index == -1 || idx < index {
|
||||
index = idx
|
||||
segments = append(segments[:0], seg...)
|
||||
continue
|
||||
}
|
||||
|
||||
if idx > index {
|
||||
continue
|
||||
}
|
||||
|
||||
// here idx == index
|
||||
segments = appendMerge(segments, seg)
|
||||
}
|
||||
|
||||
if index == -1 {
|
||||
releaseSegments(segments)
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
return index, segments
|
||||
}
|
||||
|
||||
func (self AnyOf) Len() (l int) {
|
||||
l = -1
|
||||
for _, m := range self.Matchers {
|
||||
ml := m.Len()
|
||||
switch {
|
||||
case l == -1:
|
||||
l = ml
|
||||
continue
|
||||
|
||||
case ml == -1:
|
||||
return -1
|
||||
|
||||
case l != ml:
|
||||
return -1
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (self AnyOf) String() string {
|
||||
return fmt.Sprintf("<any_of:[%s]>", self.Matchers)
|
||||
}
|
146
vendor/github.com/gobwas/glob/match/btree.go
generated
vendored
Normal file
146
vendor/github.com/gobwas/glob/match/btree.go
generated
vendored
Normal file
@@ -0,0 +1,146 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type BTree struct {
|
||||
Value Matcher
|
||||
Left Matcher
|
||||
Right Matcher
|
||||
ValueLengthRunes int
|
||||
LeftLengthRunes int
|
||||
RightLengthRunes int
|
||||
LengthRunes int
|
||||
}
|
||||
|
||||
func NewBTree(Value, Left, Right Matcher) (tree BTree) {
|
||||
tree.Value = Value
|
||||
tree.Left = Left
|
||||
tree.Right = Right
|
||||
|
||||
lenOk := true
|
||||
if tree.ValueLengthRunes = Value.Len(); tree.ValueLengthRunes == -1 {
|
||||
lenOk = false
|
||||
}
|
||||
|
||||
if Left != nil {
|
||||
if tree.LeftLengthRunes = Left.Len(); tree.LeftLengthRunes == -1 {
|
||||
lenOk = false
|
||||
}
|
||||
}
|
||||
|
||||
if Right != nil {
|
||||
if tree.RightLengthRunes = Right.Len(); tree.RightLengthRunes == -1 {
|
||||
lenOk = false
|
||||
}
|
||||
}
|
||||
|
||||
if lenOk {
|
||||
tree.LengthRunes = tree.LeftLengthRunes + tree.ValueLengthRunes + tree.RightLengthRunes
|
||||
} else {
|
||||
tree.LengthRunes = -1
|
||||
}
|
||||
|
||||
return tree
|
||||
}
|
||||
|
||||
func (self BTree) Len() int {
|
||||
return self.LengthRunes
|
||||
}
|
||||
|
||||
// todo?
|
||||
func (self BTree) Index(s string) (int, []int) {
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
func (self BTree) Match(s string) bool {
|
||||
inputLen := len(s)
|
||||
|
||||
// self.Length, self.RLen and self.LLen are values meaning the length of runes for each part
|
||||
// here we manipulating byte length for better optimizations
|
||||
// but these checks still works, cause minLen of 1-rune string is 1 byte.
|
||||
if self.LengthRunes != -1 && self.LengthRunes > inputLen {
|
||||
return false
|
||||
}
|
||||
|
||||
// try to cut unnecessary parts
|
||||
// by knowledge of length of right and left part
|
||||
var offset, limit int
|
||||
if self.LeftLengthRunes >= 0 {
|
||||
offset = self.LeftLengthRunes
|
||||
}
|
||||
if self.RightLengthRunes >= 0 {
|
||||
limit = inputLen - self.RightLengthRunes
|
||||
} else {
|
||||
limit = inputLen
|
||||
}
|
||||
|
||||
for offset < limit {
|
||||
// search for matching part in substring
|
||||
index, segments := self.Value.Index(s[offset:limit])
|
||||
if index == -1 {
|
||||
releaseSegments(segments)
|
||||
return false
|
||||
}
|
||||
|
||||
l := s[:offset+index]
|
||||
var left bool
|
||||
if self.Left != nil {
|
||||
left = self.Left.Match(l)
|
||||
} else {
|
||||
left = l == ""
|
||||
}
|
||||
|
||||
if left {
|
||||
for i := len(segments) - 1; i >= 0; i-- {
|
||||
length := segments[i]
|
||||
|
||||
var right bool
|
||||
var r string
|
||||
// if there is no string for the right branch
|
||||
if inputLen <= offset+index+length {
|
||||
r = ""
|
||||
} else {
|
||||
r = s[offset+index+length:]
|
||||
}
|
||||
|
||||
if self.Right != nil {
|
||||
right = self.Right.Match(r)
|
||||
} else {
|
||||
right = r == ""
|
||||
}
|
||||
|
||||
if right {
|
||||
releaseSegments(segments)
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_, step := utf8.DecodeRuneInString(s[offset+index:])
|
||||
offset += index + step
|
||||
|
||||
releaseSegments(segments)
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (self BTree) String() string {
|
||||
const n string = "<nil>"
|
||||
var l, r string
|
||||
if self.Left == nil {
|
||||
l = n
|
||||
} else {
|
||||
l = self.Left.String()
|
||||
}
|
||||
if self.Right == nil {
|
||||
r = n
|
||||
} else {
|
||||
r = self.Right.String()
|
||||
}
|
||||
|
||||
return fmt.Sprintf("<btree:[%s<-%s->%s]>", l, self.Value, r)
|
||||
}
|
58
vendor/github.com/gobwas/glob/match/contains.go
generated
vendored
Normal file
58
vendor/github.com/gobwas/glob/match/contains.go
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type Contains struct {
|
||||
Needle string
|
||||
Not bool
|
||||
}
|
||||
|
||||
func NewContains(needle string, not bool) Contains {
|
||||
return Contains{needle, not}
|
||||
}
|
||||
|
||||
func (self Contains) Match(s string) bool {
|
||||
return strings.Contains(s, self.Needle) != self.Not
|
||||
}
|
||||
|
||||
func (self Contains) Index(s string) (int, []int) {
|
||||
var offset int
|
||||
|
||||
idx := strings.Index(s, self.Needle)
|
||||
|
||||
if !self.Not {
|
||||
if idx == -1 {
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
offset = idx + len(self.Needle)
|
||||
if len(s) <= offset {
|
||||
return 0, []int{offset}
|
||||
}
|
||||
s = s[offset:]
|
||||
} else if idx != -1 {
|
||||
s = s[:idx]
|
||||
}
|
||||
|
||||
segments := acquireSegments(len(s) + 1)
|
||||
for i := range s {
|
||||
segments = append(segments, offset+i)
|
||||
}
|
||||
|
||||
return 0, append(segments, offset+len(s))
|
||||
}
|
||||
|
||||
func (self Contains) Len() int {
|
||||
return lenNo
|
||||
}
|
||||
|
||||
func (self Contains) String() string {
|
||||
var not string
|
||||
if self.Not {
|
||||
not = "!"
|
||||
}
|
||||
return fmt.Sprintf("<contains:%s[%s]>", not, self.Needle)
|
||||
}
|
99
vendor/github.com/gobwas/glob/match/every_of.go
generated
vendored
Normal file
99
vendor/github.com/gobwas/glob/match/every_of.go
generated
vendored
Normal file
@@ -0,0 +1,99 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type EveryOf struct {
|
||||
Matchers Matchers
|
||||
}
|
||||
|
||||
func NewEveryOf(m ...Matcher) EveryOf {
|
||||
return EveryOf{Matchers(m)}
|
||||
}
|
||||
|
||||
func (self *EveryOf) Add(m Matcher) error {
|
||||
self.Matchers = append(self.Matchers, m)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (self EveryOf) Len() (l int) {
|
||||
for _, m := range self.Matchers {
|
||||
if ml := m.Len(); l > 0 {
|
||||
l += ml
|
||||
} else {
|
||||
return -1
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (self EveryOf) Index(s string) (int, []int) {
|
||||
var index int
|
||||
var offset int
|
||||
|
||||
// make `in` with cap as len(s),
|
||||
// cause it is the maximum size of output segments values
|
||||
next := acquireSegments(len(s))
|
||||
current := acquireSegments(len(s))
|
||||
|
||||
sub := s
|
||||
for i, m := range self.Matchers {
|
||||
idx, seg := m.Index(sub)
|
||||
if idx == -1 {
|
||||
releaseSegments(next)
|
||||
releaseSegments(current)
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
if i == 0 {
|
||||
// we use copy here instead of `current = seg`
|
||||
// cause seg is a slice from reusable buffer `in`
|
||||
// and it could be overwritten in next iteration
|
||||
current = append(current, seg...)
|
||||
} else {
|
||||
// clear the next
|
||||
next = next[:0]
|
||||
|
||||
delta := index - (idx + offset)
|
||||
for _, ex := range current {
|
||||
for _, n := range seg {
|
||||
if ex+delta == n {
|
||||
next = append(next, n)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(next) == 0 {
|
||||
releaseSegments(next)
|
||||
releaseSegments(current)
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
current = append(current[:0], next...)
|
||||
}
|
||||
|
||||
index = idx + offset
|
||||
sub = s[index:]
|
||||
offset += idx
|
||||
}
|
||||
|
||||
releaseSegments(next)
|
||||
|
||||
return index, current
|
||||
}
|
||||
|
||||
func (self EveryOf) Match(s string) bool {
|
||||
for _, m := range self.Matchers {
|
||||
if !m.Match(s) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (self EveryOf) String() string {
|
||||
return fmt.Sprintf("<every_of:[%s]>", self.Matchers)
|
||||
}
|
49
vendor/github.com/gobwas/glob/match/list.go
generated
vendored
Normal file
49
vendor/github.com/gobwas/glob/match/list.go
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/gobwas/glob/util/runes"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type List struct {
|
||||
List []rune
|
||||
Not bool
|
||||
}
|
||||
|
||||
func NewList(list []rune, not bool) List {
|
||||
return List{list, not}
|
||||
}
|
||||
|
||||
func (self List) Match(s string) bool {
|
||||
r, w := utf8.DecodeRuneInString(s)
|
||||
if len(s) > w {
|
||||
return false
|
||||
}
|
||||
|
||||
inList := runes.IndexRune(self.List, r) != -1
|
||||
return inList == !self.Not
|
||||
}
|
||||
|
||||
func (self List) Len() int {
|
||||
return lenOne
|
||||
}
|
||||
|
||||
func (self List) Index(s string) (int, []int) {
|
||||
for i, r := range s {
|
||||
if self.Not == (runes.IndexRune(self.List, r) == -1) {
|
||||
return i, segmentsByRuneLength[utf8.RuneLen(r)]
|
||||
}
|
||||
}
|
||||
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
func (self List) String() string {
|
||||
var not string
|
||||
if self.Not {
|
||||
not = "!"
|
||||
}
|
||||
|
||||
return fmt.Sprintf("<list:%s[%s]>", not, string(self.List))
|
||||
}
|
81
vendor/github.com/gobwas/glob/match/match.go
generated
vendored
Normal file
81
vendor/github.com/gobwas/glob/match/match.go
generated
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
package match
|
||||
|
||||
// todo common table of rune's length
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
const lenOne = 1
|
||||
const lenZero = 0
|
||||
const lenNo = -1
|
||||
|
||||
type Matcher interface {
|
||||
Match(string) bool
|
||||
Index(string) (int, []int)
|
||||
Len() int
|
||||
String() string
|
||||
}
|
||||
|
||||
type Matchers []Matcher
|
||||
|
||||
func (m Matchers) String() string {
|
||||
var s []string
|
||||
for _, matcher := range m {
|
||||
s = append(s, fmt.Sprint(matcher))
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s", strings.Join(s, ","))
|
||||
}
|
||||
|
||||
// appendMerge merges and sorts given already SORTED and UNIQUE segments.
|
||||
func appendMerge(target, sub []int) []int {
|
||||
lt, ls := len(target), len(sub)
|
||||
out := make([]int, 0, lt+ls)
|
||||
|
||||
for x, y := 0, 0; x < lt || y < ls; {
|
||||
if x >= lt {
|
||||
out = append(out, sub[y:]...)
|
||||
break
|
||||
}
|
||||
|
||||
if y >= ls {
|
||||
out = append(out, target[x:]...)
|
||||
break
|
||||
}
|
||||
|
||||
xValue := target[x]
|
||||
yValue := sub[y]
|
||||
|
||||
switch {
|
||||
|
||||
case xValue == yValue:
|
||||
out = append(out, xValue)
|
||||
x++
|
||||
y++
|
||||
|
||||
case xValue < yValue:
|
||||
out = append(out, xValue)
|
||||
x++
|
||||
|
||||
case yValue < xValue:
|
||||
out = append(out, yValue)
|
||||
y++
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
target = append(target[:0], out...)
|
||||
|
||||
return target
|
||||
}
|
||||
|
||||
func reverseSegments(input []int) {
|
||||
l := len(input)
|
||||
m := l / 2
|
||||
|
||||
for i := 0; i < m; i++ {
|
||||
input[i], input[l-i-1] = input[l-i-1], input[i]
|
||||
}
|
||||
}
|
49
vendor/github.com/gobwas/glob/match/max.go
generated
vendored
Normal file
49
vendor/github.com/gobwas/glob/match/max.go
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type Max struct {
|
||||
Limit int
|
||||
}
|
||||
|
||||
func NewMax(l int) Max {
|
||||
return Max{l}
|
||||
}
|
||||
|
||||
func (self Max) Match(s string) bool {
|
||||
var l int
|
||||
for range s {
|
||||
l += 1
|
||||
if l > self.Limit {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (self Max) Index(s string) (int, []int) {
|
||||
segments := acquireSegments(self.Limit + 1)
|
||||
segments = append(segments, 0)
|
||||
var count int
|
||||
for i, r := range s {
|
||||
count++
|
||||
if count > self.Limit {
|
||||
break
|
||||
}
|
||||
segments = append(segments, i+utf8.RuneLen(r))
|
||||
}
|
||||
|
||||
return 0, segments
|
||||
}
|
||||
|
||||
func (self Max) Len() int {
|
||||
return lenNo
|
||||
}
|
||||
|
||||
func (self Max) String() string {
|
||||
return fmt.Sprintf("<max:%d>", self.Limit)
|
||||
}
|
57
vendor/github.com/gobwas/glob/match/min.go
generated
vendored
Normal file
57
vendor/github.com/gobwas/glob/match/min.go
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type Min struct {
|
||||
Limit int
|
||||
}
|
||||
|
||||
func NewMin(l int) Min {
|
||||
return Min{l}
|
||||
}
|
||||
|
||||
func (self Min) Match(s string) bool {
|
||||
var l int
|
||||
for range s {
|
||||
l += 1
|
||||
if l >= self.Limit {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (self Min) Index(s string) (int, []int) {
|
||||
var count int
|
||||
|
||||
c := len(s) - self.Limit + 1
|
||||
if c <= 0 {
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
segments := acquireSegments(c)
|
||||
for i, r := range s {
|
||||
count++
|
||||
if count >= self.Limit {
|
||||
segments = append(segments, i+utf8.RuneLen(r))
|
||||
}
|
||||
}
|
||||
|
||||
if len(segments) == 0 {
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
return 0, segments
|
||||
}
|
||||
|
||||
func (self Min) Len() int {
|
||||
return lenNo
|
||||
}
|
||||
|
||||
func (self Min) String() string {
|
||||
return fmt.Sprintf("<min:%d>", self.Limit)
|
||||
}
|
27
vendor/github.com/gobwas/glob/match/nothing.go
generated
vendored
Normal file
27
vendor/github.com/gobwas/glob/match/nothing.go
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type Nothing struct{}
|
||||
|
||||
func NewNothing() Nothing {
|
||||
return Nothing{}
|
||||
}
|
||||
|
||||
func (self Nothing) Match(s string) bool {
|
||||
return len(s) == 0
|
||||
}
|
||||
|
||||
func (self Nothing) Index(s string) (int, []int) {
|
||||
return 0, segments0
|
||||
}
|
||||
|
||||
func (self Nothing) Len() int {
|
||||
return lenZero
|
||||
}
|
||||
|
||||
func (self Nothing) String() string {
|
||||
return fmt.Sprintf("<nothing>")
|
||||
}
|
50
vendor/github.com/gobwas/glob/match/prefix.go
generated
vendored
Normal file
50
vendor/github.com/gobwas/glob/match/prefix.go
generated
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type Prefix struct {
|
||||
Prefix string
|
||||
}
|
||||
|
||||
func NewPrefix(p string) Prefix {
|
||||
return Prefix{p}
|
||||
}
|
||||
|
||||
func (self Prefix) Index(s string) (int, []int) {
|
||||
idx := strings.Index(s, self.Prefix)
|
||||
if idx == -1 {
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
length := len(self.Prefix)
|
||||
var sub string
|
||||
if len(s) > idx+length {
|
||||
sub = s[idx+length:]
|
||||
} else {
|
||||
sub = ""
|
||||
}
|
||||
|
||||
segments := acquireSegments(len(sub) + 1)
|
||||
segments = append(segments, length)
|
||||
for i, r := range sub {
|
||||
segments = append(segments, length+i+utf8.RuneLen(r))
|
||||
}
|
||||
|
||||
return idx, segments
|
||||
}
|
||||
|
||||
func (self Prefix) Len() int {
|
||||
return lenNo
|
||||
}
|
||||
|
||||
func (self Prefix) Match(s string) bool {
|
||||
return strings.HasPrefix(s, self.Prefix)
|
||||
}
|
||||
|
||||
func (self Prefix) String() string {
|
||||
return fmt.Sprintf("<prefix:%s>", self.Prefix)
|
||||
}
|
55
vendor/github.com/gobwas/glob/match/prefix_any.go
generated
vendored
Normal file
55
vendor/github.com/gobwas/glob/match/prefix_any.go
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
|
||||
sutil "github.com/gobwas/glob/util/strings"
|
||||
)
|
||||
|
||||
type PrefixAny struct {
|
||||
Prefix string
|
||||
Separators []rune
|
||||
}
|
||||
|
||||
func NewPrefixAny(s string, sep []rune) PrefixAny {
|
||||
return PrefixAny{s, sep}
|
||||
}
|
||||
|
||||
func (self PrefixAny) Index(s string) (int, []int) {
|
||||
idx := strings.Index(s, self.Prefix)
|
||||
if idx == -1 {
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
n := len(self.Prefix)
|
||||
sub := s[idx+n:]
|
||||
i := sutil.IndexAnyRunes(sub, self.Separators)
|
||||
if i > -1 {
|
||||
sub = sub[:i]
|
||||
}
|
||||
|
||||
seg := acquireSegments(len(sub) + 1)
|
||||
seg = append(seg, n)
|
||||
for i, r := range sub {
|
||||
seg = append(seg, n+i+utf8.RuneLen(r))
|
||||
}
|
||||
|
||||
return idx, seg
|
||||
}
|
||||
|
||||
func (self PrefixAny) Len() int {
|
||||
return lenNo
|
||||
}
|
||||
|
||||
func (self PrefixAny) Match(s string) bool {
|
||||
if !strings.HasPrefix(s, self.Prefix) {
|
||||
return false
|
||||
}
|
||||
return sutil.IndexAnyRunes(s[len(self.Prefix):], self.Separators) == -1
|
||||
}
|
||||
|
||||
func (self PrefixAny) String() string {
|
||||
return fmt.Sprintf("<prefix_any:%s![%s]>", self.Prefix, string(self.Separators))
|
||||
}
|
62
vendor/github.com/gobwas/glob/match/prefix_suffix.go
generated
vendored
Normal file
62
vendor/github.com/gobwas/glob/match/prefix_suffix.go
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type PrefixSuffix struct {
|
||||
Prefix, Suffix string
|
||||
}
|
||||
|
||||
func NewPrefixSuffix(p, s string) PrefixSuffix {
|
||||
return PrefixSuffix{p, s}
|
||||
}
|
||||
|
||||
func (self PrefixSuffix) Index(s string) (int, []int) {
|
||||
prefixIdx := strings.Index(s, self.Prefix)
|
||||
if prefixIdx == -1 {
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
suffixLen := len(self.Suffix)
|
||||
if suffixLen <= 0 {
|
||||
return prefixIdx, []int{len(s) - prefixIdx}
|
||||
}
|
||||
|
||||
if (len(s) - prefixIdx) <= 0 {
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
segments := acquireSegments(len(s) - prefixIdx)
|
||||
for sub := s[prefixIdx:]; ; {
|
||||
suffixIdx := strings.LastIndex(sub, self.Suffix)
|
||||
if suffixIdx == -1 {
|
||||
break
|
||||
}
|
||||
|
||||
segments = append(segments, suffixIdx+suffixLen)
|
||||
sub = sub[:suffixIdx]
|
||||
}
|
||||
|
||||
if len(segments) == 0 {
|
||||
releaseSegments(segments)
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
reverseSegments(segments)
|
||||
|
||||
return prefixIdx, segments
|
||||
}
|
||||
|
||||
func (self PrefixSuffix) Len() int {
|
||||
return lenNo
|
||||
}
|
||||
|
||||
func (self PrefixSuffix) Match(s string) bool {
|
||||
return strings.HasPrefix(s, self.Prefix) && strings.HasSuffix(s, self.Suffix)
|
||||
}
|
||||
|
||||
func (self PrefixSuffix) String() string {
|
||||
return fmt.Sprintf("<prefix_suffix:[%s,%s]>", self.Prefix, self.Suffix)
|
||||
}
|
48
vendor/github.com/gobwas/glob/match/range.go
generated
vendored
Normal file
48
vendor/github.com/gobwas/glob/match/range.go
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type Range struct {
|
||||
Lo, Hi rune
|
||||
Not bool
|
||||
}
|
||||
|
||||
func NewRange(lo, hi rune, not bool) Range {
|
||||
return Range{lo, hi, not}
|
||||
}
|
||||
|
||||
func (self Range) Len() int {
|
||||
return lenOne
|
||||
}
|
||||
|
||||
func (self Range) Match(s string) bool {
|
||||
r, w := utf8.DecodeRuneInString(s)
|
||||
if len(s) > w {
|
||||
return false
|
||||
}
|
||||
|
||||
inRange := r >= self.Lo && r <= self.Hi
|
||||
|
||||
return inRange == !self.Not
|
||||
}
|
||||
|
||||
func (self Range) Index(s string) (int, []int) {
|
||||
for i, r := range s {
|
||||
if self.Not != (r >= self.Lo && r <= self.Hi) {
|
||||
return i, segmentsByRuneLength[utf8.RuneLen(r)]
|
||||
}
|
||||
}
|
||||
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
func (self Range) String() string {
|
||||
var not string
|
||||
if self.Not {
|
||||
not = "!"
|
||||
}
|
||||
return fmt.Sprintf("<range:%s[%s,%s]>", not, string(self.Lo), string(self.Hi))
|
||||
}
|
77
vendor/github.com/gobwas/glob/match/row.go
generated
vendored
Normal file
77
vendor/github.com/gobwas/glob/match/row.go
generated
vendored
Normal file
@@ -0,0 +1,77 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type Row struct {
|
||||
Matchers Matchers
|
||||
RunesLength int
|
||||
Segments []int
|
||||
}
|
||||
|
||||
func NewRow(len int, m ...Matcher) Row {
|
||||
return Row{
|
||||
Matchers: Matchers(m),
|
||||
RunesLength: len,
|
||||
Segments: []int{len},
|
||||
}
|
||||
}
|
||||
|
||||
func (self Row) matchAll(s string) bool {
|
||||
var idx int
|
||||
for _, m := range self.Matchers {
|
||||
length := m.Len()
|
||||
|
||||
var next, i int
|
||||
for next = range s[idx:] {
|
||||
i++
|
||||
if i == length {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if i < length || !m.Match(s[idx:idx+next+1]) {
|
||||
return false
|
||||
}
|
||||
|
||||
idx += next + 1
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (self Row) lenOk(s string) bool {
|
||||
var i int
|
||||
for range s {
|
||||
i++
|
||||
if i > self.RunesLength {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return self.RunesLength == i
|
||||
}
|
||||
|
||||
func (self Row) Match(s string) bool {
|
||||
return self.lenOk(s) && self.matchAll(s)
|
||||
}
|
||||
|
||||
func (self Row) Len() (l int) {
|
||||
return self.RunesLength
|
||||
}
|
||||
|
||||
func (self Row) Index(s string) (int, []int) {
|
||||
for i := range s {
|
||||
if len(s[i:]) < self.RunesLength {
|
||||
break
|
||||
}
|
||||
if self.matchAll(s[i:]) {
|
||||
return i, self.Segments
|
||||
}
|
||||
}
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
func (self Row) String() string {
|
||||
return fmt.Sprintf("<row_%d:[%s]>", self.RunesLength, self.Matchers)
|
||||
}
|
91
vendor/github.com/gobwas/glob/match/segments.go
generated
vendored
Normal file
91
vendor/github.com/gobwas/glob/match/segments.go
generated
vendored
Normal file
@@ -0,0 +1,91 @@
package match

import (
	"sync"
)

type SomePool interface {
	Get() []int
	Put([]int)
}

var segmentsPools [1024]sync.Pool

func toPowerOfTwo(v int) int {
	v--
	v |= v >> 1
	v |= v >> 2
	v |= v >> 4
	v |= v >> 8
	v |= v >> 16
	v++

	return v
}

const (
	cacheFrom             = 16
	cacheToAndHigher      = 1024
	cacheFromIndex        = 15
	cacheToAndHigherIndex = 1023
)

var (
	segments0 = []int{0}
	segments1 = []int{1}
	segments2 = []int{2}
	segments3 = []int{3}
	segments4 = []int{4}
)

var segmentsByRuneLength [5][]int = [5][]int{
	0: segments0,
	1: segments1,
	2: segments2,
	3: segments3,
	4: segments4,
}

func init() {
	for i := cacheToAndHigher; i >= cacheFrom; i >>= 1 {
		func(i int) {
			segmentsPools[i-1] = sync.Pool{New: func() interface{} {
				return make([]int, 0, i)
			}}
		}(i)
	}
}

func getTableIndex(c int) int {
	p := toPowerOfTwo(c)
	switch {
	case p >= cacheToAndHigher:
		return cacheToAndHigherIndex
	case p <= cacheFrom:
		return cacheFromIndex
	default:
		return p - 1
	}
}

func acquireSegments(c int) []int {
	// make []int with less capacity than cacheFrom
	// is faster than acquiring it from pool
	if c < cacheFrom {
		return make([]int, 0, c)
	}

	return segmentsPools[getTableIndex(c)].Get().([]int)[:0]
}

func releaseSegments(s []int) {
	c := cap(s)

	// make []int with less capacity than cacheFrom
	// is faster than acquiring it from pool
	if c < cacheFrom {
		return
	}

	segmentsPools[getTableIndex(c)].Put(s)
}
43
vendor/github.com/gobwas/glob/match/single.go
generated
vendored
Normal file
43
vendor/github.com/gobwas/glob/match/single.go
generated
vendored
Normal file
@@ -0,0 +1,43 @@
package match

import (
	"fmt"
	"github.com/gobwas/glob/util/runes"
	"unicode/utf8"
)

// single represents ?
type Single struct {
	Separators []rune
}

func NewSingle(s []rune) Single {
	return Single{s}
}

func (self Single) Match(s string) bool {
	r, w := utf8.DecodeRuneInString(s)
	if len(s) > w {
		return false
	}

	return runes.IndexRune(self.Separators, r) == -1
}

func (self Single) Len() int {
	return lenOne
}

func (self Single) Index(s string) (int, []int) {
	for i, r := range s {
		if runes.IndexRune(self.Separators, r) == -1 {
			return i, segmentsByRuneLength[utf8.RuneLen(r)]
		}
	}

	return -1, nil
}

func (self Single) String() string {
	return fmt.Sprintf("<single:![%s]>", string(self.Separators))
}
35
vendor/github.com/gobwas/glob/match/suffix.go
generated
vendored
Normal file
35
vendor/github.com/gobwas/glob/match/suffix.go
generated
vendored
Normal file
@@ -0,0 +1,35 @@
package match

import (
	"fmt"
	"strings"
)

type Suffix struct {
	Suffix string
}

func NewSuffix(s string) Suffix {
	return Suffix{s}
}

func (self Suffix) Len() int {
	return lenNo
}

func (self Suffix) Match(s string) bool {
	return strings.HasSuffix(s, self.Suffix)
}

func (self Suffix) Index(s string) (int, []int) {
	idx := strings.Index(s, self.Suffix)
	if idx == -1 {
		return -1, nil
	}

	return 0, []int{idx + len(self.Suffix)}
}

func (self Suffix) String() string {
	return fmt.Sprintf("<suffix:%s>", self.Suffix)
}
43
vendor/github.com/gobwas/glob/match/suffix_any.go
generated
vendored
Normal file
43
vendor/github.com/gobwas/glob/match/suffix_any.go
generated
vendored
Normal file
@@ -0,0 +1,43 @@
package match

import (
	"fmt"
	"strings"

	sutil "github.com/gobwas/glob/util/strings"
)

type SuffixAny struct {
	Suffix     string
	Separators []rune
}

func NewSuffixAny(s string, sep []rune) SuffixAny {
	return SuffixAny{s, sep}
}

func (self SuffixAny) Index(s string) (int, []int) {
	idx := strings.Index(s, self.Suffix)
	if idx == -1 {
		return -1, nil
	}

	i := sutil.LastIndexAnyRunes(s[:idx], self.Separators) + 1

	return i, []int{idx + len(self.Suffix) - i}
}

func (self SuffixAny) Len() int {
	return lenNo
}

func (self SuffixAny) Match(s string) bool {
	if !strings.HasSuffix(s, self.Suffix) {
		return false
	}
	return sutil.IndexAnyRunes(s[:len(s)-len(self.Suffix)], self.Separators) == -1
}

func (self SuffixAny) String() string {
	return fmt.Sprintf("<suffix_any:![%s]%s>", string(self.Separators), self.Suffix)
}
33
vendor/github.com/gobwas/glob/match/super.go
generated
vendored
Normal file
33
vendor/github.com/gobwas/glob/match/super.go
generated
vendored
Normal file
@@ -0,0 +1,33 @@
package match

import (
	"fmt"
)

type Super struct{}

func NewSuper() Super {
	return Super{}
}

func (self Super) Match(s string) bool {
	return true
}

func (self Super) Len() int {
	return lenNo
}

func (self Super) Index(s string) (int, []int) {
	segments := acquireSegments(len(s) + 1)
	for i := range s {
		segments = append(segments, i)
	}
	segments = append(segments, len(s))

	return 0, segments
}

func (self Super) String() string {
	return fmt.Sprintf("<super>")
}
45
vendor/github.com/gobwas/glob/match/text.go
generated
vendored
Normal file
45
vendor/github.com/gobwas/glob/match/text.go
generated
vendored
Normal file
@@ -0,0 +1,45 @@
package match

import (
	"fmt"
	"strings"
	"unicode/utf8"
)

// raw represents raw string to match
type Text struct {
	Str         string
	RunesLength int
	BytesLength int
	Segments    []int
}

func NewText(s string) Text {
	return Text{
		Str:         s,
		RunesLength: utf8.RuneCountInString(s),
		BytesLength: len(s),
		Segments:    []int{len(s)},
	}
}

func (self Text) Match(s string) bool {
	return self.Str == s
}

func (self Text) Len() int {
	return self.RunesLength
}

func (self Text) Index(s string) (int, []int) {
	index := strings.Index(s, self.Str)
	if index == -1 {
		return -1, nil
	}

	return index, self.Segments
}

func (self Text) String() string {
	return fmt.Sprintf("<text:`%v`>", self.Str)
}
148
vendor/github.com/gobwas/glob/readme.md
generated
vendored
Normal file
148
vendor/github.com/gobwas/glob/readme.md
generated
vendored
Normal file
@@ -0,0 +1,148 @@
# glob.[go](https://golang.org)

[![GoDoc][godoc-image]][godoc-url] [![Build Status][travis-image]][travis-url]

> Go Globbing Library.

## Install

```shell
go get github.com/gobwas/glob
```

## Example

```go
package main

import "github.com/gobwas/glob"

func main() {
	var g glob.Glob

	// create simple glob
	g = glob.MustCompile("*.github.com")
	g.Match("api.github.com") // true

	// quote meta characters and then create simple glob
	g = glob.MustCompile(glob.QuoteMeta("*.github.com"))
	g.Match("*.github.com") // true

	// create new glob with set of delimiters as ["."]
	g = glob.MustCompile("api.*.com", '.')
	g.Match("api.github.com") // true
	g.Match("api.gi.hub.com") // false

	// create new glob with set of delimiters as ["."]
	// but now with super wildcard
	g = glob.MustCompile("api.**.com", '.')
	g.Match("api.github.com") // true
	g.Match("api.gi.hub.com") // true

	// create glob with single symbol wildcard
	g = glob.MustCompile("?at")
	g.Match("cat") // true
	g.Match("fat") // true
	g.Match("at")  // false

	// create glob with single symbol wildcard and delimiters ['f']
	g = glob.MustCompile("?at", 'f')
	g.Match("cat") // true
	g.Match("fat") // false
	g.Match("at")  // false

	// create glob with character-list matchers
	g = glob.MustCompile("[abc]at")
	g.Match("cat") // true
	g.Match("bat") // true
	g.Match("fat") // false
	g.Match("at")  // false

	// create glob with character-list matchers
	g = glob.MustCompile("[!abc]at")
	g.Match("cat") // false
	g.Match("bat") // false
	g.Match("fat") // true
	g.Match("at")  // false

	// create glob with character-range matchers
	g = glob.MustCompile("[a-c]at")
	g.Match("cat") // true
	g.Match("bat") // true
	g.Match("fat") // false
	g.Match("at")  // false

	// create glob with character-range matchers
	g = glob.MustCompile("[!a-c]at")
	g.Match("cat") // false
	g.Match("bat") // false
	g.Match("fat") // true
	g.Match("at")  // false

	// create glob with pattern-alternatives list
	g = glob.MustCompile("{cat,bat,[fr]at}")
	g.Match("cat") // true
	g.Match("bat") // true
	g.Match("fat") // true
	g.Match("rat") // true
	g.Match("at")  // false
	g.Match("zat") // false
}
```

## Performance

This library is built for compile-once patterns: compilation may take some time, but subsequent matching is faster than re-parsing the pattern on every call.

If you do not reuse the compiled `glob.Glob` object and instead run `g := glob.MustCompile(pattern); g.Match(...)` every time, your code will be much slower.
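To make the reuse point concrete, here is a minimal editorial sketch (not part of the upstream readme); the pattern and host names are arbitrary examples:

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	hosts := []string{"api.github.com", "api.gitlab.com", "example.org"}

	// Fast path: compile the pattern once and reuse the matcher.
	g := glob.MustCompile("*.github.com")
	for _, h := range hosts {
		fmt.Println(h, g.Match(h))
	}

	// Slow path: recompiling the same pattern for every match
	// pays the parsing cost on each call.
	for _, h := range hosts {
		fmt.Println(h, glob.MustCompile("*.github.com").Match(h))
	}
}
```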
Run `go test -bench=.` from the source root to see the benchmarks:

Pattern | Fixture | Match | Speed (ns/op)
--------|---------|-------|--------------
`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my cat has very bright eyes` | `true` | 432
`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my dog has very bright eyes` | `false` | 199
`https://*.google.*` | `https://account.google.com` | `true` | 96
`https://*.google.*` | `https://google.com` | `false` | 66
`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://yahoo.com` | `true` | 163
`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://google.com` | `false` | 197
`{https://*gobwas.com,http://exclude.gobwas.com}` | `https://safe.gobwas.com` | `true` | 22
`{https://*gobwas.com,http://exclude.gobwas.com}` | `http://safe.gobwas.com` | `false` | 24
`abc*` | `abcdef` | `true` | 8.15
`abc*` | `af` | `false` | 5.68
`*def` | `abcdef` | `true` | 8.84
`*def` | `af` | `false` | 5.74
`ab*ef` | `abcdef` | `true` | 15.2
`ab*ef` | `af` | `false` | 10.4

The same patterns with the `regexp` package:

Pattern | Fixture | Match | Speed (ns/op)
--------|---------|-------|--------------
`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my cat has very bright eyes` | `true` | 2553
`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my dog has very bright eyes` | `false` | 1383
`^https:\/\/.*\.google\..*$` | `https://account.google.com` | `true` | 1205
`^https:\/\/.*\.google\..*$` | `https://google.com` | `false` | 767
`^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://yahoo.com` | `true` | 1435
`^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://google.com` | `false` | 1674
`^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `https://safe.gobwas.com` | `true` | 1039
`^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `http://safe.gobwas.com` | `false` | 272
`^abc.*$` | `abcdef` | `true` | 237
`^abc.*$` | `af` | `false` | 100
`^.*def$` | `abcdef` | `true` | 464
`^.*def$` | `af` | `false` | 265
`^ab.*ef$` | `abcdef` | `true` | 375
`^ab.*ef$` | `af` | `false` | 145

[godoc-image]: https://godoc.org/github.com/gobwas/glob?status.svg
[godoc-url]: https://godoc.org/github.com/gobwas/glob
[travis-image]: https://travis-ci.org/gobwas/glob.svg?branch=master
[travis-url]: https://travis-ci.org/gobwas/glob

## Syntax

The syntax is inspired by [standard wildcards](http://tldp.org/LDP/GNU-Linux-Tools-Summary/html/x11655.htm), except that `**` (the super-asterisk) is not sensitive to separators.
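To restate the separator rule above as a runnable sketch (an editorial addition, not part of the upstream readme): with `'.'` registered as a separator, `*` will not cross a dot, while `**` will.

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// '*' must stay within one '.'-delimited segment.
	single := glob.MustCompile("api.*.com", '.')
	// '**' ignores the separator and may span several segments.
	super := glob.MustCompile("api.**.com", '.')

	fmt.Println(single.Match("api.github.com")) // true
	fmt.Println(single.Match("api.gi.hub.com")) // false
	fmt.Println(super.Match("api.gi.hub.com"))  // true
}
```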
122
vendor/github.com/gobwas/glob/syntax/ast/ast.go
generated
vendored
Normal file
122
vendor/github.com/gobwas/glob/syntax/ast/ast.go
generated
vendored
Normal file
@@ -0,0 +1,122 @@
package ast

import (
	"bytes"
	"fmt"
)

type Node struct {
	Parent   *Node
	Children []*Node
	Value    interface{}
	Kind     Kind
}

func NewNode(k Kind, v interface{}, ch ...*Node) *Node {
	n := &Node{
		Kind:  k,
		Value: v,
	}
	for _, c := range ch {
		Insert(n, c)
	}
	return n
}

func (a *Node) Equal(b *Node) bool {
	if a.Kind != b.Kind {
		return false
	}
	if a.Value != b.Value {
		return false
	}
	if len(a.Children) != len(b.Children) {
		return false
	}
	for i, c := range a.Children {
		if !c.Equal(b.Children[i]) {
			return false
		}
	}
	return true
}

func (a *Node) String() string {
	var buf bytes.Buffer
	buf.WriteString(a.Kind.String())
	if a.Value != nil {
		buf.WriteString(" =")
		buf.WriteString(fmt.Sprintf("%v", a.Value))
	}
	if len(a.Children) > 0 {
		buf.WriteString(" [")
		for i, c := range a.Children {
			if i > 0 {
				buf.WriteString(", ")
			}
			buf.WriteString(c.String())
		}
		buf.WriteString("]")
	}
	return buf.String()
}

func Insert(parent *Node, children ...*Node) {
	parent.Children = append(parent.Children, children...)
	for _, ch := range children {
		ch.Parent = parent
	}
}

type List struct {
	Not   bool
	Chars string
}

type Range struct {
	Not    bool
	Lo, Hi rune
}

type Text struct {
	Text string
}

type Kind int

const (
	KindNothing Kind = iota
	KindPattern
	KindList
	KindRange
	KindText
	KindAny
	KindSuper
	KindSingle
	KindAnyOf
)

func (k Kind) String() string {
	switch k {
	case KindNothing:
		return "Nothing"
	case KindPattern:
		return "Pattern"
	case KindList:
		return "List"
	case KindRange:
		return "Range"
	case KindText:
		return "Text"
	case KindAny:
		return "Any"
	case KindSuper:
		return "Super"
	case KindSingle:
		return "Single"
	case KindAnyOf:
		return "AnyOf"
	default:
		return ""
	}
}
157
vendor/github.com/gobwas/glob/syntax/ast/parser.go
generated
vendored
Normal file
157
vendor/github.com/gobwas/glob/syntax/ast/parser.go
generated
vendored
Normal file
@@ -0,0 +1,157 @@
|
||||
package ast
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/gobwas/glob/syntax/lexer"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type Lexer interface {
|
||||
Next() lexer.Token
|
||||
}
|
||||
|
||||
type parseFn func(*Node, Lexer) (parseFn, *Node, error)
|
||||
|
||||
func Parse(lexer Lexer) (*Node, error) {
|
||||
var parser parseFn
|
||||
|
||||
root := NewNode(KindPattern, nil)
|
||||
|
||||
var (
|
||||
tree *Node
|
||||
err error
|
||||
)
|
||||
for parser, tree = parserMain, root; parser != nil; {
|
||||
parser, tree, err = parser(tree, lexer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return root, nil
|
||||
}
|
||||
|
||||
func parserMain(tree *Node, lex Lexer) (parseFn, *Node, error) {
|
||||
for {
|
||||
token := lex.Next()
|
||||
switch token.Type {
|
||||
case lexer.EOF:
|
||||
return nil, tree, nil
|
||||
|
||||
case lexer.Error:
|
||||
return nil, tree, errors.New(token.Raw)
|
||||
|
||||
case lexer.Text:
|
||||
Insert(tree, NewNode(KindText, Text{token.Raw}))
|
||||
return parserMain, tree, nil
|
||||
|
||||
case lexer.Any:
|
||||
Insert(tree, NewNode(KindAny, nil))
|
||||
return parserMain, tree, nil
|
||||
|
||||
case lexer.Super:
|
||||
Insert(tree, NewNode(KindSuper, nil))
|
||||
return parserMain, tree, nil
|
||||
|
||||
case lexer.Single:
|
||||
Insert(tree, NewNode(KindSingle, nil))
|
||||
return parserMain, tree, nil
|
||||
|
||||
case lexer.RangeOpen:
|
||||
return parserRange, tree, nil
|
||||
|
||||
case lexer.TermsOpen:
|
||||
a := NewNode(KindAnyOf, nil)
|
||||
Insert(tree, a)
|
||||
|
||||
p := NewNode(KindPattern, nil)
|
||||
Insert(a, p)
|
||||
|
||||
return parserMain, p, nil
|
||||
|
||||
case lexer.Separator:
|
||||
p := NewNode(KindPattern, nil)
|
||||
Insert(tree.Parent, p)
|
||||
|
||||
return parserMain, p, nil
|
||||
|
||||
case lexer.TermsClose:
|
||||
return parserMain, tree.Parent.Parent, nil
|
||||
|
||||
default:
|
||||
return nil, tree, fmt.Errorf("unexpected token: %s", token)
|
||||
}
|
||||
}
|
||||
return nil, tree, fmt.Errorf("unknown error")
|
||||
}
|
||||
|
||||
func parserRange(tree *Node, lex Lexer) (parseFn, *Node, error) {
|
||||
var (
|
||||
not bool
|
||||
lo rune
|
||||
hi rune
|
||||
chars string
|
||||
)
|
||||
for {
|
||||
token := lex.Next()
|
||||
switch token.Type {
|
||||
case lexer.EOF:
|
||||
return nil, tree, errors.New("unexpected end")
|
||||
|
||||
case lexer.Error:
|
||||
return nil, tree, errors.New(token.Raw)
|
||||
|
||||
case lexer.Not:
|
||||
not = true
|
||||
|
||||
case lexer.RangeLo:
|
||||
r, w := utf8.DecodeRuneInString(token.Raw)
|
||||
if len(token.Raw) > w {
|
||||
return nil, tree, fmt.Errorf("unexpected length of lo character")
|
||||
}
|
||||
lo = r
|
||||
|
||||
case lexer.RangeBetween:
|
||||
//
|
||||
|
||||
case lexer.RangeHi:
|
||||
r, w := utf8.DecodeRuneInString(token.Raw)
|
||||
if len(token.Raw) > w {
|
||||
return nil, tree, fmt.Errorf("unexpected length of lo character")
|
||||
}
|
||||
|
||||
hi = r
|
||||
|
||||
if hi < lo {
|
||||
return nil, tree, fmt.Errorf("hi character '%s' should be greater than lo '%s'", string(hi), string(lo))
|
||||
}
|
||||
|
||||
case lexer.Text:
|
||||
chars = token.Raw
|
||||
|
||||
case lexer.RangeClose:
|
||||
isRange := lo != 0 && hi != 0
|
||||
isChars := chars != ""
|
||||
|
||||
if isChars == isRange {
|
||||
return nil, tree, fmt.Errorf("could not parse range")
|
||||
}
|
||||
|
||||
if isRange {
|
||||
Insert(tree, NewNode(KindRange, Range{
|
||||
Lo: lo,
|
||||
Hi: hi,
|
||||
Not: not,
|
||||
}))
|
||||
} else {
|
||||
Insert(tree, NewNode(KindList, List{
|
||||
Chars: chars,
|
||||
Not: not,
|
||||
}))
|
||||
}
|
||||
|
||||
return parserMain, tree, nil
|
||||
}
|
||||
}
|
||||
}
|
273
vendor/github.com/gobwas/glob/syntax/lexer/lexer.go
generated
vendored
Normal file
273
vendor/github.com/gobwas/glob/syntax/lexer/lexer.go
generated
vendored
Normal file
@@ -0,0 +1,273 @@
|
||||
package lexer
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/gobwas/glob/util/runes"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
const (
|
||||
char_any = '*'
|
||||
char_comma = ','
|
||||
char_single = '?'
|
||||
char_escape = '\\'
|
||||
char_range_open = '['
|
||||
char_range_close = ']'
|
||||
char_terms_open = '{'
|
||||
char_terms_close = '}'
|
||||
char_range_not = '!'
|
||||
char_range_between = '-'
|
||||
)
|
||||
|
||||
var specials = []byte{
|
||||
char_any,
|
||||
char_single,
|
||||
char_escape,
|
||||
char_range_open,
|
||||
char_range_close,
|
||||
char_terms_open,
|
||||
char_terms_close,
|
||||
}
|
||||
|
||||
func Special(c byte) bool {
|
||||
return bytes.IndexByte(specials, c) != -1
|
||||
}
|
||||
|
||||
type tokens []Token
|
||||
|
||||
func (i *tokens) shift() (ret Token) {
|
||||
ret = (*i)[0]
|
||||
copy(*i, (*i)[1:])
|
||||
*i = (*i)[:len(*i)-1]
|
||||
return
|
||||
}
|
||||
|
||||
func (i *tokens) push(v Token) {
|
||||
*i = append(*i, v)
|
||||
}
|
||||
|
||||
func (i *tokens) empty() bool {
|
||||
return len(*i) == 0
|
||||
}
|
||||
|
||||
var eof rune = 0
|
||||
|
||||
type lexer struct {
|
||||
data string
|
||||
pos int
|
||||
err error
|
||||
|
||||
tokens tokens
|
||||
termsLevel int
|
||||
|
||||
lastRune rune
|
||||
lastRuneSize int
|
||||
hasRune bool
|
||||
}
|
||||
|
||||
func NewLexer(source string) *lexer {
|
||||
l := &lexer{
|
||||
data: source,
|
||||
tokens: tokens(make([]Token, 0, 4)),
|
||||
}
|
||||
return l
|
||||
}
|
||||
|
||||
func (l *lexer) Next() Token {
|
||||
if l.err != nil {
|
||||
return Token{Error, l.err.Error()}
|
||||
}
|
||||
if !l.tokens.empty() {
|
||||
return l.tokens.shift()
|
||||
}
|
||||
|
||||
l.fetchItem()
|
||||
return l.Next()
|
||||
}
|
||||
|
||||
func (l *lexer) peek() (r rune, w int) {
|
||||
if l.pos == len(l.data) {
|
||||
return eof, 0
|
||||
}
|
||||
|
||||
r, w = utf8.DecodeRuneInString(l.data[l.pos:])
|
||||
if r == utf8.RuneError {
|
||||
l.errorf("could not read rune")
|
||||
r = eof
|
||||
w = 0
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (l *lexer) read() rune {
|
||||
if l.hasRune {
|
||||
l.hasRune = false
|
||||
l.seek(l.lastRuneSize)
|
||||
return l.lastRune
|
||||
}
|
||||
|
||||
r, s := l.peek()
|
||||
l.seek(s)
|
||||
|
||||
l.lastRune = r
|
||||
l.lastRuneSize = s
|
||||
|
||||
return r
|
||||
}
|
||||
|
||||
func (l *lexer) seek(w int) {
|
||||
l.pos += w
|
||||
}
|
||||
|
||||
func (l *lexer) unread() {
|
||||
if l.hasRune {
|
||||
l.errorf("could not unread rune")
|
||||
return
|
||||
}
|
||||
l.seek(-l.lastRuneSize)
|
||||
l.hasRune = true
|
||||
}
|
||||
|
||||
func (l *lexer) errorf(f string, v ...interface{}) {
|
||||
l.err = fmt.Errorf(f, v...)
|
||||
}
|
||||
|
||||
func (l *lexer) inTerms() bool {
|
||||
return l.termsLevel > 0
|
||||
}
|
||||
|
||||
func (l *lexer) termsEnter() {
|
||||
l.termsLevel++
|
||||
}
|
||||
|
||||
func (l *lexer) termsLeave() {
|
||||
l.termsLevel--
|
||||
}
|
||||
|
||||
var inTextBreakers = []rune{char_single, char_any, char_range_open, char_terms_open}
|
||||
var inTermsBreakers = append(inTextBreakers, char_terms_close, char_comma)
|
||||
|
||||
func (l *lexer) fetchItem() {
|
||||
r := l.read()
|
||||
switch {
|
||||
case r == eof:
|
||||
l.tokens.push(Token{EOF, ""})
|
||||
|
||||
case r == char_terms_open:
|
||||
l.termsEnter()
|
||||
l.tokens.push(Token{TermsOpen, string(r)})
|
||||
|
||||
case r == char_comma && l.inTerms():
|
||||
l.tokens.push(Token{Separator, string(r)})
|
||||
|
||||
case r == char_terms_close && l.inTerms():
|
||||
l.tokens.push(Token{TermsClose, string(r)})
|
||||
l.termsLeave()
|
||||
|
||||
case r == char_range_open:
|
||||
l.tokens.push(Token{RangeOpen, string(r)})
|
||||
l.fetchRange()
|
||||
|
||||
case r == char_single:
|
||||
l.tokens.push(Token{Single, string(r)})
|
||||
|
||||
case r == char_any:
|
||||
if l.read() == char_any {
|
||||
l.tokens.push(Token{Super, string(r) + string(r)})
|
||||
} else {
|
||||
l.unread()
|
||||
l.tokens.push(Token{Any, string(r)})
|
||||
}
|
||||
|
||||
default:
|
||||
l.unread()
|
||||
|
||||
var breakers []rune
|
||||
if l.inTerms() {
|
||||
breakers = inTermsBreakers
|
||||
} else {
|
||||
breakers = inTextBreakers
|
||||
}
|
||||
l.fetchText(breakers)
|
||||
}
|
||||
}
|
||||
|
||||
func (l *lexer) fetchRange() {
|
||||
var wantHi bool
|
||||
var wantClose bool
|
||||
var seenNot bool
|
||||
for {
|
||||
r := l.read()
|
||||
if r == eof {
|
||||
l.errorf("unexpected end of input")
|
||||
return
|
||||
}
|
||||
|
||||
if wantClose {
|
||||
if r != char_range_close {
|
||||
l.errorf("expected close range character")
|
||||
} else {
|
||||
l.tokens.push(Token{RangeClose, string(r)})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if wantHi {
|
||||
l.tokens.push(Token{RangeHi, string(r)})
|
||||
wantClose = true
|
||||
continue
|
||||
}
|
||||
|
||||
if !seenNot && r == char_range_not {
|
||||
l.tokens.push(Token{Not, string(r)})
|
||||
seenNot = true
|
||||
continue
|
||||
}
|
||||
|
||||
if n, w := l.peek(); n == char_range_between {
|
||||
l.seek(w)
|
||||
l.tokens.push(Token{RangeLo, string(r)})
|
||||
l.tokens.push(Token{RangeBetween, string(n)})
|
||||
wantHi = true
|
||||
continue
|
||||
}
|
||||
|
||||
l.unread() // unread first peek and fetch as text
|
||||
l.fetchText([]rune{char_range_close})
|
||||
wantClose = true
|
||||
}
|
||||
}
|
||||
|
||||
func (l *lexer) fetchText(breakers []rune) {
|
||||
var data []rune
|
||||
var escaped bool
|
||||
|
||||
reading:
|
||||
for {
|
||||
r := l.read()
|
||||
if r == eof {
|
||||
break
|
||||
}
|
||||
|
||||
if !escaped {
|
||||
if r == char_escape {
|
||||
escaped = true
|
||||
continue
|
||||
}
|
||||
|
||||
if runes.IndexRune(breakers, r) != -1 {
|
||||
l.unread()
|
||||
break reading
|
||||
}
|
||||
}
|
||||
|
||||
escaped = false
|
||||
data = append(data, r)
|
||||
}
|
||||
|
||||
if len(data) > 0 {
|
||||
l.tokens.push(Token{Text, string(data)})
|
||||
}
|
||||
}
|
88
vendor/github.com/gobwas/glob/syntax/lexer/token.go
generated
vendored
Normal file
88
vendor/github.com/gobwas/glob/syntax/lexer/token.go
generated
vendored
Normal file
@@ -0,0 +1,88 @@
package lexer

import "fmt"

type TokenType int

const (
	EOF TokenType = iota
	Error
	Text
	Char
	Any
	Super
	Single
	Not
	Separator
	RangeOpen
	RangeClose
	RangeLo
	RangeHi
	RangeBetween
	TermsOpen
	TermsClose
)

func (tt TokenType) String() string {
	switch tt {
	case EOF:
		return "eof"

	case Error:
		return "error"

	case Text:
		return "text"

	case Char:
		return "char"

	case Any:
		return "any"

	case Super:
		return "super"

	case Single:
		return "single"

	case Not:
		return "not"

	case Separator:
		return "separator"

	case RangeOpen:
		return "range_open"

	case RangeClose:
		return "range_close"

	case RangeLo:
		return "range_lo"

	case RangeHi:
		return "range_hi"

	case RangeBetween:
		return "range_between"

	case TermsOpen:
		return "terms_open"

	case TermsClose:
		return "terms_close"

	default:
		return "undef"
	}
}

type Token struct {
	Type TokenType
	Raw  string
}

func (t Token) String() string {
	return fmt.Sprintf("%v<%q>", t.Type, t.Raw)
}
14
vendor/github.com/gobwas/glob/syntax/syntax.go
generated
vendored
Normal file
14
vendor/github.com/gobwas/glob/syntax/syntax.go
generated
vendored
Normal file
@@ -0,0 +1,14 @@
package syntax

import (
	"github.com/gobwas/glob/syntax/ast"
	"github.com/gobwas/glob/syntax/lexer"
)

func Parse(s string) (*ast.Node, error) {
	return ast.Parse(lexer.NewLexer(s))
}

func Special(b byte) bool {
	return lexer.Special(b)
}
154
vendor/github.com/gobwas/glob/util/runes/runes.go
generated
vendored
Normal file
154
vendor/github.com/gobwas/glob/util/runes/runes.go
generated
vendored
Normal file
@@ -0,0 +1,154 @@
package runes

func Index(s, needle []rune) int {
	ls, ln := len(s), len(needle)

	switch {
	case ln == 0:
		return 0
	case ln == 1:
		return IndexRune(s, needle[0])
	case ln == ls:
		if Equal(s, needle) {
			return 0
		}
		return -1
	case ln > ls:
		return -1
	}

head:
	for i := 0; i < ls && ls-i >= ln; i++ {
		for y := 0; y < ln; y++ {
			if s[i+y] != needle[y] {
				continue head
			}
		}

		return i
	}

	return -1
}

func LastIndex(s, needle []rune) int {
	ls, ln := len(s), len(needle)

	switch {
	case ln == 0:
		if ls == 0 {
			return 0
		}
		return ls
	case ln == 1:
		return IndexLastRune(s, needle[0])
	case ln == ls:
		if Equal(s, needle) {
			return 0
		}
		return -1
	case ln > ls:
		return -1
	}

head:
	for i := ls - 1; i >= 0 && i >= ln; i-- {
		for y := ln - 1; y >= 0; y-- {
			if s[i-(ln-y-1)] != needle[y] {
				continue head
			}
		}

		return i - ln + 1
	}

	return -1
}

// IndexAny returns the index of the first instance of any Unicode code point
// from chars in s, or -1 if no Unicode code point from chars is present in s.
func IndexAny(s, chars []rune) int {
	if len(chars) > 0 {
		for i, c := range s {
			for _, m := range chars {
				if c == m {
					return i
				}
			}
		}
	}
	return -1
}

func Contains(s, needle []rune) bool {
	return Index(s, needle) >= 0
}

func Max(s []rune) (max rune) {
	for _, r := range s {
		if r > max {
			max = r
		}
	}

	return
}

func Min(s []rune) rune {
	min := rune(-1)
	for _, r := range s {
		if min == -1 {
			min = r
			continue
		}

		if r < min {
			min = r
		}
	}

	return min
}

func IndexRune(s []rune, r rune) int {
	for i, c := range s {
		if c == r {
			return i
		}
	}
	return -1
}

func IndexLastRune(s []rune, r rune) int {
	for i := len(s) - 1; i >= 0; i-- {
		if s[i] == r {
			return i
		}
	}

	return -1
}

func Equal(a, b []rune) bool {
	if len(a) == len(b) {
		for i := 0; i < len(a); i++ {
			if a[i] != b[i] {
				return false
			}
		}

		return true
	}

	return false
}

// HasPrefix tests whether the string s begins with prefix.
func HasPrefix(s, prefix []rune) bool {
	return len(s) >= len(prefix) && Equal(s[0:len(prefix)], prefix)
}

// HasSuffix tests whether the string s ends with suffix.
func HasSuffix(s, suffix []rune) bool {
	return len(s) >= len(suffix) && Equal(s[len(s)-len(suffix):], suffix)
}
39
vendor/github.com/gobwas/glob/util/strings/strings.go
generated
vendored
Normal file
39
vendor/github.com/gobwas/glob/util/strings/strings.go
generated
vendored
Normal file
@@ -0,0 +1,39 @@
package strings

import (
	"strings"
	"unicode/utf8"
)

func IndexAnyRunes(s string, rs []rune) int {
	for _, r := range rs {
		if i := strings.IndexRune(s, r); i != -1 {
			return i
		}
	}

	return -1
}

func LastIndexAnyRunes(s string, rs []rune) int {
	for _, r := range rs {
		i := -1
		if 0 <= r && r < utf8.RuneSelf {
			i = strings.LastIndexByte(s, byte(r))
		} else {
			sub := s
			for len(sub) > 0 {
				j := strings.IndexRune(s, r)
				if j == -1 {
					break
				}
				i = j
				sub = sub[i+1:]
			}
		}
		if i != -1 {
			return i
		}
	}
	return -1
}
10
vendor/modules.txt
vendored
10
vendor/modules.txt
vendored
@@ -106,6 +106,16 @@ github.com/go-playground/universal-translator
# github.com/go-playground/validator/v10 v10.11.0
## explicit; go 1.13
github.com/go-playground/validator/v10
# github.com/gobwas/glob v0.2.3
## explicit
github.com/gobwas/glob
github.com/gobwas/glob/compiler
github.com/gobwas/glob/match
github.com/gobwas/glob/syntax
github.com/gobwas/glob/syntax/ast
github.com/gobwas/glob/syntax/lexer
github.com/gobwas/glob/util/runes
github.com/gobwas/glob/util/strings
# github.com/golang-jwt/jwt v3.2.2+incompatible
## explicit
github.com/golang-jwt/jwt