chore: upgrade coredns version (#550)

This commit is contained in:
naison
2025-04-19 10:06:56 +08:00
committed by GitHub
parent c42e3475f9
commit c9f1ce6522
1701 changed files with 235209 additions and 29271 deletions

View File

@@ -141,12 +141,14 @@ func (n *Normalizer) Normalize(input string, lexerOpts ...lexerOption) (normaliz
var lastToken Token // The last token that is not whitespace or comment
var groupablePlaceholder groupablePlaceholder
ctes := make(map[string]bool) // Holds the CTEs that are currently being processed
for {
token := lexer.Scan()
if token.Type == EOF {
break
}
n.collectMetadata(&token, &lastToken, statementMetadata)
n.collectMetadata(&token, &lastToken, statementMetadata, ctes)
n.normalizeSQL(&token, &lastToken, &normalizedSQLBuilder, &groupablePlaceholder, lexerOpts...)
}
@@ -158,7 +160,7 @@ func (n *Normalizer) Normalize(input string, lexerOpts ...lexerOption) (normaliz
return n.trimNormalizedSQL(normalizedSQL), statementMetadata, nil
}
func (n *Normalizer) collectMetadata(token *Token, lastToken *Token, statementMetadata *StatementMetadata) {
func (n *Normalizer) collectMetadata(token *Token, lastToken *Token, statementMetadata *StatementMetadata, ctes map[string]bool) {
if n.config.CollectComments && (token.Type == COMMENT || token.Type == MULTILINE_COMMENT) {
// Collect comments
statementMetadata.Comments = append(statementMetadata.Comments, token.Value)
@@ -175,9 +177,14 @@ func (n *Normalizer) collectMetadata(token *Token, lastToken *Token, statementMe
if n.config.CollectCommands && isCommand(strings.ToUpper(tokenVal)) {
// Collect commands
statementMetadata.Commands = append(statementMetadata.Commands, strings.ToUpper(tokenVal))
} else if strings.ToUpper(lastToken.Value) == "WITH" && token.Type == IDENT {
// Collect CTEs so we can skip them later in table collection
ctes[tokenVal] = true
} else if n.config.CollectTables && isTableIndicator(strings.ToUpper(lastToken.Value)) && !isSQLKeyword(token) {
// Collect table names
statementMetadata.Tables = append(statementMetadata.Tables, tokenVal)
// Collect table names if the token is not a CTE
if _, ok := ctes[tokenVal]; !ok {
statementMetadata.Tables = append(statementMetadata.Tables, tokenVal)
}
} else if n.config.CollectProcedure && isProcedure(lastToken) {
// Collect procedure names
statementMetadata.Procedures = append(statementMetadata.Procedures, tokenVal)

View File

@@ -22,13 +22,15 @@ func ObfuscateAndNormalize(input string, obfuscator *Obfuscator, normalizer *Nor
var lastToken Token // The last token that is not whitespace or comment
var groupablePlaceholder groupablePlaceholder
ctes := make(map[string]bool) // Holds the CTEs that are currently being processed
for {
token := lexer.Scan()
if token.Type == EOF {
break
}
token.Value = obfuscator.ObfuscateTokenValue(token, lexerOpts...)
normalizer.collectMetadata(&token, &lastToken, statementMetadata)
normalizer.collectMetadata(&token, &lastToken, statementMetadata, ctes)
normalizer.normalizeSQL(&token, &lastToken, &normalizedSQLBuilder, &groupablePlaceholder, lexerOpts...)
}

View File

@@ -49,8 +49,10 @@ var tableIndicators = map[string]bool{
"INTO": true,
"UPDATE": true,
"TABLE": true,
"EXISTS": true, // Drop Table If Exists
"STRAIGHT_JOIN": true, // MySQL
"CLONE": true, // Snowflake
"ONLY": true, // PostgreSQL
}
var keywords = map[string]bool{
@@ -145,6 +147,8 @@ var keywords = map[string]bool{
"OFFSET": true,
"OF": true,
"SKIP": true,
"IF": true,
"ONLY": true,
}
func isWhitespace(ch rune) bool {