From 5c1b48df2c5fd0770d4452798cfe73fd2d75260b Mon Sep 17 00:00:00 2001 From: Luc Talatinian <102624213+lucix-aws@users.noreply.github.com> Date: Mon, 13 Nov 2023 12:29:36 -0500 Subject: [PATCH] feat: ini2 parser (#2365) --- .../7d0bbc1477cd4cefa6958c9762ace056.json | 9 + config/config_test.go | 64 +++ config/shared_config_test.go | 25 +- internal/ini/ast.go | 120 ------ internal/ini/bench_test.go | 33 -- internal/ini/comma_token.go | 11 - internal/ini/comment_token.go | 35 -- internal/ini/dependency.go | 6 - internal/ini/doc.go | 43 -- internal/ini/empty_token.go | 4 - internal/ini/expression.go | 24 -- internal/ini/ini.go | 48 ++- internal/ini/ini_lexer.go | 161 -------- internal/ini/ini_lexer_test.go | 50 --- internal/ini/ini_parser.go | 356 ----------------- internal/ini/ini_parser_test.go | 376 ------------------ internal/ini/{walker_test.go => ini_test.go} | 77 +--- internal/ini/ini_trim_spaces_test.go | 75 ---- internal/ini/literal_tokens.go | 274 ------------- internal/ini/literal_tokens_test.go | 142 ------- internal/ini/newline_token.go | 30 -- internal/ini/op_tokens.go | 39 -- internal/ini/op_tokens_test.go | 73 ---- internal/ini/parse.go | 109 +++++ internal/ini/parse_error.go | 19 - internal/ini/parse_stack.go | 60 --- internal/ini/parse_stack_test.go | 59 --- internal/ini/sections.go | 157 ++++++++ internal/ini/sep_tokens.go | 41 -- internal/ini/sep_tokens_test.go | 70 ---- internal/ini/skipper.go | 45 --- internal/ini/skipper_test.go | 85 ---- internal/ini/statement.go | 35 -- internal/ini/strings.go | 83 ++++ .../ini/testdata/invalid/bad_section_name | 1 - internal/ini/testdata/invalid/bad_syntax_1 | 1 - internal/ini/testdata/invalid/bad_syntax_2 | 1 - .../invalid/incomplete_section_profile | 1 - internal/ini/testdata/invalid/invalid_keys | 2 - .../ini/testdata/invalid/syntax_error_comment | 1 - internal/ini/testdata/valid/nested_fields | 19 +- .../ini/testdata/valid/nested_fields_expected | 11 +- .../testdata/valid/op_sep_in_values_expected | 4 +- 
internal/ini/token.go | 32 ++ internal/ini/tokenize.go | 91 +++++ internal/ini/value.go | 104 +++++ internal/ini/value_util.go | 123 ------ internal/ini/value_util_test.go | 49 --- internal/ini/visitor.go | 288 -------------- internal/ini/walker.go | 25 -- internal/ini/ws_token.go | 24 -- 51 files changed, 703 insertions(+), 2912 deletions(-) create mode 100644 .changelog/7d0bbc1477cd4cefa6958c9762ace056.json delete mode 100644 internal/ini/ast.go delete mode 100644 internal/ini/bench_test.go delete mode 100644 internal/ini/comma_token.go delete mode 100644 internal/ini/comment_token.go delete mode 100644 internal/ini/dependency.go delete mode 100644 internal/ini/doc.go delete mode 100644 internal/ini/empty_token.go delete mode 100644 internal/ini/expression.go delete mode 100644 internal/ini/ini_lexer.go delete mode 100644 internal/ini/ini_lexer_test.go delete mode 100644 internal/ini/ini_parser.go delete mode 100644 internal/ini/ini_parser_test.go rename internal/ini/{walker_test.go => ini_test.go} (50%) delete mode 100644 internal/ini/ini_trim_spaces_test.go delete mode 100644 internal/ini/literal_tokens.go delete mode 100644 internal/ini/literal_tokens_test.go delete mode 100644 internal/ini/newline_token.go delete mode 100644 internal/ini/op_tokens.go delete mode 100644 internal/ini/op_tokens_test.go create mode 100644 internal/ini/parse.go delete mode 100644 internal/ini/parse_error.go delete mode 100644 internal/ini/parse_stack.go delete mode 100644 internal/ini/parse_stack_test.go create mode 100644 internal/ini/sections.go delete mode 100644 internal/ini/sep_tokens.go delete mode 100644 internal/ini/sep_tokens_test.go delete mode 100644 internal/ini/skipper.go delete mode 100644 internal/ini/skipper_test.go delete mode 100644 internal/ini/statement.go create mode 100644 internal/ini/strings.go delete mode 100644 internal/ini/testdata/invalid/bad_section_name delete mode 100644 internal/ini/testdata/invalid/bad_syntax_1 delete mode 100644 
internal/ini/testdata/invalid/bad_syntax_2 delete mode 100644 internal/ini/testdata/invalid/incomplete_section_profile delete mode 100644 internal/ini/testdata/invalid/invalid_keys delete mode 100644 internal/ini/testdata/invalid/syntax_error_comment create mode 100644 internal/ini/token.go create mode 100644 internal/ini/tokenize.go create mode 100644 internal/ini/value.go delete mode 100644 internal/ini/value_util.go delete mode 100644 internal/ini/value_util_test.go delete mode 100644 internal/ini/visitor.go delete mode 100644 internal/ini/walker.go delete mode 100644 internal/ini/ws_token.go diff --git a/.changelog/7d0bbc1477cd4cefa6958c9762ace056.json b/.changelog/7d0bbc1477cd4cefa6958c9762ace056.json new file mode 100644 index 00000000000..9ee25aa199c --- /dev/null +++ b/.changelog/7d0bbc1477cd4cefa6958c9762ace056.json @@ -0,0 +1,9 @@ +{ + "id": "7d0bbc14-77cd-4cef-a695-8c9762ace056", + "type": "feature", + "description": "Replace the legacy config parser with a modern, less-strict implementation. 
Parsing failures within a section will now simply ignore the invalid line rather than silently drop the entire section.", + "modules": [ + "config", + "internal/ini" + ] +} \ No newline at end of file diff --git a/config/config_test.go b/config/config_test.go index 0638fb96779..98200fc61ed 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -3,6 +3,7 @@ package config import ( "context" "fmt" + "os" "testing" "github.com/aws/aws-sdk-go-v2/aws" @@ -146,3 +147,66 @@ func TestLoadDefaultConfig(t *testing.T) { t.Fatal("expect error when optFn returns error, got nil") } } + +func BenchmarkLoadProfile1(b *testing.B) { + benchConfigLoad(b, 1) +} + +func BenchmarkLoadProfile10(b *testing.B) { + benchConfigLoad(b, 10) +} + +func BenchmarkLoadProfile100(b *testing.B) { + benchConfigLoad(b, 100) +} + +func BenchmarkLoadProfile1000(b *testing.B) { + benchConfigLoad(b, 1000) +} + +func benchConfigLoad(b *testing.B, n int) { + f, err := generateProfiles(n) + if err != nil { + b.Fatal(err) + } + + defer os.Remove(f) + opt := WithSharedConfigFiles([]string{f}) + + b.ResetTimer() + for n := 0; n < b.N; n++ { + LoadDefaultConfig(context.Background(), opt) + } +} + +const profileTemplate = ` +[profile role%d] +tool_sso_start_url = https://example.awsapps.com/start +tool_sso_region = us-west-2 +tool_sso_account_id = 12345678901234 +tool_sso_role_name = some_role_name +tool_generated_from = some_tool +credential_process = some_tool credential-process +` + +func generateProfiles(n int) (string, error) { + f, err := os.CreateTemp("", fmt.Sprintf("aws-bench-config-%d-*", n)) + if err != nil { + return "", err + } + + for i := 0; i < n; i++ { + if _, err := fmt.Fprintf(f, profileTemplate, i); err != nil { + f.Close() + os.Remove(f.Name()) + return "", err + } + } + + if err := f.Close(); err != nil { + os.Remove(f.Name()) + return "", err + } + + return f.Name(), nil +} diff --git a/config/shared_config_test.go b/config/shared_config_test.go index f09c548e9dd..359a28b894c 
100644 --- a/config/shared_config_test.go +++ b/config/shared_config_test.go @@ -22,11 +22,9 @@ import ( var _ regionProvider = (*SharedConfig)(nil) var ( - testConfigFilename = filepath.Join("testdata", "shared_config") - testConfigOtherFilename = filepath.Join("testdata", "shared_config_other") - testCredentialsFilename = filepath.Join("testdata", "shared_credentials") - testConfigLeadingWSFilename1 = filepath.Join("testdata", "leading_ws") - testConfigLeadingWSFilename2 = filepath.Join("testdata", "leading_ws_trailing_nl") + testConfigFilename = filepath.Join("testdata", "shared_config") + testConfigOtherFilename = filepath.Join("testdata", "shared_config_other") + testCredentialsFilename = filepath.Join("testdata", "shared_credentials") ) func TestNewSharedConfig(t *testing.T) { @@ -140,9 +138,10 @@ func TestNewSharedConfig(t *testing.T) { "Invalid INI file": { ConfigFilenames: []string{filepath.Join("testdata", "shared_config_invalid_ini")}, Profile: "profile_name", - Err: SharedConfigLoadError{ - Filename: filepath.Join("testdata", "shared_config_invalid_ini"), - Err: fmt.Errorf("invalid state"), + Err: SharedConfigProfileNotExistError{ + Filename: []string{filepath.Join("testdata", "shared_config_invalid_ini")}, + Profile: "profile_name", + Err: nil, }, }, "S3UseARNRegion property on profile": { @@ -687,16 +686,6 @@ func TestNewSharedConfig(t *testing.T) { EC2IMDSv1Disabled: aws.Bool(false), }, }, - "leading whitespace error 1": { - ConfigFilenames: []string{testConfigLeadingWSFilename1}, - Profile: "leading-whitespace-error", - Err: fmt.Errorf("Invalid token, remove leading whitespace"), - }, - "leading whitespace error 2": { - ConfigFilenames: []string{testConfigLeadingWSFilename2}, - Profile: "leading-whitespace-error", - Err: fmt.Errorf("Invalid token, remove leading whitespace"), - }, } for name, c := range cases { diff --git a/internal/ini/ast.go b/internal/ini/ast.go deleted file mode 100644 index e83a99886bc..00000000000 --- a/internal/ini/ast.go +++ 
/dev/null @@ -1,120 +0,0 @@ -package ini - -// ASTKind represents different states in the parse table -// and the type of AST that is being constructed -type ASTKind int - -// ASTKind* is used in the parse table to transition between -// the different states -const ( - ASTKindNone = ASTKind(iota) - ASTKindStart - ASTKindExpr - ASTKindEqualExpr - ASTKindStatement - ASTKindSkipStatement - ASTKindExprStatement - ASTKindSectionStatement - ASTKindNestedSectionStatement - ASTKindCompletedNestedSectionStatement - ASTKindCommentStatement - ASTKindCompletedSectionStatement -) - -func (k ASTKind) String() string { - switch k { - case ASTKindNone: - return "none" - case ASTKindStart: - return "start" - case ASTKindExpr: - return "expr" - case ASTKindStatement: - return "stmt" - case ASTKindSectionStatement: - return "section_stmt" - case ASTKindExprStatement: - return "expr_stmt" - case ASTKindCommentStatement: - return "comment" - case ASTKindNestedSectionStatement: - return "nested_section_stmt" - case ASTKindCompletedSectionStatement: - return "completed_stmt" - case ASTKindSkipStatement: - return "skip" - default: - return "" - } -} - -// AST interface allows us to determine what kind of node we -// are on and casting may not need to be necessary. -// -// The root is always the first node in Children -type AST struct { - Kind ASTKind - Root Token - RootToken bool - Children []AST -} - -func newAST(kind ASTKind, root AST, children ...AST) AST { - return AST{ - Kind: kind, - Children: append([]AST{root}, children...), - } -} - -func newASTWithRootToken(kind ASTKind, root Token, children ...AST) AST { - return AST{ - Kind: kind, - Root: root, - RootToken: true, - Children: children, - } -} - -// AppendChild will append to the list of children an AST has. -func (a *AST) AppendChild(child AST) { - a.Children = append(a.Children, child) -} - -// GetRoot will return the root AST which can be the first entry -// in the children list or a token. 
-func (a *AST) GetRoot() AST { - if a.RootToken { - return *a - } - - if len(a.Children) == 0 { - return AST{} - } - - return a.Children[0] -} - -// GetChildren will return the current AST's list of children -func (a *AST) GetChildren() []AST { - if len(a.Children) == 0 { - return []AST{} - } - - if a.RootToken { - return a.Children - } - - return a.Children[1:] -} - -// SetChildren will set and override all children of the AST. -func (a *AST) SetChildren(children []AST) { - if a.RootToken { - a.Children = children - } else { - a.Children = append(a.Children[:1], children...) - } -} - -// Start is used to indicate the starting state of the parse table. -var Start = newAST(ASTKindStart, AST{}) diff --git a/internal/ini/bench_test.go b/internal/ini/bench_test.go deleted file mode 100644 index e4ebec22077..00000000000 --- a/internal/ini/bench_test.go +++ /dev/null @@ -1,33 +0,0 @@ -package ini - -import ( - "testing" -) - -const ( - section = `[default] -region = us-west-2 -credential_source = Ec2InstanceMetadata -s3 = - foo=bar - bar=baz -output = json - -[assumerole] -output = json -region = us-west-2 -` -) - -func BenchmarkINIParser(b *testing.B) { - for i := 0; i < b.N; i++ { - ParseBytes([]byte(section)) - } -} - -func BenchmarkTokenize(b *testing.B) { - lexer := iniLexer{} - for i := 0; i < b.N; i++ { - lexer.tokenize([]byte(section)) - } -} diff --git a/internal/ini/comma_token.go b/internal/ini/comma_token.go deleted file mode 100644 index 0895d53cbe6..00000000000 --- a/internal/ini/comma_token.go +++ /dev/null @@ -1,11 +0,0 @@ -package ini - -var commaRunes = []rune(",") - -func isComma(b rune) bool { - return b == ',' -} - -func newCommaToken() Token { - return newToken(TokenComma, commaRunes, NoneType) -} diff --git a/internal/ini/comment_token.go b/internal/ini/comment_token.go deleted file mode 100644 index 0b76999ba1f..00000000000 --- a/internal/ini/comment_token.go +++ /dev/null @@ -1,35 +0,0 @@ -package ini - -// isComment will return whether or not 
the next byte(s) is a -// comment. -func isComment(b []rune) bool { - if len(b) == 0 { - return false - } - - switch b[0] { - case ';': - return true - case '#': - return true - } - - return false -} - -// newCommentToken will create a comment token and -// return how many bytes were read. -func newCommentToken(b []rune) (Token, int, error) { - i := 0 - for ; i < len(b); i++ { - if b[i] == '\n' { - break - } - - if len(b)-i > 2 && b[i] == '\r' && b[i+1] == '\n' { - break - } - } - - return newToken(TokenComment, b[:i], NoneType), i, nil -} diff --git a/internal/ini/dependency.go b/internal/ini/dependency.go deleted file mode 100644 index f5ebe52e1a5..00000000000 --- a/internal/ini/dependency.go +++ /dev/null @@ -1,6 +0,0 @@ -package ini - -import ( - // internal/ini module was carved out of this module - _ "github.com/aws/aws-sdk-go-v2" -) diff --git a/internal/ini/doc.go b/internal/ini/doc.go deleted file mode 100644 index fdd5321b4c6..00000000000 --- a/internal/ini/doc.go +++ /dev/null @@ -1,43 +0,0 @@ -// Package ini is an LL(1) parser for configuration files. 
-// -// Example: -// sections, err := ini.OpenFile("/path/to/file") -// if err != nil { -// panic(err) -// } -// -// profile := "foo" -// section, ok := sections.GetSection(profile) -// if !ok { -// fmt.Printf("section %q could not be found", profile) -// } -// -// Below is the BNF that describes this parser -// -// Grammar: -// stmt -> section | stmt' -// stmt' -> epsilon | expr -// expr -> value (stmt)* | equal_expr (stmt)* -// equal_expr -> value ( ':' | '=' ) equal_expr' -// equal_expr' -> number | string | quoted_string -// quoted_string -> " quoted_string' -// quoted_string' -> string quoted_string_end -// quoted_string_end -> " -// -// section -> [ section' -// section' -> section_value section_close -// section_value -> number | string_subset | boolean | quoted_string_subset -// quoted_string_subset -> " quoted_string_subset' -// quoted_string_subset' -> string_subset quoted_string_end -// quoted_string_subset -> " -// section_close -> ] -// -// value -> number | string_subset | boolean -// string -> ? UTF-8 Code-Points except '\n' (U+000A) and '\r\n' (U+000D U+000A) ? -// string_subset -> ? Code-points excepted by grammar except ':' (U+003A), '=' (U+003D), '[' (U+005B), and ']' (U+005D) ? -// -// SkipState will skip (NL WS)+ -// -// comment -> # comment' | ; comment' -// comment' -> epsilon | value -package ini diff --git a/internal/ini/empty_token.go b/internal/ini/empty_token.go deleted file mode 100644 index 04345a54c20..00000000000 --- a/internal/ini/empty_token.go +++ /dev/null @@ -1,4 +0,0 @@ -package ini - -// emptyToken is used to satisfy the Token interface -var emptyToken = newToken(TokenNone, []rune{}, NoneType) diff --git a/internal/ini/expression.go b/internal/ini/expression.go deleted file mode 100644 index 91ba2a59dd5..00000000000 --- a/internal/ini/expression.go +++ /dev/null @@ -1,24 +0,0 @@ -package ini - -// newExpression will return an expression AST. 
-// Expr represents an expression -// -// grammar: -// expr -> string | number -func newExpression(tok Token) AST { - return newASTWithRootToken(ASTKindExpr, tok) -} - -func newEqualExpr(left AST, tok Token) AST { - return newASTWithRootToken(ASTKindEqualExpr, tok, left) -} - -// EqualExprKey will return a LHS value in the equal expr -func EqualExprKey(ast AST) string { - children := ast.GetChildren() - if len(children) == 0 || ast.Kind != ASTKindEqualExpr { - return "" - } - - return string(children[0].Root.Raw()) -} diff --git a/internal/ini/ini.go b/internal/ini/ini.go index f7406231318..cefcce91e76 100644 --- a/internal/ini/ini.go +++ b/internal/ini/ini.go @@ -1,13 +1,26 @@ +// Package ini implements parsing of the AWS shared config file. +// +// Example: +// sections, err := ini.OpenFile("/path/to/file") +// if err != nil { +// panic(err) +// } +// +// profile := "foo" +// section, ok := sections.GetSection(profile) +// if !ok { +// fmt.Printf("section %q could not be found", profile) +// } package ini import ( "fmt" "io" "os" + "strings" ) -// OpenFile takes a path to a given file, and will open and parse -// that file. +// OpenFile parses shared config from the given file path. func OpenFile(path string) (sections Sections, err error) { f, oerr := os.Open(path) if oerr != nil { @@ -26,33 +39,18 @@ func OpenFile(path string) (sections Sections, err error) { return Parse(f, path) } -// Parse will parse the given file using the shared config -// visitor. -func Parse(f io.Reader, path string) (Sections, error) { - tree, err := ParseAST(f) +// Parse parses shared config from the given reader. 
+func Parse(r io.Reader, path string) (Sections, error) { + contents, err := io.ReadAll(r) if err != nil { - return Sections{}, err + return Sections{}, fmt.Errorf("read all: %v", err) } - v := NewDefaultVisitor(path) - if err = Walk(tree, v); err != nil { - return Sections{}, err - } - - return v.Sections, nil -} - -// ParseBytes will parse the given bytes and return the parsed sections. -func ParseBytes(b []byte) (Sections, error) { - tree, err := ParseASTBytes(b) + lines := strings.Split(string(contents), "\n") + tokens, err := tokenize(lines) if err != nil { - return Sections{}, err - } - - v := NewDefaultVisitor("") - if err = Walk(tree, v); err != nil { - return Sections{}, err + return Sections{}, fmt.Errorf("tokenize: %v", err) } - return v.Sections, nil + return parse(tokens, path), nil } diff --git a/internal/ini/ini_lexer.go b/internal/ini/ini_lexer.go deleted file mode 100644 index ab16760dd16..00000000000 --- a/internal/ini/ini_lexer.go +++ /dev/null @@ -1,161 +0,0 @@ -package ini - -import ( - "bytes" - "io" - "io/ioutil" -) - -// TokenType represents the various different tokens types -type TokenType int - -func (t TokenType) String() string { - switch t { - case TokenNone: - return "none" - case TokenLit: - return "literal" - case TokenSep: - return "sep" - case TokenOp: - return "op" - case TokenWS: - return "ws" - case TokenNL: - return "newline" - case TokenComment: - return "comment" - case TokenComma: - return "comma" - default: - return "" - } -} - -// TokenType enums -const ( - TokenNone = TokenType(iota) - TokenLit - TokenSep - TokenComma - TokenOp - TokenWS - TokenNL - TokenComment -) - -type iniLexer struct{} - -// Tokenize will return a list of tokens during lexical analysis of the -// io.Reader. 
-func (l *iniLexer) Tokenize(r io.Reader) ([]Token, error) { - b, err := ioutil.ReadAll(r) - if err != nil { - return nil, &UnableToReadFile{Err: err} - } - - return l.tokenize(b) -} - -func (l *iniLexer) tokenize(b []byte) ([]Token, error) { - runes := bytes.Runes(b) - var err error - n := 0 - tokenAmount := countTokens(runes) - tokens := make([]Token, tokenAmount) - count := 0 - - for len(runes) > 0 && count < tokenAmount { - switch { - case isSubProperty(runes): - tokens[count], n, err = newLitToken(runes) - case isWhitespace(runes[0]): - tokens[count], n, err = newWSToken(runes) - case isComma(runes[0]): - tokens[count], n = newCommaToken(), 1 - case isComment(runes): - tokens[count], n, err = newCommentToken(runes) - case isNewline(runes): - tokens[count], n, err = newNewlineToken(runes) - case isSep(runes): - tokens[count], n, err = newSepToken(runes) - case isOp(runes): - tokens[count], n, err = newOpToken(runes) - default: - tokens[count], n, err = newLitToken(runes) - } - - if err != nil { - return nil, err - } - - count++ - - runes = runes[n:] - } - - return tokens[:count], nil -} - -func countTokens(runes []rune) int { - count, n := 0, 0 - var err error - - for len(runes) > 0 { - switch { - case isSubProperty(runes): - _, n, err = newLitToken(runes) - case isWhitespace(runes[0]): - _, n, err = newWSToken(runes) - case isComma(runes[0]): - _, n = newCommaToken(), 1 - case isComment(runes): - _, n, err = newCommentToken(runes) - case isNewline(runes): - _, n, err = newNewlineToken(runes) - case isSep(runes): - _, n, err = newSepToken(runes) - case isOp(runes): - _, n, err = newOpToken(runes) - default: - _, n, err = newLitToken(runes) - } - - if err != nil { - return 0 - } - - count++ - runes = runes[n:] - } - - return count + 1 -} - -// Token indicates a metadata about a given value. 
-type Token struct { - t TokenType - ValueType ValueType - base int - raw []rune -} - -var emptyValue = Value{} - -func newToken(t TokenType, raw []rune, v ValueType) Token { - return Token{ - t: t, - raw: raw, - ValueType: v, - } -} - -// Raw return the raw runes that were consumed -func (tok Token) Raw() []rune { - return tok.raw -} - -// Type returns the token type -func (tok Token) Type() TokenType { - return tok.t -} diff --git a/internal/ini/ini_lexer_test.go b/internal/ini/ini_lexer_test.go deleted file mode 100644 index b6868f2ed3e..00000000000 --- a/internal/ini/ini_lexer_test.go +++ /dev/null @@ -1,50 +0,0 @@ -package ini - -import ( - "bytes" - "io" - "reflect" - "testing" -) - -func TestTokenize(t *testing.T) { - cases := []struct { - r io.Reader - expectedTokens []Token - expectedError bool - }{ - { - r: bytes.NewBuffer([]byte(`x = 123`)), - expectedTokens: []Token{ - newToken(TokenLit, []rune("x"), StringType), - newToken(TokenWS, []rune(" "), NoneType), - newToken(TokenOp, []rune("="), NoneType), - newToken(TokenWS, []rune(" "), NoneType), - newToken(TokenLit, []rune("123"), StringType), - }, - }, - { - r: bytes.NewBuffer([]byte(`[ foo ]`)), - expectedTokens: []Token{ - newToken(TokenSep, []rune("["), NoneType), - newToken(TokenWS, []rune(" "), NoneType), - newToken(TokenLit, []rune("foo"), StringType), - newToken(TokenWS, []rune(" "), NoneType), - newToken(TokenSep, []rune("]"), NoneType), - }, - }, - } - - for _, c := range cases { - lex := iniLexer{} - tokens, err := lex.Tokenize(c.r) - - if e, a := c.expectedError, err != nil; e != a { - t.Errorf("expected %t, but received %t", e, a) - } - - if e, a := c.expectedTokens, tokens; !reflect.DeepEqual(e, a) { - t.Errorf("expected %v, but received %v", e, a) - } - } -} diff --git a/internal/ini/ini_parser.go b/internal/ini/ini_parser.go deleted file mode 100644 index e7f8edab3a6..00000000000 --- a/internal/ini/ini_parser.go +++ /dev/null @@ -1,356 +0,0 @@ -package ini - -import ( - "fmt" - "io" -) - 
-// ParseState represents the current state of the parser. -type ParseState uint - -// State enums for the parse table -const ( - InvalidState ParseState = iota - // stmt -> value stmt' - StatementState - // stmt' -> MarkComplete | op stmt - StatementPrimeState - // value -> number | string | boolean | quoted_string - ValueState - // section -> [ section' - OpenScopeState - // section' -> value section_close - SectionState - // section_close -> ] - CloseScopeState - // SkipState will skip (NL WS)+ - SkipState - // SkipTokenState will skip any token and push the previous - // state onto the stack. - SkipTokenState - // comment -> # comment' | ; comment' - // comment' -> MarkComplete | value - CommentState - // MarkComplete state will complete statements and move that - // to the completed AST list - MarkCompleteState - // TerminalState signifies that the tokens have been fully parsed - TerminalState -) - -// parseTable is a state machine to dictate the grammar above. -var parseTable = map[ASTKind]map[TokenType]ParseState{ - ASTKindStart: { - TokenLit: StatementState, - TokenSep: OpenScopeState, - TokenWS: SkipTokenState, - TokenNL: SkipTokenState, - TokenComment: CommentState, - TokenNone: TerminalState, - }, - ASTKindCommentStatement: { - TokenLit: StatementState, - TokenSep: OpenScopeState, - TokenWS: SkipTokenState, - TokenNL: SkipTokenState, - TokenComment: CommentState, - TokenNone: MarkCompleteState, - }, - ASTKindExpr: { - TokenOp: StatementPrimeState, - TokenLit: ValueState, - TokenSep: OpenScopeState, - TokenWS: ValueState, - TokenNL: SkipState, - TokenComment: CommentState, - TokenNone: MarkCompleteState, - }, - ASTKindEqualExpr: { - TokenLit: ValueState, - TokenSep: ValueState, - TokenOp: ValueState, - TokenWS: SkipTokenState, - TokenNL: SkipState, - }, - ASTKindStatement: { - TokenLit: SectionState, - TokenSep: CloseScopeState, - TokenWS: SkipTokenState, - TokenNL: SkipTokenState, - TokenComment: CommentState, - TokenNone: MarkCompleteState, - }, - 
ASTKindExprStatement: { - TokenLit: ValueState, - TokenSep: ValueState, - TokenOp: ValueState, - TokenWS: ValueState, - TokenNL: MarkCompleteState, - TokenComment: CommentState, - TokenNone: TerminalState, - TokenComma: SkipState, - }, - ASTKindSectionStatement: { - TokenLit: SectionState, - TokenOp: SectionState, - TokenSep: CloseScopeState, - TokenWS: SectionState, - TokenNL: SkipTokenState, - }, - ASTKindCompletedSectionStatement: { - TokenWS: SkipTokenState, - TokenNL: SkipTokenState, - TokenLit: StatementState, - TokenSep: OpenScopeState, - TokenComment: CommentState, - TokenNone: MarkCompleteState, - }, - ASTKindSkipStatement: { - TokenLit: StatementState, - TokenSep: OpenScopeState, - TokenWS: SkipTokenState, - TokenNL: SkipTokenState, - TokenComment: CommentState, - TokenNone: TerminalState, - }, -} - -// ParseAST will parse input from an io.Reader using -// an LL(1) parser. -func ParseAST(r io.Reader) ([]AST, error) { - lexer := iniLexer{} - tokens, err := lexer.Tokenize(r) - if err != nil { - return []AST{}, err - } - - return parse(tokens) -} - -// ParseASTBytes will parse input from a byte slice using -// an LL(1) parser. -func ParseASTBytes(b []byte) ([]AST, error) { - lexer := iniLexer{} - tokens, err := lexer.tokenize(b) - if err != nil { - return []AST{}, err - } - - return parse(tokens) -} - -func parse(tokens []Token) ([]AST, error) { - start := Start - stack := newParseStack(3, len(tokens)) - - stack.Push(start) - s := newSkipper() - -loop: - for stack.Len() > 0 { - k := stack.Pop() - - var tok Token - if len(tokens) == 0 { - // this occurs when all the tokens have been processed - // but reduction of what's left on the stack needs to - // occur. - tok = emptyToken - } else { - tok = tokens[0] - } - - step := parseTable[k.Kind][tok.Type()] - if s.ShouldSkip(tok) { - // being in a skip state with no tokens will break out of - // the parse loop since there is nothing left to process. 
- if len(tokens) == 0 { - break loop - } - // if should skip is true, we skip the tokens until should skip is set to false. - step = SkipTokenState - } - - switch step { - case TerminalState: - // Finished parsing. Push what should be the last - // statement to the stack. If there is anything left - // on the stack, an error in parsing has occurred. - if k.Kind != ASTKindStart { - stack.MarkComplete(k) - } - break loop - case SkipTokenState: - // When skipping a token, the previous state was popped off the stack. - // To maintain the correct state, the previous state will be pushed - // onto the stack. - stack.Push(k) - case StatementState: - if k.Kind != ASTKindStart { - if tok.Type() == TokenLit && isSubProperty(tok.raw) { - return nil, NewParseError( - fmt.Sprintf( - "Invalid token, remove leading whitespace %s", - string(tok.raw)), - ) - } - stack.MarkComplete(k) - } - expr := newExpression(tok) - stack.Push(expr) - case StatementPrimeState: - if tok.Type() != TokenOp { - stack.MarkComplete(k) - continue - } - - if k.Kind != ASTKindExpr { - return nil, NewParseError( - fmt.Sprintf("invalid expression: expected Expr type, but found %T type", k), - ) - } - - k = trimSpaces(k) - expr := newEqualExpr(k, tok) - stack.Push(expr) - case ValueState: - // ValueState requires the previous state to either be an equal expression - // or an expression statement. - switch k.Kind { - case ASTKindEqualExpr: - // assigning a value to some key - k.AppendChild(newExpression(tok)) - stack.Push(newExprStatement(k)) - case ASTKindExpr: - k.Root.raw = append(k.Root.raw, tok.Raw()...) - stack.Push(k) - case ASTKindExprStatement: - root := k.GetRoot() - children := root.GetChildren() - if len(children) == 0 { - return nil, NewParseError( - fmt.Sprintf("invalid expression: AST contains no children %s", k.Kind), - ) - } - - rhs := children[len(children)-1] - - if rhs.Root.ValueType != QuotedStringType { - rhs.Root.ValueType = StringType - rhs.Root.raw = append(rhs.Root.raw, tok.Raw()...) 
- - } - - children[len(children)-1] = rhs - root.SetChildren(children) - - stack.Push(k) - } - case OpenScopeState: - if !runeCompare(tok.Raw(), openBrace) { - return nil, NewParseError("expected '['") - } - // If OpenScopeState is not at the start, we must mark the previous ast as complete - // - // for example: if previous ast was a skip statement; - // we should mark it as complete before we create a new statement - if k.Kind != ASTKindStart { - stack.MarkComplete(k) - } - - stmt := newStatement() - stack.Push(stmt) - case CloseScopeState: - if !runeCompare(tok.Raw(), closeBrace) { - return nil, NewParseError("expected ']'") - } - - k = trimSpaces(k) - stack.Push(newCompletedSectionStatement(k)) - case SectionState: - var stmt AST - - switch k.Kind { - case ASTKindStatement: - // If there are multiple literals inside of a scope declaration, - // then the current token's raw value will be appended to the Name. - // - // This handles cases like [ profile default ] - // - // k will represent a SectionStatement with the children representing - // the label of the section - stmt = newSectionStatement(tok) - case ASTKindSectionStatement: - k.Root.raw = append(k.Root.raw, tok.Raw()...) 
- stmt = k - default: - return nil, NewParseError( - fmt.Sprintf("invalid statement: expected statement: %v", k.Kind), - ) - } - - stack.Push(stmt) - case MarkCompleteState: - if k.Kind != ASTKindStart { - stack.MarkComplete(k) - } - - if stack.Len() == 0 { - stack.Push(start) - } - case SkipState: - stack.Push(newSkipStatement(k)) - s.Skip() - case CommentState: - if k.Kind == ASTKindStart { - stack.Push(k) - } else { - stack.MarkComplete(k) - } - - stmt := newCommentStatement(tok) - stack.Push(stmt) - default: - return nil, NewParseError( - fmt.Sprintf("invalid state with ASTKind %v and TokenType %v", - k.Kind, tok.Type())) - } - - if len(tokens) > 0 { - tokens = tokens[1:] - } - } - - // this occurs when a statement has not been completed - if stack.top > 1 { - return nil, NewParseError(fmt.Sprintf("incomplete ini expression")) - } - - // returns a sublist which exludes the start symbol - return stack.List(), nil -} - -// trimSpaces will trim spaces on the left and right hand side of -// the literal. 
-func trimSpaces(k AST) AST { - // trim left hand side of spaces - for i := 0; i < len(k.Root.raw); i++ { - if !isWhitespace(k.Root.raw[i]) { - break - } - - k.Root.raw = k.Root.raw[1:] - i-- - } - - // trim right hand side of spaces - for i := len(k.Root.raw) - 1; i >= 0; i-- { - if !isWhitespace(k.Root.raw[i]) { - break - } - - k.Root.raw = k.Root.raw[:len(k.Root.raw)-1] - } - - return k -} diff --git a/internal/ini/ini_parser_test.go b/internal/ini/ini_parser_test.go deleted file mode 100644 index a4e51088507..00000000000 --- a/internal/ini/ini_parser_test.go +++ /dev/null @@ -1,376 +0,0 @@ -package ini - -import ( - "bytes" - "fmt" - "io" - "reflect" - "testing" -) - -func TestParser(t *testing.T) { - xID, _, _ := newLitToken([]rune("x = 1234")) - s3ID, _, _ := newLitToken([]rune("s3 = 1234")) - fooSlashes, _, _ := newLitToken([]rune("//foo")) - - regionID, _, _ := newLitToken([]rune("region")) - regionLit, _, _ := newLitToken([]rune(`"us-west-2"`)) - regionNoQuotesLit, _, _ := newLitToken([]rune("us-west-2")) - - s3ServiceID, _, _ := newLitToken([]rune("s3")) - nestedParamsLit, _, _ := newLitToken([]rune("\n\tfoo=bar\n\tbar=baz\n")) - - credentialID, _, _ := newLitToken([]rune("credential_source")) - ec2MetadataLit, _, _ := newLitToken([]rune("Ec2InstanceMetadata")) - - outputID, _, _ := newLitToken([]rune("output")) - outputLit, _, _ := newLitToken([]rune("json")) - - sepInValueID, _, _ := newLitToken([]rune("sepInValue")) - sepInValueLit := newToken(TokenOp, []rune("=:[foo]]bar["), StringType) - - equalOp, _, _ := newOpToken([]rune("= 1234")) - equalColonOp, _, _ := newOpToken([]rune(": 1234")) - numLit, _, _ := newLitToken([]rune("1234")) - defaultID, _, _ := newLitToken([]rune("default")) - assumeID, _, _ := newLitToken([]rune("assumerole")) - - defaultProfileStmt := newSectionStatement(defaultID) - assumeProfileStmt := newSectionStatement(assumeID) - - fooSlashesExpr := newExpression(fooSlashes) - - xEQ1234 := newEqualExpr(newExpression(xID), equalOp) - 
xEQ1234.AppendChild(newExpression(numLit)) - xEQColon1234 := newEqualExpr(newExpression(xID), equalColonOp) - xEQColon1234.AppendChild(newExpression(numLit)) - - regionEQRegion := newEqualExpr(newExpression(regionID), equalOp) - regionEQRegion.AppendChild(newExpression(regionLit)) - - noQuotesRegionEQRegion := newEqualExpr(newExpression(regionID), equalOp) - noQuotesRegionEQRegion.AppendChild(newExpression(regionNoQuotesLit)) - - credEQExpr := newEqualExpr(newExpression(credentialID), equalOp) - credEQExpr.AppendChild(newExpression(ec2MetadataLit)) - - outputEQExpr := newEqualExpr(newExpression(outputID), equalOp) - outputEQExpr.AppendChild(newExpression(outputLit)) - - sepInValueExpr := newEqualExpr(newExpression(sepInValueID), equalOp) - sepInValueExpr.AppendChild(newExpression(sepInValueLit)) - - nestedEQExpr := newEqualExpr(newExpression(s3ServiceID), equalOp) - nestedEQExpr.AppendChild(newExpression(nestedParamsLit)) - - cases := []struct { - name string - r io.Reader - expectedStack []AST - expectedError bool - }{ - { - name: "semicolon comment", - r: bytes.NewBuffer([]byte(`;foo`)), - expectedStack: []AST{ - newCommentStatement(newToken(TokenComment, []rune(";foo"), NoneType)), - }, - }, - { - name: "0==0", - r: bytes.NewBuffer([]byte(`0==0`)), - expectedStack: []AST{ - func() AST { - equalExpr := newEqualExpr(newExpression(newToken(TokenLit, []rune("0"), StringType)), equalOp) - equalExpr.AppendChild(newExpression(newToken(TokenOp, []rune("=0"), StringType))) - return newExprStatement(equalExpr) - }(), - }, - }, - { - name: "0=:0", - r: bytes.NewBuffer([]byte(`0=:0`)), - expectedStack: []AST{ - func() AST { - equalExpr := newEqualExpr(newExpression(newToken(TokenLit, []rune("0"), StringType)), equalOp) - equalExpr.AppendChild(newExpression(newToken(TokenOp, []rune(":0"), StringType))) - return newExprStatement(equalExpr) - }(), - }, - }, - { - name: "0:=0", - r: bytes.NewBuffer([]byte(`0:=0`)), - expectedStack: []AST{ - func() AST { - equalExpr := 
newEqualExpr(newExpression(newToken(TokenLit, []rune("0"), StringType)), equalColonOp) - equalExpr.AppendChild(newExpression(newToken(TokenOp, []rune("=0"), StringType))) - return newExprStatement(equalExpr) - }(), - }, - }, - { - name: "0::0", - r: bytes.NewBuffer([]byte(`0::0`)), - expectedStack: []AST{ - func() AST { - equalExpr := newEqualExpr(newExpression(newToken(TokenLit, []rune("0"), StringType)), equalColonOp) - equalExpr.AppendChild(newExpression(newToken(TokenOp, []rune(":0"), StringType))) - return newExprStatement(equalExpr) - }(), - }, - }, - { - name: "section with variable", - r: bytes.NewBuffer([]byte(`[ default ]x`)), - expectedStack: []AST{ - newCompletedSectionStatement( - defaultProfileStmt, - ), - newExpression(xID), - }, - }, - { - name: "# comment", - r: bytes.NewBuffer([]byte(`# foo`)), - expectedStack: []AST{ - newCommentStatement(newToken(TokenComment, []rune("# foo"), NoneType)), - }, - }, - { - name: "// not a comment", - r: bytes.NewBuffer([]byte(`//foo`)), - expectedStack: []AST{ - fooSlashesExpr, - }, - }, - { - name: "multiple comments", - r: bytes.NewBuffer([]byte(`;foo - # baz - `)), - expectedStack: []AST{ - newCommentStatement(newToken(TokenComment, []rune(";foo"), NoneType)), - newCommentStatement(newToken(TokenComment, []rune("# baz"), NoneType)), - }, - }, - { - name: "comment followed by skip state", - r: bytes.NewBuffer([]byte(`;foo - //foo - # baz - `)), - expectedStack: []AST{ - newCommentStatement(newToken(TokenComment, []rune(";foo"), NoneType)), - }, - }, - { - name: "assignment", - r: bytes.NewBuffer([]byte(`x = 1234`)), - expectedStack: []AST{ - newExprStatement(xEQ1234), - }, - }, - { - name: "assignment spaceless", - r: bytes.NewBuffer([]byte(`x=1234`)), - expectedStack: []AST{ - newExprStatement(xEQ1234), - }, - }, - { - name: "assignment :", - r: bytes.NewBuffer([]byte(`x : 1234`)), - expectedStack: []AST{ - newExprStatement(xEQColon1234), - }, - }, - { - name: "assignment : no spaces", - r: 
bytes.NewBuffer([]byte(`x:1234`)), - expectedStack: []AST{ - newExprStatement(xEQColon1234), - }, - }, - { - name: "section expression", - r: bytes.NewBuffer([]byte(`[ default ]`)), - expectedStack: []AST{ - newCompletedSectionStatement( - defaultProfileStmt, - ), - }, - }, - { - name: "section expression no spaces", - r: bytes.NewBuffer([]byte(`[default]`)), - expectedStack: []AST{ - newCompletedSectionStatement( - defaultProfileStmt, - ), - }, - }, - { - name: "section statement", - r: bytes.NewBuffer([]byte( - `[default] -region="us-west-2"`)), - expectedStack: []AST{ - newCompletedSectionStatement( - defaultProfileStmt, - ), - newExprStatement(regionEQRegion), - }, - }, - { - name: "complex section statement", - r: bytes.NewBuffer([]byte( - `[default] -region = us-west-2 -credential_source = Ec2InstanceMetadata -output = json - -[assumerole] -output = json -region = us-west-2`)), - expectedStack: []AST{ - newCompletedSectionStatement( - defaultProfileStmt, - ), - newExprStatement(noQuotesRegionEQRegion), - newExprStatement(credEQExpr), - newExprStatement(outputEQExpr), - newCompletedSectionStatement( - assumeProfileStmt, - ), - newExprStatement(outputEQExpr), - newExprStatement(noQuotesRegionEQRegion), - }, - }, - { - name: "complex section statement with nested params", - r: bytes.NewBuffer([]byte(`[default] -s3 = - foo=bar - bar=baz -region = us-west-2 -credential_source = Ec2InstanceMetadata -output = json - -[assumerole] -output = json -region = us-west-2 - `)), - expectedStack: []AST{ - newCompletedSectionStatement( - defaultProfileStmt, - ), - newExprStatement(nestedEQExpr), - newExprStatement(noQuotesRegionEQRegion), - newExprStatement(credEQExpr), - newExprStatement(outputEQExpr), - newCompletedSectionStatement( - assumeProfileStmt, - ), - newExprStatement(outputEQExpr), - newExprStatement(noQuotesRegionEQRegion), - }, - }, - { - name: "complex section statement", - r: bytes.NewBuffer([]byte(`[default] -region = us-west-2 -credential_source = 
Ec2InstanceMetadata -s3 = - foo=bar - bar=baz -output = json - -[assumerole] -output = json -region = us-west-2 - `)), - expectedStack: []AST{ - newCompletedSectionStatement( - defaultProfileStmt, - ), - newExprStatement(noQuotesRegionEQRegion), - newExprStatement(credEQExpr), - newExprStatement(nestedEQExpr), - newExprStatement(outputEQExpr), - newCompletedSectionStatement( - assumeProfileStmt, - ), - newExprStatement(outputEQExpr), - newExprStatement(noQuotesRegionEQRegion), - }, - }, - { - name: "missing section statement", - r: bytes.NewBuffer([]byte( - `[default] -s3 = -[assumerole] -output = json - `)), - expectedStack: []AST{ - newCompletedSectionStatement( - defaultProfileStmt, - ), - newSkipStatement(newEqualExpr(newExpression(s3ID), equalOp)), - newCompletedSectionStatement( - assumeProfileStmt, - ), - newExprStatement(outputEQExpr), - }, - }, - { - name: "token seperators [ and ] in values", - r: bytes.NewBuffer([]byte( - `[default] -sepInValue = =:[foo]]bar[ -output = json -[assumerole] -sepInValue==:[foo]]bar[ -output = json -`)), - expectedStack: []AST{ - newCompletedSectionStatement(defaultProfileStmt), - newExprStatement(sepInValueExpr), - newExprStatement(outputEQExpr), - newCompletedSectionStatement(assumeProfileStmt), - newExprStatement(sepInValueExpr), - newExprStatement(outputEQExpr), - }, - }, - } - - for i, c := range cases { - t.Run(c.name, func(t *testing.T) { - stack, err := ParseAST(c.r) - - if e, a := c.expectedError, err != nil; e != a { - t.Errorf("%d: expected %t, but received %t with error %v", i, e, a, err) - } - - if e, a := len(c.expectedStack), len(stack); e != a { - t.Errorf("expected same length %d, but received %d", e, a) - } - - if e, a := c.expectedStack, stack; !reflect.DeepEqual(e, a) { - buf := bytes.Buffer{} - buf.WriteString("expected:\n") - for j := 0; j < len(e); j++ { - buf.WriteString(fmt.Sprintf("\t%d: %v\n", j, e[j])) - } - - buf.WriteString("\nreceived:\n") - for j := 0; j < len(a); j++ { - 
buf.WriteString(fmt.Sprintf("\t%d: %v\n", j, a[j])) - } - - t.Errorf("%s", buf.String()) - } - }) - } -} diff --git a/internal/ini/walker_test.go b/internal/ini/ini_test.go similarity index 50% rename from internal/ini/walker_test.go rename to internal/ini/ini_test.go index 87cef0b5419..92c7e2007bd 100644 --- a/internal/ini/walker_test.go +++ b/internal/ini/ini_test.go @@ -36,17 +36,11 @@ func TestValidDataFiles(t *testing.T) { } }() - tree, err := ParseAST(f) + v, err := Parse(f, path) if err != nil { t.Errorf("%s: unexpected parse error, %v", path, err) } - v := NewDefaultVisitor(path) - err = Walk(tree, v) - if err != nil { - t.Errorf("%s: unexpected walk error, %v", path, err) - } - expectedPath := path + "_expected" e := map[string]interface{}{} @@ -62,7 +56,7 @@ func TestValidDataFiles(t *testing.T) { } for profile, tableIface := range e { - p, ok := v.Sections.GetSection(profile) + p, ok := v.GetSection(profile) if !ok { t.Fatal("could not find profile " + profile) } @@ -75,7 +69,7 @@ func TestValidDataFiles(t *testing.T) { if p.values[k].mp != nil { a = fmt.Sprintf("%v", p.values[k].mp) } else { - a = p.String(k) + a = p.values[k].str } if e != a { t.Errorf("%s: expected %v, but received %v for profile %v", path, e, a, profile) @@ -92,68 +86,3 @@ func TestValidDataFiles(t *testing.T) { t.Fatalf("Error while walking the file tree rooted at root, %d", err) } } - -func TestInvalidDataFiles(t *testing.T) { - cases := []struct { - path string - expectedParseError bool - expectedWalkError bool - }{ - { - path: "./testdata/invalid/bad_syntax_1", - expectedParseError: true, - }, - { - path: "./testdata/invalid/bad_syntax_2", - expectedParseError: true, - }, - { - path: "./testdata/invalid/incomplete_section_profile", - expectedParseError: true, - }, - { - path: "./testdata/invalid/syntax_error_comment", - expectedParseError: true, - }, - { - path: "./testdata/invalid/invalid_keys", - expectedParseError: true, - }, - { - path: "./testdata/invalid/bad_section_name", 
- expectedParseError: true, - }, - } - - for i, c := range cases { - t.Run(c.path, func(t *testing.T) { - f, err := os.Open(c.path) - if err != nil { - t.Errorf("unexpected error, %v", err) - } - defer func() { - closeErr := f.Close() - if closeErr != nil { - t.Errorf("unexpected file close error: %v", closeErr) - } - }() - - tree, err := ParseAST(f) - if err != nil && !c.expectedParseError { - t.Errorf("%d: unexpected error, %v", i+1, err) - } else if err == nil && c.expectedParseError { - t.Errorf("%d: expected error, but received none", i+1) - } - - if c.expectedParseError { - return - } - - v := NewDefaultVisitor(c.path) - err = Walk(tree, v) - if err == nil && c.expectedWalkError { - t.Errorf("%d: expected error, but received none", i+1) - } - }) - } -} diff --git a/internal/ini/ini_trim_spaces_test.go b/internal/ini/ini_trim_spaces_test.go deleted file mode 100644 index 2b8005ad42e..00000000000 --- a/internal/ini/ini_trim_spaces_test.go +++ /dev/null @@ -1,75 +0,0 @@ -package ini - -import ( - "reflect" - "testing" -) - -func TestTrimSpaces(t *testing.T) { - cases := []struct { - name string - node AST - expectedNode AST - }{ - { - name: "simple case", - node: AST{ - Root: Token{ - raw: []rune("foo"), - }, - }, - expectedNode: AST{ - Root: Token{ - raw: []rune("foo"), - }, - }, - }, - { - name: "LHS case", - node: AST{ - Root: Token{ - raw: []rune(" foo"), - }, - }, - expectedNode: AST{ - Root: Token{ - raw: []rune("foo"), - }, - }, - }, - { - name: "RHS case", - node: AST{ - Root: Token{ - raw: []rune("foo "), - }, - }, - expectedNode: AST{ - Root: Token{ - raw: []rune("foo"), - }, - }, - }, - { - name: "both sides case", - node: AST{ - Root: Token{ - raw: []rune(" foo "), - }, - }, - expectedNode: AST{ - Root: Token{ - raw: []rune("foo"), - }, - }, - }, - } - - for _, c := range cases { - node := trimSpaces(c.node) - - if e, a := c.expectedNode, node; !reflect.DeepEqual(e, a) { - t.Errorf("%s: expected %v, but received %v", c.name, e, a) - } - } -} diff 
--git a/internal/ini/literal_tokens.go b/internal/ini/literal_tokens.go deleted file mode 100644 index f5a5c9e2e70..00000000000 --- a/internal/ini/literal_tokens.go +++ /dev/null @@ -1,274 +0,0 @@ -package ini - -import ( - "fmt" - "strconv" - "strings" - "unicode" -) - -var ( - runesTrue = []rune("true") - runesFalse = []rune("false") -) - -// isCaselessLitValue is a caseless value comparison, assumes want is already lower-cased for efficiency. -func isCaselessLitValue(want, have []rune) bool { - if len(have) < len(want) { - return false - } - - for i := 0; i < len(want); i++ { - if want[i] != unicode.ToLower(have[i]) { - return false - } - } - - return true -} - -func isValid(b []rune) (bool, int, error) { - if len(b) == 0 { - // TODO: should probably return an error - return false, 0, nil - } - - return isValidRune(b[0]), 1, nil -} - -func isValidRune(r rune) bool { - return r != ':' && r != '=' && r != '[' && r != ']' && r != ' ' && r != '\n' -} - -// ValueType is an enum that will signify what type -// the Value is -type ValueType int - -func (v ValueType) String() string { - switch v { - case NoneType: - return "NONE" - case StringType: - return "STRING" - } - - return "" -} - -// ValueType enums -const ( - NoneType = ValueType(iota) - StringType - QuotedStringType -) - -// Value is a union container -type Value struct { - Type ValueType - raw []rune - - str string - mp map[string]string -} - -func newValue(t ValueType, base int, raw []rune) (Value, error) { - v := Value{ - Type: t, - raw: raw, - } - - switch t { - case StringType: - v.str = string(raw) - if isSubProperty(raw) { - v.mp = v.MapValue() - } - case QuotedStringType: - v.str = string(raw[1 : len(raw)-1]) - } - - return v, nil -} - -// NewStringValue returns a Value type generated using a string input. 
-func NewStringValue(str string) (Value, error) { - return newValue(StringType, 10, []rune(str)) -} - -func (v Value) String() string { - switch v.Type { - case StringType: - return fmt.Sprintf("string: %s", string(v.raw)) - case QuotedStringType: - return fmt.Sprintf("quoted string: %s", string(v.raw)) - default: - return "union not set" - } -} - -func newLitToken(b []rune) (Token, int, error) { - n := 0 - var err error - - token := Token{} - if b[0] == '"' { - n, err = getStringValue(b) - if err != nil { - return token, n, err - } - token = newToken(TokenLit, b[:n], QuotedStringType) - } else if isSubProperty(b) { - offset := 0 - end, err := getSubProperty(b, offset) - if err != nil { - return token, n, err - } - token = newToken(TokenLit, b[offset:end], StringType) - n = end - } else { - n, err = getValue(b) - token = newToken(TokenLit, b[:n], StringType) - } - - return token, n, err -} - -// replace with slices.Contains when Go 1.21 -// is min supported Go version in the SDK -func containsRune(runes []rune, val rune) bool { - for i := range runes { - if val == runes[i] { - return true - } - } - return false -} - -func isSubProperty(runes []rune) bool { - // needs at least - // (1) newline (2) whitespace (3) literal - if len(runes) < 3 { - return false - } - - // must have an equal expression - if !containsRune(runes, '=') && !containsRune(runes, ':') { - return false - } - - // must start with a new line - if !isNewline(runes) { - return false - } - _, n, err := newNewlineToken(runes) - if err != nil { - return false - } - // whitespace must follow newline - return isWhitespace(runes[n]) -} - -// getSubProperty pulls all subproperties and terminates when -// it hits a newline that is not the start of another subproperty. 
-// offset allows for removal of leading newline and whitespace -// characters -func getSubProperty(runes []rune, offset int) (int, error) { - for idx, val := range runes[offset:] { - if val == '\n' && !isSubProperty(runes[offset+idx:]) { - return offset + idx, nil - } - } - return offset + len(runes), nil -} - -// MapValue returns a map value for sub properties -func (v Value) MapValue() map[string]string { - newlineParts := strings.Split(string(v.raw), "\n") - mp := make(map[string]string) - for _, part := range newlineParts { - operandParts := strings.Split(part, "=") - if len(operandParts) < 2 { - continue - } - key := strings.TrimSpace(operandParts[0]) - val := strings.TrimSpace(operandParts[1]) - mp[key] = val - } - return mp -} - -// IntValue returns an integer value -func (v Value) IntValue() (int64, bool) { - i, err := strconv.ParseInt(string(v.raw), 0, 64) - if err != nil { - return 0, false - } - return i, true -} - -// FloatValue returns a float value -func (v Value) FloatValue() (float64, bool) { - f, err := strconv.ParseFloat(string(v.raw), 64) - if err != nil { - return 0, false - } - return f, true -} - -// BoolValue returns a bool value -func (v Value) BoolValue() (bool, bool) { - // we don't use ParseBool as it recognizes more than what we've - // historically supported - if isCaselessLitValue(runesTrue, v.raw) { - return true, true - } else if isCaselessLitValue(runesFalse, v.raw) { - return false, true - } - return false, false -} - -func isTrimmable(r rune) bool { - switch r { - case '\n', ' ': - return true - } - return false -} - -// StringValue returns the string value -func (v Value) StringValue() string { - switch v.Type { - - case StringType: - return strings.TrimFunc(string(v.raw), isTrimmable) - case QuotedStringType: - // preserve all characters in the quotes - return string(removeEscapedCharacters(v.raw[1 : len(v.raw)-1])) - default: - return strings.TrimFunc(string(v.raw), isTrimmable) - } -} - -func contains(runes []rune, c rune) 
bool { - for i := 0; i < len(runes); i++ { - if runes[i] == c { - return true - } - } - - return false -} - -func runeCompare(v1 []rune, v2 []rune) bool { - if len(v1) != len(v2) { - return false - } - - for i := 0; i < len(v1); i++ { - if v1[i] != v2[i] { - return false - } - } - - return true -} diff --git a/internal/ini/literal_tokens_test.go b/internal/ini/literal_tokens_test.go deleted file mode 100644 index 928eac82b8f..00000000000 --- a/internal/ini/literal_tokens_test.go +++ /dev/null @@ -1,142 +0,0 @@ -package ini - -import ( - "reflect" - "testing" -) - -// TODO: test errors -func TestNewLiteralToken(t *testing.T) { - cases := []struct { - name string - b []rune - expectedRead int - expectedToken Token - expectedError bool - }{ - { - name: "numbers", - b: []rune("123"), - expectedRead: 3, - expectedToken: newToken(TokenLit, - []rune("123"), - StringType, - ), - }, - { - name: "decimal", - b: []rune("123.456"), - expectedRead: 7, - expectedToken: newToken(TokenLit, - []rune("123.456"), - StringType, - ), - }, - { - name: "two numbers", - b: []rune("123 456"), - expectedRead: 3, - expectedToken: newToken(TokenLit, - []rune("123"), - StringType, - ), - }, - { - name: "number followed by alpha", - b: []rune("123 abc"), - expectedRead: 3, - expectedToken: newToken(TokenLit, - []rune("123"), - StringType, - ), - }, - { - name: "quoted string followed by number", - b: []rune(`"Hello" 123`), - expectedRead: 7, - expectedToken: newToken(TokenLit, - []rune("Hello"), - QuotedStringType, - ), - }, - { - name: "quoted string", - b: []rune(`"Hello World"`), - expectedRead: 13, - expectedToken: newToken(TokenLit, - []rune("Hello World"), - QuotedStringType, - ), - }, - { - name: "boolean true", - b: []rune("true"), - expectedRead: 4, - expectedToken: newToken(TokenLit, - []rune("true"), - StringType, - ), - }, - { - name: "boolean false", - b: []rune("false"), - expectedRead: 5, - expectedToken: newToken(TokenLit, - []rune("false"), - StringType, - ), - }, - { - name: 
"utf8 whitespace", - b: []rune("0 0"), - expectedRead: 3, - expectedToken: newToken(TokenLit, - []rune("0"), - StringType, - ), - }, - { - name: "utf8 whitespace expr", - b: []rune("0=0 0"), - expectedRead: 1, - expectedToken: newToken(TokenLit, - []rune("0"), - StringType, - ), - }, - } - - for i, c := range cases { - t.Run(c.name, func(t *testing.T) { - tok, n, err := newLitToken(c.b) - - if e, a := c.expectedToken.ValueType, tok.ValueType; !reflect.DeepEqual(e, a) { - t.Errorf("%d: expected %v, but received %v", i+1, e, a) - } - - if e, a := c.expectedRead, n; e != a { - t.Errorf("%d: expected %v, but received %v", i+1, e, a) - } - - if e, a := c.expectedError, err != nil; e != a { - t.Errorf("%d: expected %v, but received %v", i+1, e, a) - } - }) - } -} - -func TestNewStringValue(t *testing.T) { - const expect = "abc123" - - actual, err := NewStringValue(expect) - if err != nil { - t.Fatalf("expect no error, %v", err) - } - - if e, a := StringType, actual.Type; e != a { - t.Errorf("expect %v type got %v", e, a) - } - if e, a := expect, actual.str; e != a { - t.Errorf("expect %v string got %v", e, a) - } -} diff --git a/internal/ini/newline_token.go b/internal/ini/newline_token.go deleted file mode 100644 index e52ac399f17..00000000000 --- a/internal/ini/newline_token.go +++ /dev/null @@ -1,30 +0,0 @@ -package ini - -func isNewline(b []rune) bool { - if len(b) == 0 { - return false - } - - if b[0] == '\n' { - return true - } - - if len(b) < 2 { - return false - } - - return b[0] == '\r' && b[1] == '\n' -} - -func newNewlineToken(b []rune) (Token, int, error) { - i := 1 - if b[0] == '\r' && isNewline(b[1:]) { - i++ - } - - if !isNewline([]rune(b[:i])) { - return emptyToken, 0, NewParseError("invalid new line token") - } - - return newToken(TokenNL, b[:i], NoneType), i, nil -} diff --git a/internal/ini/op_tokens.go b/internal/ini/op_tokens.go deleted file mode 100644 index 8a84c7cbe08..00000000000 --- a/internal/ini/op_tokens.go +++ /dev/null @@ -1,39 +0,0 @@ 
-package ini - -import ( - "fmt" -) - -var ( - equalOp = []rune("=") - equalColonOp = []rune(":") -) - -func isOp(b []rune) bool { - if len(b) == 0 { - return false - } - - switch b[0] { - case '=': - return true - case ':': - return true - default: - return false - } -} - -func newOpToken(b []rune) (Token, int, error) { - tok := Token{} - - switch b[0] { - case '=': - tok = newToken(TokenOp, equalOp, NoneType) - case ':': - tok = newToken(TokenOp, equalColonOp, NoneType) - default: - return tok, 0, NewParseError(fmt.Sprintf("unexpected op type, %v", b[0])) - } - return tok, 1, nil -} diff --git a/internal/ini/op_tokens_test.go b/internal/ini/op_tokens_test.go deleted file mode 100644 index b422f6c332a..00000000000 --- a/internal/ini/op_tokens_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package ini - -import ( - "reflect" - "testing" -) - -func TestIsOp(t *testing.T) { - cases := []struct { - b []rune - expected bool - }{ - { - b: []rune(``), - }, - { - b: []rune("123"), - }, - { - b: []rune(`"wee"`), - }, - { - b: []rune("="), - expected: true, - }, - { - b: []rune(":"), - expected: true, - }, - } - - for i, c := range cases { - if e, a := c.expected, isOp(c.b); e != a { - t.Errorf("%d: expected %t, but received %t", i+0, e, a) - } - } -} - -func TestNewOp(t *testing.T) { - cases := []struct { - b []rune - expectedRead int - expectedError bool - expectedToken Token - }{ - { - b: []rune("="), - expectedRead: 1, - expectedToken: newToken(TokenOp, []rune("="), NoneType), - }, - { - b: []rune(":"), - expectedRead: 1, - expectedToken: newToken(TokenOp, []rune(":"), NoneType), - }, - } - - for i, c := range cases { - tok, n, err := newOpToken(c.b) - - if e, a := c.expectedToken, tok; !reflect.DeepEqual(e, a) { - t.Errorf("%d: expected %v, but received %v", i+1, e, a) - } - - if e, a := c.expectedRead, n; e != a { - t.Errorf("%d: expected %v, but received %v", i+1, e, a) - } - - if e, a := c.expectedError, err != nil; e != a { - t.Errorf("%d: expected %v, but received %v", 
i+1, e, a) - } - } -} diff --git a/internal/ini/parse.go b/internal/ini/parse.go new file mode 100644 index 00000000000..0fcb8ec0763 --- /dev/null +++ b/internal/ini/parse.go @@ -0,0 +1,109 @@ +package ini + +import ( + "fmt" + "strings" +) + +func parse(tokens []lineToken, path string) Sections { + parser := &parser{ + path: path, + sections: NewSections(), + } + parser.parse(tokens) + return parser.sections +} + +type parser struct { + csection, ckey string // current state + path string // source file path + sections Sections // parse result +} + +func (p *parser) parse(tokens []lineToken) { + for _, otok := range tokens { + switch tok := otok.(type) { + case *lineTokenProfile: + p.handleProfile(tok) + case *lineTokenProperty: + p.handleProperty(tok) + case *lineTokenSubProperty: + p.handleSubProperty(tok) + case *lineTokenContinuation: + p.handleContinuation(tok) + } + } +} + +func (p *parser) handleProfile(tok *lineTokenProfile) { + name := tok.Name + if tok.Type != "" { + name = fmt.Sprintf("%s %s", tok.Type, tok.Name) + } + p.ckey = "" + p.csection = name + if _, ok := p.sections.container[name]; !ok { + p.sections.container[name] = NewSection(name) + } +} + +func (p *parser) handleProperty(tok *lineTokenProperty) { + if p.csection == "" { + return // LEGACY: don't error on "global" properties + } + + p.ckey = tok.Key + if _, ok := p.sections.container[p.csection].values[tok.Key]; ok { + section := p.sections.container[p.csection] + section.Logs = append(p.sections.container[p.csection].Logs, + fmt.Sprintf( + "For profile: %v, overriding %v value, with a %v value found in a duplicate profile defined later in the same file %v. 
\n", + p.csection, tok.Key, tok.Key, p.path, + ), + ) + p.sections.container[p.csection] = section + } + + p.sections.container[p.csection].values[tok.Key] = Value{ + str: tok.Value, + } + p.sections.container[p.csection].SourceFile[tok.Key] = p.path +} + +func (p *parser) handleSubProperty(tok *lineTokenSubProperty) { + if p.csection == "" { + return // LEGACY: don't error on "global" properties + } + + if p.ckey == "" || p.sections.container[p.csection].values[p.ckey].str != "" { + // This is an "orphaned" subproperty, either because it's at + // the beginning of a section or because the last property's + // value isn't empty. Either way we're lenient here and + // "promote" this to a normal property. + p.handleProperty(&lineTokenProperty{ + Key: tok.Key, + Value: strings.TrimSpace(trimComment(tok.Value)), + }) + return + } + + if p.sections.container[p.csection].values[p.ckey].mp == nil { + p.sections.container[p.csection].values[p.ckey] = Value{ + mp: map[string]string{}, + } + } + p.sections.container[p.csection].values[p.ckey].mp[tok.Key] = tok.Value +} + +func (p *parser) handleContinuation(tok *lineTokenContinuation) { + if p.ckey == "" { + return + } + + value, _ := p.sections.container[p.csection].values[p.ckey] + if value.str != "" && value.mp == nil { + value.str = fmt.Sprintf("%s\n%s", value.str, tok.Value) + } + + p.sections.container[p.csection].values[p.ckey] = value +} diff --git a/internal/ini/parse_error.go b/internal/ini/parse_error.go deleted file mode 100644 index 30ae0b8f228..00000000000 --- a/internal/ini/parse_error.go +++ /dev/null @@ -1,19 +0,0 @@ -package ini - -// ParseError is an error which is returned during any part of -// the parsing process. -type ParseError struct { - msg string -} - -// NewParseError will return a new ParseError where message -// is the description of the error. 
-func NewParseError(message string) *ParseError { - return &ParseError{ - msg: message, - } -} - -func (err *ParseError) Error() string { - return err.msg -} diff --git a/internal/ini/parse_stack.go b/internal/ini/parse_stack.go deleted file mode 100644 index 7f01cf7c703..00000000000 --- a/internal/ini/parse_stack.go +++ /dev/null @@ -1,60 +0,0 @@ -package ini - -import ( - "bytes" - "fmt" -) - -// ParseStack is a stack that contains a container, the stack portion, -// and the list which is the list of ASTs that have been successfully -// parsed. -type ParseStack struct { - top int - container []AST - list []AST - index int -} - -func newParseStack(sizeContainer, sizeList int) ParseStack { - return ParseStack{ - container: make([]AST, sizeContainer), - list: make([]AST, sizeList), - } -} - -// Pop will return and truncate the last container element. -func (s *ParseStack) Pop() AST { - s.top-- - return s.container[s.top] -} - -// Push will add the new AST to the container -func (s *ParseStack) Push(ast AST) { - s.container[s.top] = ast - s.top++ -} - -// MarkComplete will append the AST to the list of completed statements -func (s *ParseStack) MarkComplete(ast AST) { - s.list[s.index] = ast - s.index++ -} - -// List will return the completed statements -func (s ParseStack) List() []AST { - return s.list[:s.index] -} - -// Len will return the length of the container -func (s *ParseStack) Len() int { - return s.top -} - -func (s ParseStack) String() string { - buf := bytes.Buffer{} - for i, node := range s.list { - buf.WriteString(fmt.Sprintf("%d: %v\n", i+1, node)) - } - - return buf.String() -} diff --git a/internal/ini/parse_stack_test.go b/internal/ini/parse_stack_test.go deleted file mode 100644 index af3d999989d..00000000000 --- a/internal/ini/parse_stack_test.go +++ /dev/null @@ -1,59 +0,0 @@ -package ini - -import ( - "reflect" - "testing" -) - -func newMockAST(v []rune) AST { - return newASTWithRootToken(ASTKindNone, Token{raw: v}) -} - -func TestStack(t 
*testing.T) { - cases := []struct { - asts []AST - expected []AST - }{ - { - asts: []AST{ - newMockAST([]rune("0")), - newMockAST([]rune("1")), - newMockAST([]rune("2")), - newMockAST([]rune("3")), - newMockAST([]rune("4")), - }, - expected: []AST{ - newMockAST([]rune("0")), - newMockAST([]rune("1")), - newMockAST([]rune("2")), - newMockAST([]rune("3")), - newMockAST([]rune("4")), - }, - }, - } - - for _, c := range cases { - p := newParseStack(10, 10) - for _, ast := range c.asts { - p.Push(ast) - p.MarkComplete(ast) - } - - if e, a := len(c.expected), p.Len(); e != a { - t.Errorf("expected the same legnth with %d, but received %d", e, a) - } - for i := len(c.expected) - 1; i >= 0; i-- { - if e, a := c.expected[i], p.Pop(); !reflect.DeepEqual(e, a) { - t.Errorf("stack element %d invalid: expected %v, but received %v", i, e, a) - } - } - - if e, a := len(c.expected), p.index; e != a { - t.Errorf("expected %d, but received %d", e, a) - } - - if e, a := c.asts, p.list[:p.index]; !reflect.DeepEqual(e, a) { - t.Errorf("expected %v, but received %v", e, a) - } - } -} diff --git a/internal/ini/sections.go b/internal/ini/sections.go new file mode 100644 index 00000000000..dd89848e696 --- /dev/null +++ b/internal/ini/sections.go @@ -0,0 +1,157 @@ +package ini + +import ( + "sort" +) + +// Sections is a map of Section structures that represent +// a configuration. +type Sections struct { + container map[string]Section +} + +// NewSections returns empty ini Sections +func NewSections() Sections { + return Sections{ + container: make(map[string]Section, 0), + } +} + +// GetSection will return section p. If section p does not exist, +// false will be returned in the second parameter. +func (t Sections) GetSection(p string) (Section, bool) { + v, ok := t.container[p] + return v, ok +} + +// HasSection denotes if Sections consist of a section with +// provided name. 
+func (t Sections) HasSection(p string) bool { + _, ok := t.container[p] + return ok +} + +// SetSection sets a section value for provided section name. +func (t Sections) SetSection(p string, v Section) Sections { + t.container[p] = v + return t +} + +// DeleteSection deletes a section entry/value for provided section name./ +func (t Sections) DeleteSection(p string) { + delete(t.container, p) +} + +// values represents a map of union values. +type values map[string]Value + +// List will return a list of all sections that were successfully +// parsed. +func (t Sections) List() []string { + keys := make([]string, len(t.container)) + i := 0 + for k := range t.container { + keys[i] = k + i++ + } + + sort.Strings(keys) + return keys +} + +// Section contains a name and values. This represent +// a sectioned entry in a configuration file. +type Section struct { + // Name is the Section profile name + Name string + + // values are the values within parsed profile + values values + + // Errors is the list of errors + Errors []error + + // Logs is the list of logs + Logs []string + + // SourceFile is the INI Source file from where this section + // was retrieved. They key is the property, value is the + // source file the property was retrieved from. + SourceFile map[string]string +} + +// NewSection returns an initialize section for the name +func NewSection(name string) Section { + return Section{ + Name: name, + values: values{}, + SourceFile: map[string]string{}, + } +} + +// List will return a list of all +// services in values +func (t Section) List() []string { + keys := make([]string, len(t.values)) + i := 0 + for k := range t.values { + keys[i] = k + i++ + } + + sort.Strings(keys) + return keys +} + +// UpdateSourceFile updates source file for a property to provided filepath. 
+func (t Section) UpdateSourceFile(property string, filepath string) { + t.SourceFile[property] = filepath +} + +// UpdateValue updates value for a provided key with provided value +func (t Section) UpdateValue(k string, v Value) error { + t.values[k] = v + return nil +} + +// Has will return whether or not an entry exists in a given section +func (t Section) Has(k string) bool { + _, ok := t.values[k] + return ok +} + +// ValueType will returned what type the union is set to. If +// k was not found, the NoneType will be returned. +func (t Section) ValueType(k string) (ValueType, bool) { + v, ok := t.values[k] + return v.Type, ok +} + +// Bool returns a bool value at k +func (t Section) Bool(k string) (bool, bool) { + return t.values[k].BoolValue() +} + +// Int returns an integer value at k +func (t Section) Int(k string) (int64, bool) { + return t.values[k].IntValue() +} + +// Map returns a map value at k +func (t Section) Map(k string) map[string]string { + return t.values[k].MapValue() +} + +// Float64 returns a float value at k +func (t Section) Float64(k string) (float64, bool) { + return t.values[k].FloatValue() +} + +// String returns the string value at k +func (t Section) String(k string) string { + _, ok := t.values[k] + if !ok { + return "" + } + return t.values[k].StringValue() +} diff --git a/internal/ini/sep_tokens.go b/internal/ini/sep_tokens.go deleted file mode 100644 index f82095ba259..00000000000 --- a/internal/ini/sep_tokens.go +++ /dev/null @@ -1,41 +0,0 @@ -package ini - -import ( - "fmt" -) - -var ( - emptyRunes = []rune{} -) - -func isSep(b []rune) bool { - if len(b) == 0 { - return false - } - - switch b[0] { - case '[', ']': - return true - default: - return false - } -} - -var ( - openBrace = []rune("[") - closeBrace = []rune("]") -) - -func newSepToken(b []rune) (Token, int, error) { - tok := Token{} - - switch b[0] { - case '[': - tok = newToken(TokenSep, openBrace, NoneType) - case ']': - tok = newToken(TokenSep, closeBrace, NoneType) 
- default: - return tok, 0, NewParseError(fmt.Sprintf("unexpected sep type, %v", b[0])) - } - return tok, 1, nil -} diff --git a/internal/ini/sep_tokens_test.go b/internal/ini/sep_tokens_test.go deleted file mode 100644 index 24094c3c966..00000000000 --- a/internal/ini/sep_tokens_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package ini - -import ( - "reflect" - "testing" -) - -func TestIsSep(t *testing.T) { - cases := []struct { - b []rune - expected bool - }{ - { - b: []rune(``), - }, - { - b: []rune(`"wee"`), - }, - { - b: []rune("["), - expected: true, - }, - { - b: []rune("]"), - expected: true, - }, - } - - for i, c := range cases { - if e, a := c.expected, isSep(c.b); e != a { - t.Errorf("%d: expected %t, but received %t", i+0, e, a) - } - } -} - -func TestNewSep(t *testing.T) { - cases := []struct { - b []rune - expectedRead int - expectedError bool - expectedToken Token - }{ - { - b: []rune("["), - expectedRead: 1, - expectedToken: newToken(TokenSep, []rune("["), NoneType), - }, - { - b: []rune("]"), - expectedRead: 1, - expectedToken: newToken(TokenSep, []rune("]"), NoneType), - }, - } - - for i, c := range cases { - tok, n, err := newSepToken(c.b) - - if e, a := c.expectedToken, tok; !reflect.DeepEqual(e, a) { - t.Errorf("%d: expected %v, but received %v", i+1, e, a) - } - - if e, a := c.expectedRead, n; e != a { - t.Errorf("%d: expected %v, but received %v", i+1, e, a) - } - - if e, a := c.expectedError, err != nil; e != a { - t.Errorf("%d: expected %v, but received %v", i+1, e, a) - } - } -} diff --git a/internal/ini/skipper.go b/internal/ini/skipper.go deleted file mode 100644 index 07e90876a4a..00000000000 --- a/internal/ini/skipper.go +++ /dev/null @@ -1,45 +0,0 @@ -package ini - -// skipper is used to skip certain blocks of an ini file. -// Currently skipper is used to skip nested blocks of ini -// files. 
See example below -// -// [ foo ] -// nested = ; this section will be skipped -// a=b -// c=d -// bar=baz ; this will be included -type skipper struct { - shouldSkip bool - TokenSet bool - prevTok Token -} - -func newSkipper() skipper { - return skipper{ - prevTok: emptyToken, - } -} - -func (s *skipper) ShouldSkip(tok Token) bool { - // should skip state will be modified only if previous token was new line (NL); - // and the current token is not WhiteSpace (WS). - if s.shouldSkip && - s.prevTok.Type() == TokenNL && - tok.Type() != TokenWS { - s.Continue() - return false - } - - s.prevTok = tok - return s.shouldSkip -} - -func (s *skipper) Skip() { - s.shouldSkip = true -} - -func (s *skipper) Continue() { - s.shouldSkip = false - s.prevTok = emptyToken -} diff --git a/internal/ini/skipper_test.go b/internal/ini/skipper_test.go deleted file mode 100644 index 82f6a712c93..00000000000 --- a/internal/ini/skipper_test.go +++ /dev/null @@ -1,85 +0,0 @@ -package ini - -import ( - "reflect" - "testing" -) - -func TestSkipper(t *testing.T) { - idTok, _, _ := newLitToken([]rune("id")) - nlTok := newToken(TokenNL, []rune("\n"), NoneType) - - cases := []struct { - name string - Fn func(s *skipper) - param Token - expected bool - expectedShouldSkip bool - expectedPrevTok Token - }{ - { - name: "empty case", - Fn: func(s *skipper) { - }, - param: emptyToken, - expectedPrevTok: emptyToken, - }, - { - name: "skip case", - Fn: func(s *skipper) { - s.Skip() - }, - param: idTok, - expectedShouldSkip: true, - expected: true, - expectedPrevTok: emptyToken, - }, - { - name: "continue case", - Fn: func(s *skipper) { - s.Continue() - }, - param: emptyToken, - expectedPrevTok: emptyToken, - }, - { - name: "skip then continue case", - Fn: func(s *skipper) { - s.Skip() - s.Continue() - }, - param: emptyToken, - expectedPrevTok: emptyToken, - }, - { - name: "do not skip case", - Fn: func(s *skipper) { - s.Skip() - s.prevTok = nlTok - }, - param: idTok, - expectedShouldSkip: true, - 
expectedPrevTok: nlTok, - }, - } - - for _, c := range cases { - - t.Run(c.name, func(t *testing.T) { - s := newSkipper() - c.Fn(&s) - - if e, a := c.expectedShouldSkip, s.shouldSkip; e != a { - t.Errorf("%s: expected %t, but received %t", c.name, e, a) - } - - if e, a := c.expectedPrevTok, s.prevTok; !reflect.DeepEqual(e, a) { - t.Errorf("%s: expected %v, but received %v", c.name, e, a) - } - - if e, a := c.expected, s.ShouldSkip(c.param); e != a { - t.Errorf("%s: expected %t, but received %t", c.name, e, a) - } - }) - } -} diff --git a/internal/ini/statement.go b/internal/ini/statement.go deleted file mode 100644 index ba0af01b53b..00000000000 --- a/internal/ini/statement.go +++ /dev/null @@ -1,35 +0,0 @@ -package ini - -// Statement is an empty AST mostly used for transitioning states. -func newStatement() AST { - return newAST(ASTKindStatement, AST{}) -} - -// SectionStatement represents a section AST -func newSectionStatement(tok Token) AST { - return newASTWithRootToken(ASTKindSectionStatement, tok) -} - -// ExprStatement represents a completed expression AST -func newExprStatement(ast AST) AST { - return newAST(ASTKindExprStatement, ast) -} - -// CommentStatement represents a comment in the ini defintion. 
-// -// grammar: -// comment -> #comment' | ;comment' -// comment' -> epsilon | value -func newCommentStatement(tok Token) AST { - return newAST(ASTKindCommentStatement, newExpression(tok)) -} - -// CompletedSectionStatement represents a completed section -func newCompletedSectionStatement(ast AST) AST { - return newAST(ASTKindCompletedSectionStatement, ast) -} - -// SkipStatement is used to skip whole statements -func newSkipStatement(ast AST) AST { - return newAST(ASTKindSkipStatement, ast) -} diff --git a/internal/ini/strings.go b/internal/ini/strings.go new file mode 100644 index 00000000000..478239a2505 --- /dev/null +++ b/internal/ini/strings.go @@ -0,0 +1,83 @@ +package ini + +import "strings" + +func trimComment(v string) string { + rest, _, _ := strings.Cut(v, "#") + rest, _, _ = strings.Cut(rest, ";") + return rest +} + +// assumes no surrounding comment +func splitProperty(s string) (string, string, bool) { + equalsi := strings.Index(s, "=") + coloni := strings.Index(s, ":") // LEGACY: also supported for property assignment + sep := "=" + if equalsi == -1 || coloni != -1 && coloni < equalsi { + sep = ":" + } + + k, v, ok := strings.Cut(s, sep) + if !ok { + return "", "", false + } + return strings.TrimSpace(k), strings.TrimSpace(v), true +} + +// assumes no surrounding comment, whitespace, or profile brackets +func splitProfile(s string) (string, string) { + var first int + for i, r := range s { + if isLineSpace(r) { + if first == 0 { + first = i + } + } else { + if first != 0 { + return s[:first], s[i:] + } + } + } + if first == 0 { + return "", s // type component is effectively blank + } + return "", "" +} + +func isLineSpace(r rune) bool { + return r == ' ' || r == '\t' +} + +func unquote(s string) string { + if isSingleQuoted(s) || isDoubleQuoted(s) { + return s[1 : len(s)-1] + } + return s +} + +// applies various legacy conversions to property values: +// - remote wrapping single/doublequotes +// - expand escaped quote and newline sequences +func 
legacyStrconv(s string) string { + s = unquote(s) + s = strings.ReplaceAll(s, `\"`, `"`) + s = strings.ReplaceAll(s, `\'`, `'`) + s = strings.ReplaceAll(s, `\n`, "\n") + return s +} + +func isSingleQuoted(s string) bool { + return hasAffixes(s, "'", "'") +} + +func isDoubleQuoted(s string) bool { + return hasAffixes(s, `"`, `"`) +} + +func isBracketed(s string) bool { + return hasAffixes(s, "[", "]") +} + +func hasAffixes(s, left, right string) bool { + return strings.HasPrefix(s, left) && strings.HasSuffix(s, right) +} diff --git a/internal/ini/testdata/invalid/bad_section_name b/internal/ini/testdata/invalid/bad_section_name deleted file mode 100644 index ac64f5dd685..00000000000 --- a/internal/ini/testdata/invalid/bad_section_name +++ /dev/null @@ -1 +0,0 @@ -[ :=foo ] diff --git a/internal/ini/testdata/invalid/bad_syntax_1 b/internal/ini/testdata/invalid/bad_syntax_1 deleted file mode 100644 index 6416ab898aa..00000000000 --- a/internal/ini/testdata/invalid/bad_syntax_1 +++ /dev/null @@ -1 +0,0 @@ -[[ foo ] diff --git a/internal/ini/testdata/invalid/bad_syntax_2 b/internal/ini/testdata/invalid/bad_syntax_2 deleted file mode 100644 index 4d3de17351b..00000000000 --- a/internal/ini/testdata/invalid/bad_syntax_2 +++ /dev/null @@ -1 +0,0 @@ -[ foo ]] diff --git a/internal/ini/testdata/invalid/incomplete_section_profile b/internal/ini/testdata/invalid/incomplete_section_profile deleted file mode 100644 index f65d472ca69..00000000000 --- a/internal/ini/testdata/invalid/incomplete_section_profile +++ /dev/null @@ -1 +0,0 @@ -[ default diff --git a/internal/ini/testdata/invalid/invalid_keys b/internal/ini/testdata/invalid/invalid_keys deleted file mode 100644 index b4978931da9..00000000000 --- a/internal/ini/testdata/invalid/invalid_keys +++ /dev/null @@ -1,2 +0,0 @@ -[assumerole] -key[id] = value diff --git a/internal/ini/testdata/invalid/syntax_error_comment b/internal/ini/testdata/invalid/syntax_error_comment deleted file mode 100644 index 4d55ec3378c..00000000000 
--- a/internal/ini/testdata/invalid/syntax_error_comment +++ /dev/null @@ -1 +0,0 @@ -[ default #] diff --git a/internal/ini/testdata/valid/nested_fields b/internal/ini/testdata/valid/nested_fields index b8c7380f77c..365259508c0 100644 --- a/internal/ini/testdata/valid/nested_fields +++ b/internal/ini/testdata/valid/nested_fields @@ -1,12 +1,17 @@ [foo] aws_access_key_id = - aws_secret_access_key = valid -aws_session_token = valid -[bar] -aws_access_key_id = valid -aws_secret_access_key = valid + aws_secret_access_key = valid;comment + aws_secret_access_key2 = valid2 aws_session_token = valid + [bar] + aws_access_key_id = valid ; comment + aws_secret_access_key = valid + aws_session_token = valid +not_nested = i +mp = + a = b + b=c [baz] -aws_access_key_id = valid -aws_secret_access_key = valid + aws_access_key_id = + aws_secret_access_key = valid aws_session_token = valid diff --git a/internal/ini/testdata/valid/nested_fields_expected b/internal/ini/testdata/valid/nested_fields_expected index c51a24cbdea..f4c4e4cf532 100644 --- a/internal/ini/testdata/valid/nested_fields_expected +++ b/internal/ini/testdata/valid/nested_fields_expected @@ -1,16 +1,17 @@ { "foo": { - "aws_access_key_id": "map[aws_secret_access_key:valid]", + "aws_access_key_id": "map[aws_secret_access_key:valid;comment aws_secret_access_key2:valid2]", "aws_session_token": "valid" }, "bar": { "aws_access_key_id": "valid", "aws_secret_access_key": "valid", - "aws_session_token": "valid" + "aws_session_token": "valid", + "not_nested": "i", + "mp": "map[a:b b:c]" }, "baz": { - "aws_access_key_id": "valid", - "aws_secret_access_key": "valid", + "aws_access_key_id": "map[aws_secret_access_key:valid]", "aws_session_token": "valid" } -} \ No newline at end of file +} diff --git a/internal/ini/testdata/valid/op_sep_in_values_expected b/internal/ini/testdata/valid/op_sep_in_values_expected index 7a85944b1ad..a327bab516c 100644 --- a/internal/ini/testdata/valid/op_sep_in_values_expected +++ 
b/internal/ini/testdata/valid/op_sep_in_values_expected @@ -19,7 +19,9 @@ "key": "value5" }, "case6": { - "s3": "map[key:valuen6]", + "s3": "" + }, + "nested6": { "key": "=value6" }, "case7": { diff --git a/internal/ini/token.go b/internal/ini/token.go new file mode 100644 index 00000000000..6e9a03744e0 --- /dev/null +++ b/internal/ini/token.go @@ -0,0 +1,32 @@ +package ini + +type lineToken interface { + isLineToken() +} + +type lineTokenProfile struct { + Type string + Name string +} + +func (*lineTokenProfile) isLineToken() {} + +type lineTokenProperty struct { + Key string + Value string +} + +func (*lineTokenProperty) isLineToken() {} + +type lineTokenContinuation struct { + Value string +} + +func (*lineTokenContinuation) isLineToken() {} + +type lineTokenSubProperty struct { + Key string + Value string +} + +func (*lineTokenSubProperty) isLineToken() {} diff --git a/internal/ini/tokenize.go b/internal/ini/tokenize.go new file mode 100644 index 00000000000..9778a1738b3 --- /dev/null +++ b/internal/ini/tokenize.go @@ -0,0 +1,91 @@ +package ini + +import ( + "strings" +) + +func tokenize(lines []string) ([]lineToken, error) { + tokens := make([]lineToken, 0, len(lines)) + for _, line := range lines { + if len(strings.TrimSpace(line)) == 0 || isLineComment(line) { + continue + } + + if tok := asProfile(line); tok != nil { + tokens = append(tokens, tok) + } else if tok := asProperty(line); tok != nil { + tokens = append(tokens, tok) + } else if tok := asSubProperty(line); tok != nil { + tokens = append(tokens, tok) + } else if tok := asContinuation(line); tok != nil { + tokens = append(tokens, tok) + } // unrecognized tokens are effectively ignored + } + return tokens, nil +} + +func isLineComment(line string) bool { + trimmed := strings.TrimLeft(line, " \t") + return strings.HasPrefix(trimmed, "#") || strings.HasPrefix(trimmed, ";") +} + +func asProfile(line string) *lineTokenProfile { // " [ type name ] ; comment" + trimmed := 
strings.TrimSpace(trimComment(line)) // "[ type name ]" + if !isBracketed(trimmed) { + return nil + } + trimmed = trimmed[1 : len(trimmed)-1] // " type name " (or just " name ") + trimmed = strings.TrimSpace(trimmed) // "type name" / "name" + typ, name := splitProfile(trimmed) + return &lineTokenProfile{ + Type: typ, + Name: name, + } +} + +func asProperty(line string) *lineTokenProperty { + if isLineSpace(rune(line[0])) { + return nil + } + + trimmed := strings.TrimRight(trimComment(line), " \t") + k, v, ok := splitProperty(trimmed) + if !ok { + return nil + } + + return &lineTokenProperty{ + Key: strings.ToLower(k), // LEGACY: normalize key case + Value: legacyStrconv(v), // LEGACY: see func docs + } +} + +func asSubProperty(line string) *lineTokenSubProperty { + if !isLineSpace(rune(line[0])) { + return nil + } + + // comments on sub-properties are included in the value + trimmed := strings.TrimLeft(line, " \t") + k, v, ok := splitProperty(trimmed) + if !ok { + return nil + } + + return &lineTokenSubProperty{ // same LEGACY constraints as in normal property + Key: strings.ToLower(k), + Value: legacyStrconv(v), + } +} + +func asContinuation(line string) *lineTokenContinuation { + if !isLineSpace(rune(line[0])) { + return nil + } + + // includes comments like sub-properties + trimmed := strings.TrimLeft(line, " \t") + return &lineTokenContinuation{ + Value: trimmed, + } +} diff --git a/internal/ini/value.go b/internal/ini/value.go new file mode 100644 index 00000000000..ade75bf34e4 --- /dev/null +++ b/internal/ini/value.go @@ -0,0 +1,104 @@ +package ini + +import ( + "fmt" + "strconv" + "strings" +) + +// ValueType is an enum that will signify what type +// the Value is +type ValueType int + +func (v ValueType) String() string { + switch v { + case NoneType: + return "NONE" + case StringType: + return "STRING" + } + + return "" +} + +// ValueType enums +const ( + NoneType = ValueType(iota) + StringType + QuotedStringType +) + +// Value is a union container +type 
Value struct { + Type ValueType + + str string + mp map[string]string +} + +// NewStringValue returns a Value type generated using a string input. +func NewStringValue(str string) (Value, error) { + return Value{str: str}, nil +} + +func (v Value) String() string { + switch v.Type { + case StringType: + return fmt.Sprintf("string: %s", string(v.str)) + case QuotedStringType: + return fmt.Sprintf("quoted string: %s", string(v.str)) + default: + return "union not set" + } +} + +// MapValue returns a map value for sub properties +func (v Value) MapValue() map[string]string { + newlineParts := strings.Split(string(v.str), "\n") + mp := make(map[string]string) + for _, part := range newlineParts { + operandParts := strings.Split(part, "=") + if len(operandParts) < 2 { + continue + } + key := strings.TrimSpace(operandParts[0]) + val := strings.TrimSpace(operandParts[1]) + mp[key] = val + } + return mp +} + +// IntValue returns an integer value +func (v Value) IntValue() (int64, bool) { + i, err := strconv.ParseInt(string(v.str), 0, 64) + if err != nil { + return 0, false + } + return i, true +} + +// FloatValue returns a float value +func (v Value) FloatValue() (float64, bool) { + f, err := strconv.ParseFloat(string(v.str), 64) + if err != nil { + return 0, false + } + return f, true +} + +// BoolValue returns a bool value +func (v Value) BoolValue() (bool, bool) { + // we don't use ParseBool as it recognizes more than what we've + // historically supported + if strings.EqualFold(v.str, "true") { + return true, true + } else if strings.EqualFold(v.str, "false") { + return false, true + } + return false, false +} + +// StringValue returns the string value +func (v Value) StringValue() string { + return v.str +} diff --git a/internal/ini/value_util.go b/internal/ini/value_util.go deleted file mode 100644 index d38a9cd4857..00000000000 --- a/internal/ini/value_util.go +++ /dev/null @@ -1,123 +0,0 @@ -package ini - -import ( - "fmt" -) - -// getStringValue will return a 
quoted string and the amount -// of bytes read -// -// an error will be returned if the string is not properly formatted -func getStringValue(b []rune) (int, error) { - if b[0] != '"' { - return 0, NewParseError("strings must start with '\"'") - } - - endQuote := false - i := 1 - - for ; i < len(b) && !endQuote; i++ { - if escaped := isEscaped(b[:i], b[i]); b[i] == '"' && !escaped { - endQuote = true - break - } else if escaped { - /*c, err := getEscapedByte(b[i]) - if err != nil { - return 0, err - } - - b[i-1] = c - b = append(b[:i], b[i+1:]...) - i--*/ - - continue - } - } - - if !endQuote { - return 0, NewParseError("missing '\"' in string value") - } - - return i + 1, nil -} - -func getValue(b []rune) (int, error) { - i := 0 - - for i < len(b) { - if isNewline(b[i:]) { - break - } - - if isOp(b[i:]) { - break - } - - valid, n, err := isValid(b[i:]) - if err != nil { - return 0, err - } - - if !valid { - break - } - - i += n - } - - return i, nil -} - -// isEscaped will return whether or not the character is an escaped -// character. -func isEscaped(value []rune, b rune) bool { - if len(value) == 0 { - return false - } - - switch b { - case '\'': // single quote - case '"': // quote - case 'n': // newline - case 't': // tab - case '\\': // backslash - default: - return false - } - - return value[len(value)-1] == '\\' -} - -func getEscapedByte(b rune) (rune, error) { - switch b { - case '\'': // single quote - return '\'', nil - case '"': // quote - return '"', nil - case 'n': // newline - return '\n', nil - case 't': // table - return '\t', nil - case '\\': // backslash - return '\\', nil - default: - return b, NewParseError(fmt.Sprintf("invalid escaped character %c", b)) - } -} - -func removeEscapedCharacters(b []rune) []rune { - for i := 0; i < len(b); i++ { - if isEscaped(b[:i], b[i]) { - c, err := getEscapedByte(b[i]) - if err != nil { - return b - } - - b[i-1] = c - b = append(b[:i], b[i+1:]...) 
- i-- - } - } - - return b -} diff --git a/internal/ini/value_util_test.go b/internal/ini/value_util_test.go deleted file mode 100644 index b24aee67618..00000000000 --- a/internal/ini/value_util_test.go +++ /dev/null @@ -1,49 +0,0 @@ -package ini - -import ( - "testing" -) - -func TestStringValue(t *testing.T) { - cases := []struct { - b []rune - expectedRead int - expectedError bool - expectedValue string - }{ - { - b: []rune(`"foo"`), - expectedRead: 5, - expectedValue: `"foo"`, - }, - { - b: []rune(`"123 !$_ 456 abc"`), - expectedRead: 17, - expectedValue: `"123 !$_ 456 abc"`, - }, - { - b: []rune("foo"), - expectedError: true, - }, - { - b: []rune(` "foo"`), - expectedError: true, - }, - } - - for i, c := range cases { - n, err := getStringValue(c.b) - - if e, a := c.expectedValue, string(c.b[:n]); e != a { - t.Errorf("%d: expected %v, but received %v", i, e, a) - } - - if e, a := c.expectedRead, n; e != a { - t.Errorf("%d: expected %v, but received %v", i, e, a) - } - - if e, a := c.expectedError, err != nil; e != a { - t.Errorf("%d: expected %v, but received %v", i, e, a) - } - } -} diff --git a/internal/ini/visitor.go b/internal/ini/visitor.go deleted file mode 100644 index c124ad61030..00000000000 --- a/internal/ini/visitor.go +++ /dev/null @@ -1,288 +0,0 @@ -package ini - -import ( - "fmt" - "sort" - "strings" -) - -// Visitor is an interface used by walkers that will -// traverse an array of ASTs. -type Visitor interface { - VisitExpr(AST) error - VisitStatement(AST) error -} - -// DefaultVisitor is used to visit statements and expressions -// and ensure that they are both of the correct format. -// In addition, upon visiting this will build sections and populate -// the Sections field which can be used to retrieve profile -// configuration. 
-type DefaultVisitor struct { - - // scope is the profile which is being visited - scope string - - // path is the file path which the visitor is visiting - path string - - // Sections defines list of the profile section - Sections Sections -} - -// NewDefaultVisitor returns a DefaultVisitor. It takes in a filepath -// which points to the file it is visiting. -func NewDefaultVisitor(filepath string) *DefaultVisitor { - return &DefaultVisitor{ - Sections: Sections{ - container: map[string]Section{}, - }, - path: filepath, - } -} - -// VisitExpr visits expressions... -func (v *DefaultVisitor) VisitExpr(expr AST) error { - t := v.Sections.container[v.scope] - if t.values == nil { - t.values = values{} - } - if t.SourceFile == nil { - t.SourceFile = make(map[string]string, 0) - } - - switch expr.Kind { - case ASTKindExprStatement: - opExpr := expr.GetRoot() - switch opExpr.Kind { - case ASTKindEqualExpr: - children := opExpr.GetChildren() - if len(children) <= 1 { - return NewParseError("unexpected token type") - } - - rhs := children[1] - - // The right-hand value side the equality expression is allowed to contain '[', ']', ':', '=' in the values. - // If the token is not either a literal or one of the token types that identifies those four additional - // tokens then error. - if !(rhs.Root.Type() == TokenLit || rhs.Root.Type() == TokenOp || rhs.Root.Type() == TokenSep) { - return NewParseError("unexpected token type") - } - - key := EqualExprKey(opExpr) - val, err := newValue(rhs.Root.ValueType, rhs.Root.base, rhs.Root.Raw()) - if err != nil { - return err - } - - // lower case key to standardize - k := strings.ToLower(key) - - // identify if the section already had this key, append log on section - if t.Has(k) { - t.Logs = append(t.Logs, - fmt.Sprintf("For profile: %v, overriding %v value, "+ - "with a %v value found in a duplicate profile defined later in the same file %v. 
\n", - t.Name, k, k, v.path)) - } - - // assign the value - t.values[k] = val - // update the source file path for region - t.SourceFile[k] = v.path - default: - return NewParseError(fmt.Sprintf("unsupported expression %v", expr)) - } - default: - return NewParseError(fmt.Sprintf("unsupported expression %v", expr)) - } - - v.Sections.container[v.scope] = t - return nil -} - -// VisitStatement visits statements... -func (v *DefaultVisitor) VisitStatement(stmt AST) error { - switch stmt.Kind { - case ASTKindCompletedSectionStatement: - child := stmt.GetRoot() - if child.Kind != ASTKindSectionStatement { - return NewParseError(fmt.Sprintf("unsupported child statement: %T", child)) - } - - name := string(child.Root.Raw()) - - // trim start and end space - name = strings.TrimSpace(name) - - // if has prefix "profile " + [ws+] + "profile-name", - // we standardize by removing the [ws+] between prefix and profile-name. - if strings.HasPrefix(name, "profile ") { - names := strings.SplitN(name, " ", 2) - name = names[0] + " " + strings.TrimLeft(names[1], " ") - } - - // attach profile name on section - if !v.Sections.HasSection(name) { - v.Sections.container[name] = NewSection(name) - } - v.scope = name - default: - return NewParseError(fmt.Sprintf("unsupported statement: %s", stmt.Kind)) - } - - return nil -} - -// Sections is a map of Section structures that represent -// a configuration. -type Sections struct { - container map[string]Section -} - -// NewSections returns empty ini Sections -func NewSections() Sections { - return Sections{ - container: make(map[string]Section, 0), - } -} - -// GetSection will return section p. If section p does not exist, -// false will be returned in the second parameter. -func (t Sections) GetSection(p string) (Section, bool) { - v, ok := t.container[p] - return v, ok -} - -// HasSection denotes if Sections consist of a section with -// provided name. 
-func (t Sections) HasSection(p string) bool { - _, ok := t.container[p] - return ok -} - -// SetSection sets a section value for provided section name. -func (t Sections) SetSection(p string, v Section) Sections { - t.container[p] = v - return t -} - -// DeleteSection deletes a section entry/value for provided section name./ -func (t Sections) DeleteSection(p string) { - delete(t.container, p) -} - -// values represents a map of union values. -type values map[string]Value - -// List will return a list of all sections that were successfully -// parsed. -func (t Sections) List() []string { - keys := make([]string, len(t.container)) - i := 0 - for k := range t.container { - keys[i] = k - i++ - } - - sort.Strings(keys) - return keys -} - -// Section contains a name and values. This represent -// a sectioned entry in a configuration file. -type Section struct { - // Name is the Section profile name - Name string - - // values are the values within parsed profile - values values - - // Errors is the list of errors - Errors []error - - // Logs is the list of logs - Logs []string - - // SourceFile is the INI Source file from where this section - // was retrieved. They key is the property, value is the - // source file the property was retrieved from. - SourceFile map[string]string -} - -// NewSection returns an initialize section for the name -func NewSection(name string) Section { - return Section{ - Name: name, - values: values{}, - SourceFile: map[string]string{}, - } -} - -// List will return a list of all -// services in values -func (t Section) List() []string { - keys := make([]string, len(t.values)) - i := 0 - for k := range t.values { - keys[i] = k - i++ - } - - sort.Strings(keys) - return keys -} - -// UpdateSourceFile updates source file for a property to provided filepath. 
-func (t Section) UpdateSourceFile(property string, filepath string) { - t.SourceFile[property] = filepath -} - -// UpdateValue updates value for a provided key with provided value -func (t Section) UpdateValue(k string, v Value) error { - t.values[k] = v - return nil -} - -// Has will return whether or not an entry exists in a given section -func (t Section) Has(k string) bool { - _, ok := t.values[k] - return ok -} - -// ValueType will returned what type the union is set to. If -// k was not found, the NoneType will be returned. -func (t Section) ValueType(k string) (ValueType, bool) { - v, ok := t.values[k] - return v.Type, ok -} - -// Bool returns a bool value at k -func (t Section) Bool(k string) (bool, bool) { - return t.values[k].BoolValue() -} - -// Int returns an integer value at k -func (t Section) Int(k string) (int64, bool) { - return t.values[k].IntValue() -} - -// Map returns a map value at k -func (t Section) Map(k string) map[string]string { - return t.values[k].MapValue() -} - -// Float64 returns a float value at k -func (t Section) Float64(k string) (float64, bool) { - return t.values[k].FloatValue() -} - -// String returns the string value at k -func (t Section) String(k string) string { - _, ok := t.values[k] - if !ok { - return "" - } - return t.values[k].StringValue() -} diff --git a/internal/ini/walker.go b/internal/ini/walker.go deleted file mode 100644 index 99915f7f777..00000000000 --- a/internal/ini/walker.go +++ /dev/null @@ -1,25 +0,0 @@ -package ini - -// Walk will traverse the AST using the v, the Visitor. 
-func Walk(tree []AST, v Visitor) error { - for _, node := range tree { - switch node.Kind { - case ASTKindExpr, - ASTKindExprStatement: - - if err := v.VisitExpr(node); err != nil { - return err - } - case ASTKindStatement, - ASTKindCompletedSectionStatement, - ASTKindNestedSectionStatement, - ASTKindCompletedNestedSectionStatement: - - if err := v.VisitStatement(node); err != nil { - return err - } - } - } - - return nil -} diff --git a/internal/ini/ws_token.go b/internal/ini/ws_token.go deleted file mode 100644 index 7ffb4ae06ff..00000000000 --- a/internal/ini/ws_token.go +++ /dev/null @@ -1,24 +0,0 @@ -package ini - -import ( - "unicode" -) - -// isWhitespace will return whether or not the character is -// a whitespace character. -// -// Whitespace is defined as a space or tab. -func isWhitespace(c rune) bool { - return unicode.IsSpace(c) && c != '\n' && c != '\r' -} - -func newWSToken(b []rune) (Token, int, error) { - i := 0 - for ; i < len(b); i++ { - if !isWhitespace(b[i]) { - break - } - } - - return newToken(TokenWS, b[:i], NoneType), i, nil -}