From 97747c5ac093dbc525b19a183b827d53c5e694fe Mon Sep 17 00:00:00 2001
From: Vadym Slizov
Date: Mon, 17 Aug 2020 20:31:04 +0300
Subject: [PATCH] [refactoring] remove scanner token

---
 internal/php5/parser.go                 |   31 +-
 internal/php5/parser_test.go            |  774 +++++---
 internal/php5/php5.go                   |  Bin 327033 -> 301917 bytes
 internal/php5/php5.y                    | 2314 +++++++----------------
 internal/php5/php5_bench_test.go        |    2 +-
 internal/php5/php5_test.go              |   11 +-
 internal/php7/parser.go                 |   31 +-
 internal/php7/parser_test.go            |  846 ++++++---
 internal/php7/php7.go                   |  Bin 276863 -> 256975 bytes
 internal/php7/php7.y                    | 1909 ++++++-------------
 internal/php7/php7_bench_test.go        |    2 +-
 internal/php7/php7_test.go              |   11 +-
 internal/position/position.go           |   16 +-
 internal/position/position_test.go      |   54 +-
 internal/scanner/lexer.go               |   56 +-
 internal/scanner/lexer_tokens.go        |  145 --
 internal/scanner/scanner.go             |  Bin 411853 -> 412458 bytes
 internal/scanner/scanner.rl             |  402 ++--
 internal/scanner/scanner_test.go        | 1062 ++++++-----
 internal/scanner/token.go               |   14 -
 internal/scanner/token_pool.go          |   22 -
 internal/scanner/token_pool_test.go     |   34 -
 internal/scanner/tokenid_string.go      |  161 --
 pkg/ast/node.go                         |    4 +
 pkg/ast/visitor/dump.go                 |    2 +-
 pkg/ast/visitor/dump_test.go            |    4 +-
 pkg/ast/visitor/filter_tokens.go        |   14 +
 pkg/parser/parser.go                    |    2 +-
 pkg/position/pool.go                    |   29 +
 pkg/printer/printer_parsed_php5_test.go |    2 +-
 pkg/printer/printer_parsed_php7_test.go |    4 +-
 pkg/printer/printer_test.go             |    4 +-
 pkg/token/pool.go                       |   29 +
 pkg/token/pool_bench_test.go            |  173 ++
 pkg/token/position.go                   |    2 +-
 pkg/token/token.go                      |   13 +-
 36 files changed, 3396 insertions(+), 4783 deletions(-)
 delete mode 100644 internal/scanner/lexer_tokens.go
 delete mode 100644 internal/scanner/token.go
 delete mode 100644 internal/scanner/token_pool.go
 delete mode 100644 internal/scanner/token_pool_test.go
 delete mode 100644 internal/scanner/tokenid_string.go
 create mode 100644 pkg/ast/visitor/filter_tokens.go
 create mode 100644 pkg/position/pool.go
 create mode 100644 pkg/token/pool.go
 create mode 100644 pkg/token/pool_bench_test.go

diff --git a/internal/php5/parser.go b/internal/php5/parser.go
index 1fe5085..5b06343 100644
--- a/internal/php5/parser.go
+++ b/internal/php5/parser.go
@@ -12,7 +12,7 @@ import (
 // Parser structure
 type Parser struct {
 	Lexer          *scanner.Lexer
-	currentToken   *scanner.Token
+	currentToken   *token.Token
 	rootNode       ast.Vertex
 	errHandlerFunc func(*errors.Error)
 }
@@ -40,8 +40,7 @@ func (p *Parser) Error(msg string) {
 		return
 	}
 
-	var pos = p.currentToken.Position
-	p.errHandlerFunc(errors.NewError(msg, &pos))
+	p.errHandlerFunc(errors.NewError(msg, p.currentToken.Position))
 }
 
 // Parse the php7 Parser entrypoint
@@ -82,7 +81,7 @@ func (p *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) {
 	delete(src.GetNode().Tokens, token.Start)
 }
 
-func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []*token.Token) {
 	if len(tokens) == 0 {
 		return
 	}
@@ -98,7 +97,7 @@
 	}
 }
 
-func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []*token.Token) {
 	if len(tokens) == 0 {
 		return
 	}
@@ -108,14 +107,14 @@
 		*dstCollection = make(token.Collection)
 	}
 
-	(*dstCollection)[pos] = make([]token.Token, 0)
+	(*dstCollection)[pos] = make([]*token.Token, 0)
 
 	for _, v := range tokens {
 		(*dstCollection)[pos] = append((*dstCollection)[pos], v)
 	}
 }
 
-func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []*token.Token) {
 	if len(tokens) == 0 {
 		return
 	}
@@ -141,7 +140,7 @@
 	}
 
 	if semiColon[0].Value[0] == ';' {
-		p.setFreeFloatingTokens(prevNode, token.SemiColon, []token.Token{
+		p.setFreeFloatingTokens(prevNode, token.SemiColon, []*token.Token{
 			{
 				ID:    token.ID(';'),
 				Value: semiColon[0].Value[0:1],
@@ -155,28 +154,18 @@
 		tlen = 3
 	}
 
-	phpCloseTag := []token.Token{}
+	phpCloseTag := []*token.Token{}
 	if vlen-tlen > 1 {
-		phpCloseTag = append(phpCloseTag, token.Token{
+		phpCloseTag = append(phpCloseTag, &token.Token{
 			ID:    token.T_WHITESPACE,
 			Value: semiColon[0].Value[1 : vlen-tlen],
 		})
 	}
 
-	phpCloseTag = append(phpCloseTag, token.Token{
+	phpCloseTag = append(phpCloseTag, &token.Token{
 		ID:    T_CLOSE_TAG,
 		Value: semiColon[0].Value[vlen-tlen:],
 	})
 
 	p.setFreeFloatingTokens(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...))
 }
-
-func (p *Parser) returnTokenToPool(yyDollar []yySymType, yyVAL *yySymType) {
-	for i := 1; i < len(yyDollar); i++ {
-		if yyDollar[i].token != nil {
-			p.Lexer.ReturnTokenToPool(yyDollar[i].token)
-		}
-		yyDollar[i].token = nil
-	}
-	yyVAL.token = nil
-}
diff --git a/internal/php5/parser_test.go b/internal/php5/parser_test.go
index a33aa36..bc6d43c 100644
--- a/internal/php5/parser_test.go
+++ b/internal/php5/parser_test.go
@@ -60,11 +60,12 @@ func TestIdentifier(t *testing.T) {
 		},
 	}
 
-	lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
+	lexer := scanner.NewLexer([]byte(src), "5.6", nil)
 	php5parser := php5.NewParser(lexer, nil)
 	php5parser.Parse()
 	actual := php5parser.GetRootNode()
 	traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+	traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
 
 	assert.DeepEqual(t, expected, actual)
 }
@@ -844,11 +845,12 @@ func TestPhp5ArgumentNode(t *testing.T) {
 		},
 	}
 
-	lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
+	lexer := scanner.NewLexer([]byte(src), "5.6", nil)
 	php5parser := php5.NewParser(lexer, nil)
 	php5parser.Parse()
 	actual := php5parser.GetRootNode()
 	traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+	traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
 
 	assert.DeepEqual(t, expected, actual)
 }
@@ -1658,11 +1660,12 @@ func TestPhp5ParameterNode(t *testing.T) {
 		},
 	}
 
-	lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
+	lexer := scanner.NewLexer([]byte(src), "5.6", nil)
 	php5parser := php5.NewParser(lexer, nil)
 	php5parser.Parse()
 	actual := php5parser.GetRootNode()
 	traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+	traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
 
 	assert.DeepEqual(t, expected, actual)
 }
@@ -1681,11 +1684,12 @@ func TestCommentEndFile(t *testing.T) {
 		Stmts: []ast.Vertex{},
 	}
 
-	lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
+	lexer := scanner.NewLexer([]byte(src), "5.6", nil)
 	php5parser := php5.NewParser(lexer, nil)
 	php5parser.Parse()
 	actual := php5parser.GetRootNode()
 	traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+	traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
 
 	assert.DeepEqual(t, expected,
actual) } @@ -1760,11 +1764,12 @@ func TestName(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -1837,11 +1842,12 @@ func TestFullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -1914,11 +1920,12 @@ func TestRelative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -1994,11 +2001,12 @@ func TestScalarEncapsed_SimpleVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2072,11 +2080,12 @@ func TestScalarEncapsed_SimpleVarOneChar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2161,11 +2170,12 @@ func TestScalarEncapsed_SimpleVarEndsEcapsed(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2260,11 +2270,12 @@ func TestScalarEncapsed_StringVarCurveOpen(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2370,11 +2381,12 @@ func TestScalarEncapsed_SimpleVarPropertyFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2448,11 +2460,12 @@ func TestScalarEncapsed_DollarOpenCurlyBraces(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2547,11 +2560,12 @@ func TestScalarEncapsed_DollarOpenCurlyBracesDimNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2656,11 +2670,12 @@ func TestScalarEncapsed_CurlyOpenMethodCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2749,11 +2764,12 @@ LBL; }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2842,11 +2858,12 @@ LBL; }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2903,11 +2920,12 @@ LBL; }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2950,11 +2968,12 @@ CAD; }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3011,11 +3030,12 @@ CAD; }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, 
expected, actual) } @@ -3057,11 +3077,12 @@ func TestScalarMagicConstant(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3102,11 +3123,12 @@ func TestScalarNumber_LNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3147,11 +3169,12 @@ func TestScalarNumber_DNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3192,11 +3215,12 @@ func TestScalarNumber_Float(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3237,11 +3261,12 @@ func TestScalarNumber_BinaryLNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3282,11 +3307,12 @@ func TestScalarNumber_BinaryDNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3327,11 +3353,12 @@ func TestScalarNumber_HLNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3372,11 +3399,12 @@ func TestScalarNumber_HDNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3417,11 +3445,12 @@ func TestScalarString_DoubleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3462,11 +3491,12 @@ func TestScalarString_DoubleQuotedScalarStringWithEscapedVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3509,11 +3539,12 @@ func TestScalarString_MultilineDoubleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3554,11 +3585,12 @@ func TestScalarString_SingleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3601,11 +3633,12 @@ func TestScalarString_MultilineSingleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3672,11 +3705,12 @@ func TestStmtAltIf_AltIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3786,11 +3820,12 @@ func TestStmtAltIf_AltElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3877,11 +3912,12 @@ func TestStmtAltIf_AltElse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), 
"5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4056,11 +4092,12 @@ func TestStmtAltIf_AltElseElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4179,11 +4216,12 @@ func TestStmtClassConstList_WithoutModifiers(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4259,11 +4297,12 @@ func TestStmtClassMethod_SimpleClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4409,11 +4448,12 @@ func TestStmtClassMethod_PrivateProtectedClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4513,11 +4553,12 @@ func TestStmtClassMethod_Php5ClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4629,11 +4670,12 @@ func TestStmtClassMethod_AbstractClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4675,11 +4717,12 @@ func TestStmtClass_SimpleClass(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4734,11 +4777,12 @@ func 
TestStmtClass_AbstractClass(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4826,11 +4870,12 @@ func TestStmtClass_ClassExtends(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4920,11 +4965,12 @@ func TestStmtClass_ClassImplement(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5037,11 +5083,12 @@ func TestStmtClass_ClassImplements(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5137,11 +5184,12 @@ func TestStmtConstList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5204,11 +5252,12 @@ func TestStmtContinue_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5282,11 +5331,12 @@ func TestStmtContinue_Light(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5360,11 +5410,12 @@ func TestStmtContinue(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) 
assert.DeepEqual(t, expected, actual) } @@ -5438,11 +5489,12 @@ func TestStmtDeclare(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5549,11 +5601,12 @@ func TestStmtDeclare_Stmts(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5629,11 +5682,12 @@ func TestStmtDeclare_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5685,11 +5739,12 @@ func TestStmtDo(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5753,11 +5808,12 @@ func TestStmtEcho(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5810,11 +5866,12 @@ func TestStmtEcho_Parenthesis(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5855,11 +5912,12 @@ func TestStmtExpression(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6052,11 +6110,12 @@ func TestStmtFor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) 
assert.DeepEqual(t, expected, actual) } @@ -6174,11 +6233,12 @@ func TestStmtFor_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6261,11 +6321,12 @@ func TestStmtForeach(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6338,11 +6399,12 @@ func TestStmtForeach_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6425,11 +6487,12 @@ func TestStmtForeach_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6533,11 +6596,12 @@ func TestStmtForeach_WithKey(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6631,11 +6695,12 @@ func TestStmtForeach_ExprWithKey(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6749,11 +6814,12 @@ func TestStmtForeach_WithRef(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6879,11 +6945,12 @@ func TestStmtForeach_WithList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6926,11 +6993,12 @@ func TestStmtFunction(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6984,11 +7052,12 @@ func TestStmtFunction_Return(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7149,11 +7218,12 @@ func TestStmtFunction_ReturnVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7218,11 +7288,12 @@ func TestStmtFunction_Ref(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7275,11 +7346,12 @@ func TestStmtGlobal(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7437,11 +7509,12 @@ func TestStmtGlobal_Vars(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7503,11 +7576,12 @@ func TestStmtGotoLabel(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7537,11 +7611,12 @@ func TestStmtHaltCompiler(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() 
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7603,11 +7678,12 @@ func TestStmtIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7713,11 +7789,12 @@ func TestStmtIf_ElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7800,11 +7877,12 @@ func TestStmtIf_Else(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7973,11 +8051,12 @@ func TestStmtIf_ElseElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8156,11 +8235,12 @@ func TestStmtIf_ElseIfElseIfElse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8201,11 +8281,12 @@ func TestStmtInlineHtml(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8247,11 +8328,12 @@ func TestStmtInterface(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8328,11 +8410,12 @@ func TestStmtInterface_Extend(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := 
php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8432,11 +8515,12 @@ func TestStmtInterface_Extends(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8489,11 +8573,12 @@ func TestStmtNamespace(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8547,11 +8632,12 @@ func TestStmtNamespace_Stmts(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8582,11 +8668,12 @@ func TestStmtNamespace_Anonymous(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8685,11 +8772,12 @@ func TestStmtProperty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8841,11 +8929,12 @@ func TestStmtProperty_Properties(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8997,11 +9086,12 @@ func TestStmtProperty_Properties2(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9064,11 +9154,12 @@ func TestStmtStaticVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := 
php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9173,11 +9264,12 @@ func TestStmtStaticVar_Vars(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9282,11 +9374,12 @@ func TestStmtStaticVar_Vars2(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9410,11 +9503,12 @@ func TestStmtSwitch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9538,11 +9632,12 @@ func TestStmtSwitch_Semicolon(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9656,11 +9751,12 @@ func TestStmtSwitch_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9762,11 +9858,12 @@ func TestStmtSwitch_AltSemicolon(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9817,11 +9914,12 @@ func TestStmtThrow(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9863,11 +9961,12 @@ func TestStmtTrait(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), 
"5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9955,11 +10054,12 @@ func TestStmtTraitUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10070,11 +10170,12 @@ func TestStmtTraitUse_Uses(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10185,11 +10286,12 @@ func TestStmtTraitUse_EmptyAdaptations(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10344,11 +10446,12 @@ func TestStmtTraitUse_Modifier(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10514,11 +10617,12 @@ func TestStmtTraitUse_AliasModifier(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10798,11 +10902,12 @@ func TestStmtTraitUse_Adaptions(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10836,11 +10941,12 @@ func TestStmtTry_Try(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10932,11 +11038,12 @@ func TestStmtTry_TryCatch(t *testing.T) { }, } - lexer := 
scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11085,11 +11192,12 @@ func TestStmtTry_TryCatchCatch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11192,11 +11300,12 @@ func TestStmtTry_TryCatchFinally(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11400,11 +11509,12 @@ func TestStmtTry_TryCatchCatchCatch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11457,11 +11567,12 @@ func TestStmtUnset(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11535,11 +11646,12 @@ func TestStmtUnset_Vars(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11614,11 +11726,12 @@ func TestStmtUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11693,11 +11806,12 @@ func TestStmtUse_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11783,11 +11897,12 @@ 
func TestStmtUse_FullyQualifiedAlias(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11895,11 +12010,12 @@ func TestStmtUse_List(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12018,11 +12134,12 @@ func TestStmtUse_ListAlias(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12151,11 +12268,12 @@ func TestStmtUse_ListFunctionType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12306,11 +12424,12 @@ func TestStmtUse_ListFunctionTypeAliases(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12440,11 +12559,12 @@ func TestStmtUse_ListConstType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12595,11 +12715,12 @@ func TestStmtUse_ListConstTypeAliases(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12662,11 +12783,12 @@ func TestStmtBreak_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12740,11 +12862,12 @@ func TestStmtBreak_Light(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12818,11 +12941,12 @@ func TestStmtBreak(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12896,11 +13020,12 @@ func TestExprArrayDimFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12993,11 +13118,12 @@ func TestExprArrayDimFetch_Nested(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13038,11 +13164,12 @@ func TestExprArray(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13105,11 +13232,12 @@ func TestExprArray_Item(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13225,11 +13353,12 @@ func TestExprArray_Items(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13290,11 +13419,12 @@ func TestExprBitwiseNot(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() 
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13355,11 +13485,12 @@ func TestExprBooleanNot(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13433,11 +13564,12 @@ func TestExprClassConstFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13499,11 +13631,12 @@ func TestExprClassConstFetch_Static(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13564,11 +13697,12 @@ func TestExprClone_Brackets(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13629,11 +13763,12 @@ func TestExprClone(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13676,11 +13811,12 @@ func TestExprClosure(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13851,11 +13987,12 @@ func TestExprClosure_Use(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14026,11 +14163,12 @@ func TestExprClosure_Use2(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) 
php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14093,11 +14231,12 @@ func TestExprConstFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14160,11 +14299,12 @@ func TestExprConstFetch_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14227,11 +14367,12 @@ func TestExprConstFetch_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14292,11 +14433,12 @@ func TestExprEmpty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14357,11 +14499,12 @@ func TestExprErrorSuppress(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14422,11 +14565,12 @@ func TestExprEval(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14467,11 +14611,12 @@ func TestExprExit(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14512,11 +14657,12 @@ func TestExprExit_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) 
php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14578,11 +14724,12 @@ func TestExprExit_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14623,11 +14770,12 @@ func TestExprDie(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14668,11 +14816,12 @@ func TestExprDie_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14734,11 +14883,12 @@ func TestExprDie_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14811,11 +14961,12 @@ func TestExprFunctionCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14888,11 +15039,12 @@ func TestExprFunctionCall_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14990,11 +15142,12 @@ func TestExprFunctionCall_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15110,11 +15263,12 @@ func TestExprFunctionCall_Var(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, 
nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15243,11 +15397,12 @@ func TestExprFunctionCall_ExprArg(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15308,11 +15463,12 @@ func TestExprPostDec(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15373,11 +15529,12 @@ func TestExprPostInc(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15438,11 +15595,12 @@ func TestExprPreDec(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15503,11 +15661,12 @@ func TestExprPreInc(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15568,11 +15727,12 @@ func TestExprInclude(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15633,11 +15793,12 @@ func TestExprInclude_Once(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15698,11 +15859,12 @@ func TestExprRequire(t *testing.T) { }, } - lexer := 
scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15763,11 +15925,12 @@ func TestExprRequire_Once(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15851,11 +16014,12 @@ func TestExprInstanceOf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15939,11 +16103,12 @@ func TestExprInstanceOf_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16027,11 +16192,12 @@ func TestExprInstanceOf_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16094,11 +16260,12 @@ func TestExprIsset(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16182,11 +16349,12 @@ func TestExprIsset_Variables(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16258,11 +16426,12 @@ func TestExprList_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16366,11 +16535,12 @@ 
func TestExprList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16484,11 +16654,12 @@ func TestExprList_ArrayIndex(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16614,11 +16785,12 @@ func TestExprList_List(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16723,11 +16895,12 @@ func TestExprList_EmptyItem(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16834,11 +17007,12 @@ func TestExprList_EmptyItems(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16920,11 +17094,12 @@ func TestExprMethodCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16987,11 +17162,12 @@ func TestExprNew(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17064,11 +17240,12 @@ func TestExprNew_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, 
actual) } @@ -17141,11 +17318,12 @@ func TestExprNew_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17206,11 +17384,12 @@ func TestExprPrint(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17282,11 +17461,12 @@ func TestExprPropertyFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17401,11 +17581,12 @@ func TestExprReference_ForeachWithRef(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17479,11 +17660,12 @@ func TestExprShellExec(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17524,11 +17706,12 @@ func TestExprShortArray(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17591,11 +17774,12 @@ func TestExprShortArray_Item(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17711,11 +17895,12 @@ func TestExprShortArray_Items(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17799,11 +17984,12 @@ func TestExprStaticCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17887,11 +18073,12 @@ func TestExprStaticCall_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17975,11 +18162,12 @@ func TestExprStaticCall_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18073,11 +18261,12 @@ func TestExprStaticCall_Var(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18169,11 +18358,12 @@ func TestExprStaticCall_VarVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18257,11 +18447,12 @@ func TestExprStaticPropertyFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18345,11 +18536,12 @@ func TestExprStaticPropertyFetch_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18433,11 +18625,12 @@ func TestExprStaticPropertyFetch_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) 
php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18540,11 +18733,12 @@ func TestExprTernary(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18626,11 +18820,12 @@ func TestExprTernary_Simple(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18785,11 +18980,12 @@ func TestExprTernary_NestedTrue(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18944,11 +19140,12 @@ func TestExprTernary_NestedCond(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19009,11 +19206,12 @@ func TestExprUnaryMinus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19074,11 +19272,12 @@ func TestExprUnaryPlus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19129,11 +19328,12 @@ func TestExprVariable(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19194,11 +19394,12 @@ func TestExprVariable_Variable(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), 
"5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19238,11 +19439,12 @@ func TestExprYield(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19303,11 +19505,12 @@ func TestExprYield_Val(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19389,11 +19592,12 @@ func TestExprYield_KeyVal(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19444,11 +19648,12 @@ func TestExprYield_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19520,11 +19725,12 @@ func TestExprYield_KeyExpr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19608,11 +19814,12 @@ func TestExprAssign_Assign(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19694,11 +19901,12 @@ func TestExprAssign_Reference(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19792,11 +20000,12 @@ func TestExprAssign_ReferenceNew(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, 
nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19935,11 +20144,12 @@ func TestExprAssign_ReferenceArgs(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20021,11 +20231,12 @@ func TestExprAssign_BitwiseAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20107,11 +20318,12 @@ func TestExprAssign_BitwiseOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20193,11 +20405,12 @@ func TestExprAssign_BitwiseXor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20279,11 +20492,12 @@ func TestExprAssign_Concat(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20365,11 +20579,12 @@ func TestExprAssign_Div(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20451,11 +20666,12 @@ func TestExprAssign_Minus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20537,11 +20753,12 @@ func TestExprAssign_Mod(t 
*testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20623,11 +20840,12 @@ func TestExprAssign_Mul(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20709,11 +20927,12 @@ func TestExprAssign_Plus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20795,11 +21014,12 @@ func TestExprAssign_Pow(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20881,11 +21101,12 @@ func TestExprAssign_ShiftLeft(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20967,11 +21188,12 @@ func TestExprAssign_ShiftRight(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21055,11 +21277,12 @@ func TestExprBinary_BitwiseAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21141,11 +21364,12 @@ func TestExprBinary_BitwiseOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, 
actual) } @@ -21227,11 +21451,12 @@ func TestExprBinary_BitwiseXor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21313,11 +21538,12 @@ func TestExprBinary_BooleanAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21399,11 +21625,12 @@ func TestExprBinary_BooleanOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21485,11 +21712,12 @@ func TestExprBinary_Concat(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21571,11 +21799,12 @@ func TestExprBinary_Div(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21657,11 +21886,12 @@ func TestExprBinary_Equal(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21743,11 +21973,12 @@ func TestExprBinary_GreaterOrEqual(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21829,11 +22060,12 @@ func TestExprBinary_Greater(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21915,11 +22147,12 @@ func TestExprBinary_Identical(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22001,11 +22234,12 @@ func TestExprBinary_LogicalAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22087,11 +22321,12 @@ func TestExprBinary_LogicalOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22173,11 +22408,12 @@ func TestExprBinary_LogicalXor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22259,11 +22495,12 @@ func TestExprBinary_Minus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22345,11 +22582,12 @@ func TestExprBinary_Mod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22431,11 +22669,12 @@ func TestExprBinary_Mul(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22517,11 +22756,12 @@ func TestExprBinary_NotEqual(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() 
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22603,11 +22843,12 @@ func TestExprBinary_NotIdentical(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22689,11 +22930,12 @@ func TestExprBinary_Plus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22775,11 +23017,12 @@ func TestExprBinary_Pow(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22861,11 +23104,12 @@ func TestExprBinary_ShiftLeft(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22947,11 +23191,12 @@ func TestExprBinary_ShiftRight(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23033,11 +23278,12 @@ func TestExprBinary_SmallerOrEqual(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23119,11 +23365,12 @@ func TestExprBinary_Smaller(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23186,11 +23433,12 @@ func TestExprCast_Array(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := 
php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23251,11 +23499,12 @@ func TestExprCast_Bool(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23316,11 +23565,12 @@ func TestExprCast_BoolShort(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23381,11 +23631,12 @@ func TestExprCast_Double(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23446,11 +23697,12 @@ func TestExprCast_CastFloat(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23511,11 +23763,12 @@ func TestExprCast_Int(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23576,11 +23829,12 @@ func TestExprCast_IntShort(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23641,11 +23895,12 @@ func TestExprCast_Object(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23706,11 +23961,12 @@ func TestExprCast_String(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := 
scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23771,11 +24027,12 @@ func TestExprCast_BinaryString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23836,10 +24093,11 @@ func TestExprCast_Unset(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } diff --git a/internal/php5/php5.go b/internal/php5/php5.go index 1134714ebd42dc839c0eee38c41e5b3f83393e2b..f05d4520c2db1614610b9a406fafb5cd28d64c81 100644 GIT binary patch delta 24427 zcmb_kd3;nw^8a+d#}No5+;}4t?S&APMFkUZvZy~8im-ImFgk6;5?VEJ?GPgVXjW#YKW&$Z`wHa3I4KxpohO~sQ<**ly{7xLsP zff{jfGpI{<)_}IOV81pQFmXb0@wl-AWv6Lv@Z-!HabreL8<#@s`bE4UCAMU>;z95B*}jdZQI~X<#C_RxPb=1u%2U~IwIT+gHm#Un z5i!4*(TWXV)%5SQ%+K1A?+i2P;uS2-(`{6m?nZ6zU>TH{4q4<-ESrvZH8W^uSi|TO zMg~pJuqcv=ZQ>c!qBZl$4$0&nWHzJ1doax6*6e3z^ezvWSJSQorb(lw@WjNd;q8?w z+uPEOn$A)k;yKe<)6|H0wQt|PjryT#t@W`V!X+CL{)guG-JU3Z z_hx~gKi^2MlBfHg zgxDM&DMX*zg z2UCabXfIsKuAqd)tTy#I!{QtnvG{V8+tL2~DD5>U&?3^L5UW8?UCp2LsO6?YO~Hb6 zuosL$?pHeYM_!j2cY&(Z{Bm=oMfSH=b7vR!F_m@2Z*_J@%W$TSVFrZ% zp~mzc31|B1;iRaTVaZXpx@p9w4Bw1dbZ`N0Lggc{(bhLKlj(Rj9-trk$zuT`vw*W@ zlBmP=AWB?k)D2V2Q~fn1t?J1F0wl{XQ!V{(kSo#EOlHphx3p53u%-9n{5?BV%4zT9 z|3*h|55w5o$C*iVzCY~bn{K?8RFh{t;zvjc=V7pe<033&-w0_b^UM^FnbdSo{cP&; z7E4oO@N6}~|Hx+Aj>XXHH$XYc6y@O<(_}8(47!6mElCvBWvQcL6=xH>682D3WW+1s zSx9O+NJVe)IpUdHSyOj)m+j}hmA$(r*_;pX=y3BC2#5=&1??&{^W-5!1-F9f;Y~(0 zI`+I-MJz66C*9)=Ob3SZ>qQ+7M4d!~r)j1}ZnjTf!<^bG#bZp;P|o728A z>G*U#NuHZWsT){BB~-X$;qQZcwD=cnN;z*M7cR+HGuT>JnFsf=zUuJwEK1{9HtRf} zWSuOwKFJNYACB10wGpP_Ahh(%!YZU*KC2F9?KJx7O=;e=l6aKo#_V&-c&MU^v$)m{&SdzGJ2|MTE|Kidxw`a~WRzS6vh3P}B-_knV zqzTJdV`oSZ-sG85qrd3UjF2x#Qx>oS@$_HVa_`LVSz!wm!hQV%*Ph~;73@ccyBf53 zZP<8dsqm(LtHc>t9*--quVxK4U#Hoxv38Lx%btm5=~Y_lN=LpNI^F|@xpXBvAIayk zMJs+(Gs7{p{uVfO;MIiAylmE`rGw3F|D}lg);J>OQvZCtcc|bo_S1hUXOnfVoUZc; zhV^LeI`(}e*O*$YB+&7V=jrNLkxH0id$#`xbC0FH`#7X!8)OvZvzAB(8=`OnVyJLY zE3}vhFhUn`SN-RSC`u%rt=ye?n= zlb_XN8$R3`*s)2Qjz1fw|KrcG8^h;`#W-w6EJkPA_dIJ8P80J4ocDKUpimC=A8*#D z;+?FzSo#V};q>-xaK$oTWGBM;bf%)0A{V|h9lP6fjy{1~B-MMUCBI^glR|aYnhl&W zn!EnCI@10N&M)q=I0F(`$doe?C)$=mf=HPcyzZ5G%6+^(jaqDYL$6M6Xof5=?CjY3 z2Ky+SqqTKT-xn2uG~R8?Q(9kdWwCHKD|O{z*Bb@2a1Uz`&adJqHRmmhVUm&M_t6{z z`0>PBk!F5*2EF(lhf!7*!FvfR7qR>o)=+fW%L>9IeB5kDoA4(He`&L|;EJzG8uYw-lglRm<4rSC8aDZs<- zNC(Cl)#<8_S$oyWeHJXB6iS7=<$bx5RiPmV;Dt`u%o_`^67W zV65;__h4{`ntU>bQ6wr`@eGw;ZVi)aw^b#6se+U)a&#XWRH33?=n* z*c6=j$>&%q?Ky{_SGC}LMoSBkcb)<1iU>97k>7Mb4A|v!%(sJ&x1iJuh>=y_Y#%kd z$Wo2T6K>2DYWHtSbE|m81#rsT9B#%zXUWNZo!*~ z1D9Bqw{rpG*>v^`mSQ1LjB~&GQ{H%I%uGF{3w z3n+079OA7eZ{%TNR?#eQ7$Lf>v56 zq?GBtR^)%#WD%$8jV^l-p$j+_{O}wJ78&Z-mBzx(|dHKHwB;L|CR?K;UH81 z+D%m4*Gv~3n(=(Me{2-35~~2-*U?Nv 
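The parser_test.go updates above all follow the same two-line pattern: scanner.NewLexer() loses its boolean argument, and a visitor.FilterTokens traversal is added next to visitor.FilterParserNodes, presumably so that token data does not affect the DeepEqual comparison. A minimal sketch of a helper capturing that pattern is shown below; it is not part of the patch, and the import paths for the traverser and visitor packages are assumed from the repository layout.

    // Sketch only: the parse-and-filter sequence used by the updated tests.
    package php5_test

    import (
    	"github.com/z7zmey/php-parser/internal/php5"
    	"github.com/z7zmey/php-parser/internal/scanner"
    	"github.com/z7zmey/php-parser/pkg/ast"
    	"github.com/z7zmey/php-parser/pkg/ast/traverser" // assumed path
    	"github.com/z7zmey/php-parser/pkg/ast/visitor"   // assumed path
    )

    // parsePHP5 runs the PHP 5 parser the way the updated tests do and strips
    // the node data that the tests no longer compare.
    func parsePHP5(src string) ast.Vertex {
    	// The boolean flag previously passed to NewLexer is gone.
    	lexer := scanner.NewLexer([]byte(src), "5.6", nil)
    	php5parser := php5.NewParser(lexer, nil)
    	php5parser.Parse()

    	actual := php5parser.GetRootNode()
    	traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
    	traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) // added by this patch
    	return actual
    }

The php5.y changes that follow apply the matching rename on the grammar side: every semantic action now reads SkippedTokens from *token.Token instead of Tokens from *scanner.Token, and the per-rule returnTokenToPool() boilerplate is removed from every action.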
diff --git a/internal/php5/php5.y b/internal/php5/php5.y index 63b1661..d328d44 100644 --- a/internal/php5/php5.y +++ b/internal/php5/php5.y @@ -5,17 +5,16 @@ import ( "bytes" "strconv" - "github.com/z7zmey/php-parser/internal/position" - "github.com/z7zmey/php-parser/internal/scanner" - "github.com/z7zmey/php-parser/pkg/ast" - "github.com/z7zmey/php-parser/pkg/token" + "github.com/z7zmey/php-parser/internal/position" + "github.com/z7zmey/php-parser/pkg/ast" + "github.com/z7zmey/php-parser/pkg/token" ) %} %union{ node ast.Vertex - token *scanner.Token + token *token.Token list []ast.Vertex simpleIndirectReference simpleIndirectReference @@ -25,7 +24,6 @@ import ( ClosureUse *ast.ExprClosureUse } -%type $unk %token T_INCLUDE %token T_INCLUDE_ONCE %token T_EXIT @@ -278,9 +276,7 @@ start: yylex.(*Parser).rootNode = &ast.Root{ast.Node{}, $1} yylex.(*Parser).rootNode.GetNode().Position = position.NewNodeListPosition($1) - yylex.(*Parser).setFreeFloating(yylex.(*Parser).rootNode, token.End, yylex.(*Parser).currentToken.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(yylex.(*Parser).rootNode, token.End, yylex.(*Parser).currentToken.SkippedTokens) } ; @@ -295,14 +291,10 @@ top_statement_list: if $2 != nil { $$ = append($1, $2) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -316,9 +308,7 @@ namespace_name: namePart.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(namePart, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(namePart, token.Start, $1.SkippedTokens) } | namespace_name T_NS_SEPARATOR T_STRING { @@ -329,10 +319,8 @@ namespace_name: namePart.GetNode().Position = position.NewTokenPosition($3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(namePart, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(namePart, token.Start, $3.SkippedTokens) } ; @@ -341,26 +329,18 @@ top_statement: { // error $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | function_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_HALT_COMPILER '(' ')' ';' { @@ -370,10 +350,8 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.Tokens, append($3.Tokens, $4.Tokens...)...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.SkippedTokens, append($3.SkippedTokens, $4.SkippedTokens...)...)) } | T_NAMESPACE namespace_name ';' { @@ -385,11 +363,9 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(name, token.End, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, 
$3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.End, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_NAMESPACE namespace_name '{' top_statement_list '}' { @@ -401,11 +377,9 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(name, token.End, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.End, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $5.SkippedTokens) } | T_NAMESPACE '{' top_statement_list '}' { @@ -415,11 +389,9 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.SkippedTokens) } | T_USE use_declarations ';' { @@ -431,10 +403,8 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | T_USE T_FUNCTION use_function_declarations ';' { @@ -450,11 +420,9 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating(identifier, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(identifier, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.SkippedTokens) } | T_USE T_CONST use_const_declarations ';' { @@ -470,11 +438,9 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating(identifier, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(identifier, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.SkippedTokens) } | constant_declaration ';' { @@ -484,10 +450,8 @@ top_statement: $$.GetNode().Position = position.NewNodeTokenPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Stmts, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } ; @@ -497,15 +461,11 @@ use_declarations: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | use_declaration { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -518,8 +478,6 @@ use_declaration: // save position name.GetNode().Position = position.NewNodeListPosition($1) $$.GetNode().Position = position.NewNodeListPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | namespace_name T_AS T_STRING { @@ -535,10 +493,8 @@ use_declaration: $$.GetNode().Position = position.NewNodeListTokenPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $3.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -552,9 +508,7 @@ use_declaration: $$.GetNode().Position = position.NewTokenNodePosition($1, name) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NS_SEPARATOR namespace_name T_AS T_STRING { @@ -572,11 +526,9 @@ use_declaration: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(asAlias, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(asAlias, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $4.SkippedTokens) } ; @@ -586,15 +538,11 @@ use_function_declarations: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | use_function_declaration { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -607,8 +555,6 @@ use_function_declaration: // save position name.GetNode().Position = position.NewNodeListPosition($1) $$.GetNode().Position = position.NewNodeListPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | namespace_name T_AS T_STRING { @@ -624,10 +570,8 @@ use_function_declaration: $$.GetNode().Position = position.NewNodeListTokenPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $3.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -641,9 +585,7 @@ use_function_declaration: $$.GetNode().Position = position.NewTokenNodePosition($1, name) // save 
comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NS_SEPARATOR namespace_name T_AS T_STRING { @@ -661,11 +603,9 @@ use_function_declaration: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(asAlias, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(asAlias, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $4.SkippedTokens) } ; @@ -675,15 +615,11 @@ use_const_declarations: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | use_const_declaration { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -696,8 +632,6 @@ use_const_declaration: // save position name.GetNode().Position = position.NewNodeListPosition($1) $$.GetNode().Position = position.NewNodeListPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | namespace_name T_AS T_STRING { @@ -713,10 +647,8 @@ use_const_declaration: $$.GetNode().Position = position.NewNodeListTokenPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $3.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -730,9 +662,7 @@ use_const_declaration: $$.GetNode().Position = position.NewTokenNodePosition($1, name) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NS_SEPARATOR namespace_name T_AS T_STRING { @@ -750,11 +680,9 @@ use_const_declaration: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(asAlias, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(asAlias, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $4.SkippedTokens) } ; @@ -774,11 +702,9 @@ constant_declaration: $$.GetNode().Position = position.NewNodeNodeListPosition($1, constList.Consts) // save comments - yylex.(*Parser).setFreeFloating(lastConst, token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Name, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastConst, token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Name, 
$4.SkippedTokens) } | T_CONST T_STRING '=' static_scalar { @@ -793,11 +719,9 @@ constant_declaration: $$.GetNode().Position = position.NewTokenNodeListPosition($1, constList) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Name, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Name, $3.SkippedTokens) } ; @@ -812,14 +736,10 @@ inner_statement_list: if $2 != nil { $$ = append($1, $2) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -829,26 +749,18 @@ inner_statement: { // error $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | function_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_HALT_COMPILER '(' ')' ';' { @@ -858,10 +770,8 @@ inner_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.Tokens, append($3.Tokens, $4.Tokens...)...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.SkippedTokens, append($3.SkippedTokens, $4.SkippedTokens...)...)) } ; @@ -870,8 +780,6 @@ statement: unticked_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_STRING ':' { @@ -883,10 +791,8 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Label, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Label, $2.SkippedTokens) } ; @@ -899,10 +805,8 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) } | T_IF parenthesis_expr statement elseif_list else_single { @@ -918,9 +822,7 @@ unticked_statement: } // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_IF parenthesis_expr ':' inner_statement_list new_elseif_list new_else_single T_ENDIF ';' { @@ -934,18 +836,16 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $8) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $3.Tokens) + 
yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $3.SkippedTokens) if $6 != nil { - yylex.(*Parser).setFreeFloating($6.(*ast.StmtAltElse).Stmt, token.End, append($7.Tokens, $8.Tokens...)) + yylex.(*Parser).setFreeFloating($6.(*ast.StmtAltElse).Stmt, token.End, append($7.SkippedTokens, $8.SkippedTokens...)) } else if len($5) > 0 { - yylex.(*Parser).setFreeFloating($5[len($5)-1].(*ast.StmtAltElseIf).Stmt, token.End, append($7.Tokens, $8.Tokens...)) + yylex.(*Parser).setFreeFloating($5[len($5)-1].(*ast.StmtAltElseIf).Stmt, token.End, append($7.SkippedTokens, $8.SkippedTokens...)) } else { - yylex.(*Parser).setFreeFloating(stmtsBrackets, token.End, append($7.Tokens, $8.Tokens...)) + yylex.(*Parser).setFreeFloating(stmtsBrackets, token.End, append($7.SkippedTokens, $8.SkippedTokens...)) } - yylex.(*Parser).setToken($$, token.SemiColon, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.SemiColon, $8.SkippedTokens) } | T_WHILE parenthesis_expr while_statement { @@ -962,9 +862,7 @@ unticked_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_DO statement T_WHILE parenthesis_expr ';' { @@ -974,12 +872,10 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($4, token.End, $5.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($4, token.End, $5.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $5.SkippedTokens) } | T_FOR '(' for_expr ';' for_expr ';' for_expr ')' for_statement { @@ -1000,13 +896,11 @@ unticked_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $9) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.For, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.InitExpr, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.CondExpr, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.IncExpr, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.For, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.InitExpr, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.CondExpr, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.IncExpr, $8.SkippedTokens) } | T_SWITCH parenthesis_expr switch_case_list { @@ -1025,9 +919,7 @@ unticked_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_BREAK ';' { @@ -1037,11 +929,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save 
comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | T_BREAK expr ';' { @@ -1051,11 +941,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_CONTINUE ';' { @@ -1065,11 +953,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | T_CONTINUE expr ';' { @@ -1079,11 +965,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_RETURN ';' { @@ -1093,11 +977,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | T_RETURN expr_without_variable ';' { @@ -1107,11 +989,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_RETURN variable ';' { @@ -1121,11 +1001,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - 
yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | yield_expr ';' { @@ -1136,10 +1014,8 @@ unticked_statement: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | T_GLOBAL global_var_list ';' { @@ -1149,11 +1025,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.VarList, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_STATIC static_var_list ';' { @@ -1163,11 +1037,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.VarList, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_ECHO echo_expr_list ';' { @@ -1177,12 +1049,10 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Echo, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Echo, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_INLINE_HTML { @@ -1192,9 +1062,7 @@ unticked_statement: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | expr ';' { @@ -1205,10 +1073,8 @@ unticked_statement: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | T_UNSET '(' 
unset_variables ')' ';' { @@ -1218,13 +1084,11 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Unset, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.VarList, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.CloseParenthesisToken, $5.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Unset, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.CloseParenthesisToken, $5.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $5.SkippedTokens) } | T_FOREACH '(' variable T_AS foreach_variable foreach_optional_arg ')' foreach_statement { @@ -1256,15 +1120,13 @@ unticked_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $8) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $4.SkippedTokens) if $6 != nil { yylex.(*Parser).setFreeFloatingTokens($$, token.Key, $6.GetNode().Tokens[token.Key]); delete($6.GetNode().Tokens, token.Key) } - yylex.(*Parser).setFreeFloating($$, token.Var, $7.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $7.SkippedTokens) } | T_FOREACH '(' expr_without_variable T_AS foreach_variable foreach_optional_arg ')' foreach_statement { @@ -1296,15 +1158,13 @@ unticked_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $8) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $4.SkippedTokens) if $6 != nil { yylex.(*Parser).setFreeFloatingTokens($$, token.Key, $6.GetNode().Tokens[token.Key]); delete($6.GetNode().Tokens, token.Key) } - yylex.(*Parser).setFreeFloating($$, token.Var, $7.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $7.SkippedTokens) } | T_DECLARE '(' declare_list ')' declare_statement { @@ -1315,11 +1175,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Declare, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ConstList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Declare, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ConstList, $4.SkippedTokens) } | ';' { @@ -1329,10 +1187,8 @@ unticked_statement: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - 
yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $1.SkippedTokens) } | T_TRY '{' inner_statement_list '}' catch_statement finally_statement { @@ -1346,11 +1202,9 @@ unticked_statement: } // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Try, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Try, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.SkippedTokens) } | T_THROW expr ';' { @@ -1360,11 +1214,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_GOTO T_STRING ';' { @@ -1376,12 +1228,10 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(label, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Label, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(label, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Label, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } ; @@ -1389,8 +1239,6 @@ catch_statement: /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_CATCH '(' fully_qualified_class_name T_VARIABLE ')' '{' inner_statement_list '}' additional_catches { @@ -1405,14 +1253,12 @@ catch_statement: catchNode.GetNode().Position = position.NewTokensPosition($1, $8) // save comments - yylex.(*Parser).setFreeFloating(catchNode, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(catchNode, token.Catch, $2.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.Start, $4.Tokens) - yylex.(*Parser).setFreeFloating(catchNode, token.Var, $5.Tokens) - yylex.(*Parser).setFreeFloating(catchNode, token.Cond, $6.Tokens) - yylex.(*Parser).setFreeFloating(catchNode, token.Stmts, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(catchNode, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(catchNode, token.Catch, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating(catchNode, token.Var, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating(catchNode, token.Cond, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating(catchNode, token.Stmts, $8.SkippedTokens) } ; @@ -1420,8 +1266,6 @@ finally_statement: /* empty */ { $$ = nil - 
- yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_FINALLY '{' inner_statement_list '}' { @@ -1431,11 +1275,9 @@ finally_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Finally, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Finally, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.SkippedTokens) } ; @@ -1443,14 +1285,10 @@ additional_catches: non_empty_additional_catches { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1458,14 +1296,10 @@ non_empty_additional_catches: additional_catch { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_additional_catches additional_catch { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1482,14 +1316,12 @@ additional_catch: $$.GetNode().Position = position.NewTokensPosition($1, $8) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Catch, $2.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.Start, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $5.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Cond, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Catch, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Cond, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $8.SkippedTokens) } ; @@ -1497,17 +1329,13 @@ unset_variables: unset_variable { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | unset_variables ',' unset_variable { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -1515,8 +1343,6 @@ unset_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1524,8 +1350,6 @@ function_declaration_statement: unticked_function_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1533,8 +1357,6 @@ class_declaration_statement: unticked_class_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1571,19 +1393,17 @@ unticked_function_declaration_statement: $$.GetNode().Position = position.NewTokensPosition($1, $9) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) if $2 != nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $2.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, 
token.Start, $3.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating(name, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $3.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.Name, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ParamList, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Params, $7.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $9.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ParamList, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Params, $7.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $9.SkippedTokens) } ; @@ -1610,11 +1430,9 @@ unticked_class_declaration_statement: $$.GetNode().Position = position.NewNodeTokenPosition($1, $7) // save comments - yylex.(*Parser).setFreeFloating(name, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $5.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $7.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(name, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $7.SkippedTokens) } | interface_entry T_STRING interface_extends_list '{' class_statement_list '}' { @@ -1626,12 +1444,10 @@ unticked_class_declaration_statement: $$.GetNode().Position = position.NewTokensPosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $6.SkippedTokens) } ; @@ -1645,9 +1461,7 @@ class_entry_type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_ABSTRACT T_CLASS { @@ -1659,10 +1473,8 @@ class_entry_type: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.SkippedTokens) } | T_TRAIT { @@ -1672,9 +1484,7 @@ class_entry_type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FINAL T_CLASS { @@ -1686,10 +1496,8 @@ class_entry_type: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.Tokens) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.SkippedTokens) } ; @@ -1697,8 +1505,6 @@ extends_from: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_EXTENDS fully_qualified_class_name { @@ -1708,9 +1514,7 @@ extends_from: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -1725,8 +1529,6 @@ interface_extends_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_EXTENDS interface_list { @@ -1736,9 +1538,7 @@ interface_extends_list: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -1746,8 +1546,6 @@ implements_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_IMPLEMENTS interface_list { @@ -1757,9 +1555,7 @@ implements_list: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -1767,17 +1563,13 @@ interface_list: fully_qualified_class_name { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | interface_list ',' fully_qualified_class_name { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -1785,17 +1577,13 @@ foreach_optional_arg: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_DOUBLE_ARROW foreach_variable { $$ = $2 // save comments - yylex.(*Parser).setFreeFloating($$, token.Key, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Key, $1.SkippedTokens) } ; @@ -1803,8 +1591,6 @@ foreach_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '&' variable { @@ -1814,9 +1600,7 @@ foreach_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_LIST '(' assignment_list ')' { @@ -1826,11 +1610,9 @@ foreach_variable: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.List, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.List, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.SkippedTokens) } ; @@ -1841,8 +1623,6 @@ for_statement: // save position 
$$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDFOR ';' { @@ -1854,12 +1634,10 @@ for_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -1870,8 +1648,6 @@ foreach_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDFOREACH ';' { @@ -1883,12 +1659,10 @@ foreach_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -1900,8 +1674,6 @@ declare_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDDECLARE ';' { @@ -1913,12 +1685,10 @@ declare_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -1935,10 +1705,8 @@ declare_list: constant.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(constant, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(constant, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Name, $2.SkippedTokens) } | declare_list ',' T_STRING '=' static_scalar { @@ -1951,11 +1719,9 @@ declare_list: constant.GetNode().Position = position.NewTokenNodePosition($3, $5) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Name, $4.Tokens) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Name, $4.SkippedTokens) } ; @@ -1971,10 +1737,8 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.SkippedTokens) } | '{' ';' case_list '}' { @@ -1986,11 +1750,9 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.SkippedTokens) } | ':' case_list T_ENDSWITCH ';' { @@ -2002,12 +1764,10 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } | ':' ';' case_list T_ENDSWITCH ';' { @@ -2020,13 +1780,11 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $5.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $5.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $5.SkippedTokens) } ; @@ -2035,8 +1793,6 @@ case_list: /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | case_list T_CASE expr case_separator inner_statement_list { @@ -2047,11 +1803,9 @@ case_list: _case.GetNode().Position = position.NewTokenNodeListPosition($2, $5) // save comments - yylex.(*Parser).setFreeFloating(_case, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(_case, token.Expr, $4.Tokens) - 
yylex.(*Parser).setToken(_case, token.CaseSeparator, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_case, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(_case, token.Expr, $4.SkippedTokens) + yylex.(*Parser).setToken(_case, token.CaseSeparator, $4.SkippedTokens) } | case_list T_DEFAULT case_separator inner_statement_list { @@ -2062,11 +1816,9 @@ case_list: _default.GetNode().Position = position.NewTokenNodeListPosition($2, $4) // save comments - yylex.(*Parser).setFreeFloating(_default, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(_default, token.Default, $3.Tokens) - yylex.(*Parser).setToken(_default, token.CaseSeparator, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_default, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(_default, token.Default, $3.SkippedTokens) + yylex.(*Parser).setToken(_default, token.CaseSeparator, $3.SkippedTokens) } ; @@ -2090,8 +1842,6 @@ while_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDWHILE ';' { @@ -2103,12 +1853,10 @@ while_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -2118,8 +1866,6 @@ elseif_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | elseif_list T_ELSEIF parenthesis_expr statement { @@ -2130,9 +1876,7 @@ elseif_list: _elseIf.GetNode().Position = position.NewTokenNodePosition($2, $4) // save comments - yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.SkippedTokens) } ; @@ -2141,8 +1885,6 @@ new_elseif_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | new_elseif_list T_ELSEIF parenthesis_expr ':' inner_statement_list { @@ -2157,10 +1899,8 @@ new_elseif_list: _elseIf.GetNode().Position = position.NewTokenNodeListPosition($2, $5) // save comments - yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $4.SkippedTokens) } ; @@ -2169,8 +1909,6 @@ else_single: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ELSE statement { @@ -2180,9 +1918,7 @@ else_single: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -2191,8 
+1927,6 @@ new_else_single: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ELSE ':' inner_statement_list { @@ -2206,10 +1940,8 @@ new_else_single: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $2.SkippedTokens) } ; @@ -2218,14 +1950,10 @@ parameter_list: non_empty_parameter_list { $$ = $1; - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2233,17 +1961,13 @@ non_empty_parameter_list: parameter { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_parameter_list ',' parameter { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -2256,18 +1980,18 @@ parameter: var variable ast.Vertex variable = &ast.ExprVariable{ast.Node{}, identifier} variable.GetNode().Position = position.NewTokenPosition($4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $4.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens) if $3 != nil { variable = &ast.Variadic{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($3, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $3.SkippedTokens) } if $2 != nil { variable = &ast.Reference{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($2, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $2.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $2.SkippedTokens) } $$ = &ast.Parameter{ast.Node{}, $1, variable, nil} @@ -2281,8 +2005,6 @@ parameter: } else { $$.GetNode().Position = position.NewTokenPosition($4) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | optional_class_type is_reference is_variadic T_VARIABLE '=' static_scalar { @@ -2292,19 +2014,19 @@ parameter: var variable ast.Vertex variable = &ast.ExprVariable{ast.Node{}, identifier} variable.GetNode().Position = position.NewTokenPosition($4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $4.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.End, $5.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.End, $5.SkippedTokens) if $3 != nil { variable = &ast.Variadic{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($3, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $3.SkippedTokens) } if $2 != nil { variable = &ast.Reference{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($2, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $2.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $2.SkippedTokens) } $$ = &ast.Parameter{ast.Node{}, $1, variable, $6} @@ -2318,8 +2040,6 @@ parameter: } else { $$.GetNode().Position = 
position.NewTokenNodePosition($4, $6) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2328,8 +2048,6 @@ optional_class_type: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ARRAY { @@ -2339,9 +2057,7 @@ optional_class_type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_CALLABLE { @@ -2351,15 +2067,11 @@ optional_class_type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | fully_qualified_class_name { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2373,10 +2085,8 @@ function_call_parameter_list: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $2.SkippedTokens) } | '(' non_empty_function_call_parameter_list ')' { @@ -2386,10 +2096,8 @@ function_call_parameter_list: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | '(' yield_expr ')' { @@ -2401,10 +2109,8 @@ function_call_parameter_list: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } ; @@ -2413,17 +2119,13 @@ non_empty_function_call_parameter_list: function_call_parameter { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_function_call_parameter_list ',' function_call_parameter { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -2437,8 +2139,6 @@ function_call_parameter: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | variable { @@ -2449,8 +2149,6 @@ function_call_parameter: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '&' w_variable { @@ -2460,9 +2158,7 @@ function_call_parameter: $$.GetNode().Position = position.NewNodePosition($2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + 
yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_ELLIPSIS expr { @@ -2472,9 +2168,7 @@ function_call_parameter: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -2484,15 +2178,11 @@ global_var_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | global_var { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2508,9 +2198,7 @@ global_var: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '$' r_variable { @@ -2520,9 +2208,7 @@ global_var: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '$' '{' expr '}' { @@ -2532,11 +2218,9 @@ global_var: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($3, token.Start, append($2.Tokens, $3.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($3, token.End, append($3.GetNode().Tokens[token.End], $4.Tokens...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($3, token.Start, append($2.SkippedTokens, $3.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($3, token.End, append($3.GetNode().Tokens[token.End], $4.SkippedTokens...)) } ; @@ -2555,10 +2239,8 @@ static_var_list: staticVar.GetNode().Position = position.NewTokenPosition($3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(staticVar, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(staticVar, token.Start, $3.SkippedTokens) } | static_var_list ',' T_VARIABLE '=' static_scalar { @@ -2573,11 +2255,9 @@ static_var_list: staticVar.GetNode().Position = position.NewTokenNodePosition($3, $5) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(staticVar, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(staticVar, token.Var, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(staticVar, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(staticVar, token.Var, $4.SkippedTokens) } | T_VARIABLE { @@ -2592,9 +2272,7 @@ static_var_list: staticVar.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(staticVar, token.Start, $1.Tokens) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(staticVar, token.Start, $1.SkippedTokens) } | T_VARIABLE '=' static_scalar { @@ -2609,10 +2287,8 @@ static_var_list: staticVar.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(staticVar, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(staticVar, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(staticVar, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(staticVar, token.Var, $2.SkippedTokens) } ; @@ -2621,14 +2297,10 @@ class_statement_list: class_statement_list class_statement { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2643,10 +2315,8 @@ class_statement: // save comments yylex.(*Parser).MoveFreeFloating($1[0], $$) - yylex.(*Parser).setFreeFloating($$, token.PropertyList, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.PropertyList, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | class_constant_declaration ';' { @@ -2656,16 +2326,12 @@ class_statement: $$.GetNode().Position = position.NewNodeTokenPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.ConstList, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.ConstList, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | trait_use_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | method_modifiers function is_reference T_STRING '(' parameter_list ')' method_body { @@ -2683,20 +2349,18 @@ class_statement: // save comments if len($1) > 0 { yylex.(*Parser).MoveFreeFloating($1[0], $$) - yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Start, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $2.SkippedTokens) } if $3 == nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $4.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Function, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.Name, $5.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ParameterList, $7.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ParameterList, $7.SkippedTokens) } ; @@ -2709,9 +2373,7 @@ trait_use_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -2719,17 +2381,13 @@ trait_list: fully_qualified_class_name { $$ = 
[]ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | trait_list ',' fully_qualified_class_name { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -2741,10 +2399,8 @@ trait_adaptations: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $1.SkippedTokens) } | '{' trait_adaptation_list '}' { @@ -2753,10 +2409,8 @@ trait_adaptations: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AdaptationList, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AdaptationList, $3.SkippedTokens) } ; @@ -2764,14 +2418,10 @@ trait_adaptation_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_trait_adaptation_list { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2779,14 +2429,10 @@ non_empty_trait_adaptation_list: trait_adaptation_statement { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_trait_adaptation_list trait_adaptation_statement { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2796,20 +2442,16 @@ trait_adaptation_statement: $$ = $1; // save comments - yylex.(*Parser).setFreeFloating($$, token.NameList, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.NameList, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | trait_alias ';' { $$ = $1; // save comments - yylex.(*Parser).setFreeFloating($$, token.Alias, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Alias, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } ; @@ -2823,9 +2465,7 @@ trait_precedence: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) } ; @@ -2833,17 +2473,13 @@ trait_reference_list: fully_qualified_class_name { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | trait_reference_list ',' fully_qualified_class_name { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -2858,15 +2494,11 @@ trait_method_reference: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, 
&yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | trait_method_reference_fully_qualified { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2882,10 +2514,8 @@ trait_method_reference_fully_qualified: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $2.SkippedTokens) } ; @@ -2901,10 +2531,8 @@ trait_alias: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $4.SkippedTokens) } | trait_method_reference T_AS member_modifier { @@ -2915,9 +2543,7 @@ trait_alias: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) } ; @@ -2925,14 +2551,10 @@ trait_modifiers: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | member_modifier { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2945,10 +2567,8 @@ method_body: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $1.SkippedTokens) } | '{' inner_statement_list '}' { @@ -2958,10 +2578,8 @@ method_body: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) } ; @@ -2969,8 +2587,6 @@ variable_modifiers: non_empty_member_modifiers { $$ = $1; - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_VAR { @@ -2981,9 +2597,7 @@ variable_modifiers: modifier.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(modifier, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(modifier, token.Start, $1.SkippedTokens) } ; @@ -2991,14 +2605,10 @@ method_modifiers: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_member_modifiers { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3006,14 +2616,10 @@ non_empty_member_modifiers: member_modifier { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_member_modifiers member_modifier { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3026,9 +2632,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save 
comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_PROTECTED { @@ -3038,9 +2642,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_PRIVATE { @@ -3050,9 +2652,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_STATIC { @@ -3062,9 +2662,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_ABSTRACT { @@ -3074,9 +2672,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FINAL { @@ -3086,9 +2682,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -3106,10 +2700,8 @@ class_variable_declaration: property.GetNode().Position = position.NewTokenPosition($3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(property, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(property, token.Start, $3.SkippedTokens) } | class_variable_declaration ',' T_VARIABLE '=' static_scalar { @@ -3124,11 +2716,9 @@ class_variable_declaration: property.GetNode().Position = position.NewTokenNodePosition($3, $5) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(property, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(property, token.Var, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(property, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(property, token.Var, $4.SkippedTokens) } | T_VARIABLE { @@ -3143,9 +2733,7 @@ class_variable_declaration: property.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(property, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(property, token.Start, $1.SkippedTokens) } | T_VARIABLE '=' static_scalar { @@ -3160,10 +2748,8 @@ class_variable_declaration: property.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(property, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(property, 
token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(property, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(property, token.Var, $2.SkippedTokens) } ; @@ -3183,11 +2769,9 @@ class_constant_declaration: $1.GetNode().Position = position.NewNodesPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating(lastConst, token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Name, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastConst, token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Name, $4.SkippedTokens) } | T_CONST T_STRING '=' static_scalar { @@ -3201,11 +2785,9 @@ class_constant_declaration: $$.GetNode().Position = position.NewTokenNodePosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Name, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Name, $3.SkippedTokens) } ; @@ -3215,15 +2797,11 @@ echo_expr_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | expr { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3232,14 +2810,10 @@ for_expr: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_for_expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3249,15 +2823,11 @@ non_empty_for_expr: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | expr { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3265,14 +2835,10 @@ chaining_method_or_property: chaining_method_or_property variable_property { $$ = append($1, $2...) 
- - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | variable_property { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3286,10 +2852,8 @@ chaining_dereference: fetch.GetNode().Position = position.NewNodePosition($3) // save comments - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.SkippedTokens) } | '[' dim_offset ']' { @@ -3300,10 +2864,8 @@ chaining_dereference: fetch.GetNode().Position = position.NewNodePosition($2) // save comments - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $3.SkippedTokens) } ; @@ -3311,20 +2873,14 @@ chaining_instance_call: chaining_dereference chaining_method_or_property { $$ = append($1, $2...) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | chaining_dereference { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | chaining_method_or_property { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3332,14 +2888,10 @@ instance_call: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | chaining_instance_call { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3356,9 +2908,7 @@ new_expr: } // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -3373,12 +2923,10 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.List, $2.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.List, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $5.SkippedTokens) } | variable '=' expr { @@ -3389,9 +2937,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable '=' '&' variable { @@ -3402,10 +2948,8 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Equal, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Equal, $3.SkippedTokens) } | variable '=' '&' T_NEW class_name_reference ctor_arguments { @@ -3428,11 +2972,9 @@ expr_without_variable: // 
save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Equal, $3.Tokens) - yylex.(*Parser).setFreeFloating(_new, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Equal, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(_new, token.Start, $4.SkippedTokens) } | T_CLONE expr { @@ -3442,9 +2984,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | variable T_PLUS_EQUAL expr { @@ -3454,9 +2994,7 @@ expr_without_variable: $$.GetNode().Position = position.NewNodesPosition($1, $3) yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_MINUS_EQUAL expr { @@ -3467,9 +3005,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_MUL_EQUAL expr { @@ -3480,9 +3016,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_POW_EQUAL expr { @@ -3493,9 +3027,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_DIV_EQUAL expr { @@ -3506,9 +3038,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_CONCAT_EQUAL expr { @@ -3519,9 +3049,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_MOD_EQUAL expr { @@ -3532,9 +3060,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_AND_EQUAL expr { @@ -3545,9 +3071,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_OR_EQUAL expr { @@ -3558,9 +3082,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, 
$$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_XOR_EQUAL expr { @@ -3571,9 +3093,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_SL_EQUAL expr { @@ -3584,9 +3104,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_SR_EQUAL expr { @@ -3597,9 +3115,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | rw_variable T_INC { @@ -3610,9 +3126,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | T_INC rw_variable { @@ -3622,9 +3136,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | rw_variable T_DEC { @@ -3635,9 +3147,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | T_DEC rw_variable { @@ -3647,9 +3157,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | expr T_BOOLEAN_OR expr { @@ -3660,9 +3168,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_BOOLEAN_AND expr { @@ -3673,9 +3179,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_LOGICAL_OR expr { @@ -3686,9 +3190,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_LOGICAL_AND expr { @@ -3699,9 +3201,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, 
token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_LOGICAL_XOR expr { @@ -3712,9 +3212,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '|' expr { @@ -3725,9 +3223,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '&' expr { @@ -3738,9 +3234,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '^' expr { @@ -3751,9 +3245,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '.' expr { @@ -3764,9 +3256,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '+' expr { @@ -3777,9 +3267,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '-' expr { @@ -3790,9 +3278,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '*' expr { @@ -3803,9 +3289,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_POW expr { @@ -3816,9 +3300,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '/' expr { @@ -3829,9 +3311,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '%' expr { @@ -3842,9 +3322,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | 
expr T_SL expr { @@ -3855,9 +3333,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_SR expr { @@ -3868,9 +3344,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | '+' expr %prec T_INC { @@ -3880,9 +3354,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '-' expr %prec T_INC { @@ -3892,9 +3364,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '!' expr { @@ -3904,9 +3374,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '~' expr { @@ -3916,9 +3384,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | expr T_IS_IDENTICAL expr { @@ -3929,9 +3395,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_NOT_IDENTICAL expr { @@ -3942,9 +3406,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_EQUAL expr { @@ -3955,9 +3417,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_NOT_EQUAL expr { @@ -3968,10 +3428,8 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.Equal, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.Equal, $2.SkippedTokens) } | expr '<' expr { @@ -3982,9 +3440,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_SMALLER_OR_EQUAL expr { @@ -3995,9 +3451,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '>' expr { @@ -4008,9 +3462,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_GREATER_OR_EQUAL expr { @@ -4021,9 +3473,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_INSTANCEOF class_name_reference { @@ -4034,29 +3484,23 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | parenthesis_expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | new_expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '(' new_expr ')' instance_call { $$ = $2 // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.Tokens, $$.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.Tokens...)) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.SkippedTokens, $$.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.SkippedTokens...)) for _, n := range($4) { switch nn := n.(type) { @@ -4079,8 +3523,6 @@ expr_without_variable: // save position $$.GetNode().Position = position.NewNodesPosition($$, n) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr '?' expr ':' expr { @@ -4091,10 +3533,8 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Cond, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.True, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.True, $4.SkippedTokens) } | expr '?' 
':' expr { @@ -4105,16 +3545,12 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Cond, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.True, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.True, $3.SkippedTokens) } | internal_functions_in_yacc { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_INT_CAST expr { @@ -4124,10 +3560,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_DOUBLE_CAST expr { @@ -4137,10 +3571,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_STRING_CAST expr { @@ -4150,10 +3582,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_ARRAY_CAST expr { @@ -4163,10 +3593,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_OBJECT_CAST expr { @@ -4176,10 +3604,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_BOOL_CAST expr { @@ -4189,10 +3615,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_UNSET_CAST expr { @@ -4202,10 +3626,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) 
+ yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_EXIT exit_expr { @@ -4223,9 +3645,7 @@ expr_without_variable: } // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '@' expr { @@ -4235,27 +3655,19 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | combined_scalar_offset { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | combined_scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '`' backticks_expr '`' { @@ -4265,9 +3677,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_PRINT expr { @@ -4277,9 +3687,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_YIELD { @@ -4289,9 +3697,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | function is_reference '(' parameter_list ')' lexical_vars '{' inner_statement_list '}' { @@ -4301,23 +3707,21 @@ expr_without_variable: $$.GetNode().Position = position.NewTokensPosition($1, $9) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) if $2 == nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $3.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $3.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Function, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Ampersand, $3.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Ampersand, $3.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.ParameterList, $5.Tokens) - yylex.(*Parser).setFreeFloating($$, token.LexicalVars, $7.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $9.Tokens) + yylex.(*Parser).setFreeFloating($$, token.ParameterList, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.LexicalVars, $7.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $9.SkippedTokens) // normalize if $6 == nil { yylex.(*Parser).setFreeFloatingTokens($$, token.Params, $$.GetNode().Tokens[token.LexicalVars]); delete($$.GetNode().Tokens, token.LexicalVars) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_STATIC function is_reference '(' parameter_list ')' lexical_vars '{' inner_statement_list '}' { @@ -4327,24 +3731,22 @@ 
expr_without_variable: $$.GetNode().Position = position.NewTokensPosition($1, $10) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Static, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Static, $2.SkippedTokens) if $3 == nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $4.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Function, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.ParameterList, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.LexicalVars, $8.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $10.Tokens) + yylex.(*Parser).setFreeFloating($$, token.ParameterList, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.LexicalVars, $8.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $10.SkippedTokens) // normalize if $7 == nil { yylex.(*Parser).setFreeFloatingTokens($$, token.Params, $$.GetNode().Tokens[token.LexicalVars]); delete($$.GetNode().Tokens, token.LexicalVars) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4357,9 +3759,7 @@ yield_expr: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_YIELD variable { @@ -4369,9 +3769,7 @@ yield_expr: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_YIELD expr T_DOUBLE_ARROW expr_without_variable { @@ -4381,10 +3779,8 @@ yield_expr: $$.GetNode().Position = position.NewTokenNodePosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) } | T_YIELD expr T_DOUBLE_ARROW variable { @@ -4394,10 +3790,8 @@ yield_expr: $$.GetNode().Position = position.NewTokenNodePosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) } ; @@ -4410,10 +3804,8 @@ combined_scalar_offset: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | combined_scalar_offset '[' dim_offset ']' 
{ @@ -4423,10 +3815,8 @@ combined_scalar_offset: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | T_CONSTANT_ENCAPSED_STRING '[' dim_offset ']' { @@ -4438,11 +3828,9 @@ combined_scalar_offset: $$.GetNode().Position = position.NewNodeTokenPosition(str, $4) // save comments - yylex.(*Parser).setFreeFloating(str, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(str, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | general_constant '[' dim_offset ']' { @@ -4452,10 +3840,8 @@ combined_scalar_offset: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } ; @@ -4468,11 +3854,9 @@ combined_scalar: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Array, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Array, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.SkippedTokens) } | '[' array_pair_list ']' { @@ -4482,10 +3866,8 @@ combined_scalar: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.SkippedTokens) } ; @@ -4500,8 +3882,6 @@ lexical_vars: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_USE '(' lexical_var_list ')' { @@ -4511,11 +3891,9 @@ lexical_vars: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Use, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.LexicalVarList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Use, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.LexicalVarList, $4.SkippedTokens) } ; @@ -4531,10 +3909,8 @@ lexical_var_list: variable.GetNode().Position = position.NewTokenPosition($3) // save comments - 
yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $3.SkippedTokens) } | lexical_var_list ',' '&' T_VARIABLE { @@ -4549,11 +3925,9 @@ lexical_var_list: reference.GetNode().Position = position.NewTokensPosition($3, $4) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(reference, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(reference, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens) } | T_VARIABLE { @@ -4566,9 +3940,7 @@ lexical_var_list: variable.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(variable, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(variable, token.Start, $1.SkippedTokens) } | '&' T_VARIABLE { @@ -4583,10 +3955,8 @@ lexical_var_list: reference.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating(reference, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(reference, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $2.SkippedTokens) } ; @@ -4599,8 +3969,6 @@ function_call: // save position name.GetNode().Position = position.NewNodeListPosition($1) $$.GetNode().Position = position.NewNodesPosition(name, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_NAMESPACE T_NS_SEPARATOR namespace_name function_call_parameter_list { @@ -4612,10 +3980,8 @@ function_call: $$.GetNode().Position = position.NewNodesPosition(funcName, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(funcName, token.Namespace, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(funcName, token.Namespace, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name function_call_parameter_list { @@ -4627,9 +3993,7 @@ function_call: $$.GetNode().Position = position.NewNodesPosition(funcName, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | class_name T_PAAMAYIM_NEKUDOTAYIM variable_name function_call_parameter_list { @@ -4640,9 +4004,7 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | class_name T_PAAMAYIM_NEKUDOTAYIM variable_without_objects function_call_parameter_list { @@ -4653,9 +4015,7 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, 
token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | variable_class_name T_PAAMAYIM_NEKUDOTAYIM variable_name function_call_parameter_list { @@ -4666,9 +4026,7 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | variable_class_name T_PAAMAYIM_NEKUDOTAYIM variable_without_objects function_call_parameter_list { @@ -4679,9 +4037,7 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | variable_without_objects function_call_parameter_list { @@ -4692,8 +4048,6 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4706,9 +4060,7 @@ class_name: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | namespace_name { @@ -4716,8 +4068,6 @@ class_name: // save position $$.GetNode().Position = position.NewNodeListPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_NAMESPACE T_NS_SEPARATOR namespace_name { @@ -4727,10 +4077,8 @@ class_name: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -4740,9 +4088,7 @@ class_name: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -4753,8 +4099,6 @@ fully_qualified_class_name: // save position $$.GetNode().Position = position.NewNodeListPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_NAMESPACE T_NS_SEPARATOR namespace_name { @@ -4764,10 +4108,8 @@ fully_qualified_class_name: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -4777,9 +4119,7 @@ fully_qualified_class_name: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -4787,14 +4127,10 @@ class_name_reference: class_name { $$ = $1 - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | dynamic_class_name_reference { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4804,7 +4140,7 @@ dynamic_class_name_reference: $$ = $1 // save comments - yylex.(*Parser).setFreeFloating($3[0], token.Var, $2.Tokens) + yylex.(*Parser).setFreeFloating($3[0], token.Var, $2.SkippedTokens) for _, n := range($3) { switch nn := n.(type) { @@ -4837,14 +4173,10 @@ dynamic_class_name_reference: yylex.(*Parser).MoveFreeFloating(nn.Var, $$) } } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | base_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4853,14 +4185,10 @@ dynamic_class_name_variable_properties: dynamic_class_name_variable_properties dynamic_class_name_variable_property { $$ = append($1, $2...) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4871,9 +4199,7 @@ dynamic_class_name_variable_property: $$ = $2 // save comments - yylex.(*Parser).setFreeFloating($2[0], token.Var, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($2[0], token.Var, $1.SkippedTokens) } ; @@ -4881,8 +4207,6 @@ exit_expr: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '(' ')' { @@ -4892,16 +4216,12 @@ exit_expr: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $2.SkippedTokens) } | parenthesis_expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4909,8 +4229,6 @@ backticks_expr: /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ENCAPSED_AND_WHITESPACE { @@ -4919,14 +4237,10 @@ backticks_expr: // save position part.GetNode().Position = position.NewTokenPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | encaps_list { $$ = $1; - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4934,14 +4248,10 @@ ctor_arguments: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | function_call_parameter_list { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4954,9 +4264,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_DNUMBER { @@ -4966,9 +4274,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_CONSTANT_ENCAPSED_STRING { @@ -4978,9 +4284,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_LINE { @@ -4990,9 +4294,7 @@ common_scalar: 
$$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FILE { @@ -5002,9 +4304,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_DIR { @@ -5014,9 +4314,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_TRAIT_C { @@ -5026,9 +4324,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_METHOD_C { @@ -5038,9 +4334,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FUNC_C { @@ -5050,9 +4344,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NS_C { @@ -5062,9 +4354,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_START_HEREDOC T_ENCAPSED_AND_WHITESPACE T_END_HEREDOC { @@ -5076,9 +4366,7 @@ common_scalar: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_START_HEREDOC T_END_HEREDOC { @@ -5088,9 +4376,7 @@ common_scalar: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -5106,10 +4392,8 @@ static_class_constant: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $3.SkippedTokens) } ; @@ -5117,8 +4401,6 @@ static_scalar: static_scalar_value { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5126,14 +4408,10 @@ static_scalar_value: common_scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | static_class_name_scalar { $$ = $1 - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | namespace_name { @@ -5143,8 +4421,6 @@ static_scalar_value: // save position name.GetNode().Position = position.NewNodeListPosition($1) $$.GetNode().Position = position.NewNodePosition(name) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_NAMESPACE T_NS_SEPARATOR namespace_name { @@ -5156,10 +4432,8 @@ static_scalar_value: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -5171,9 +4445,7 @@ static_scalar_value: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_ARRAY '(' static_array_pair_list ')' { @@ -5183,11 +4455,9 @@ static_scalar_value: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Array, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Array, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.SkippedTokens) } | '[' static_array_pair_list ']' { @@ -5197,16 +4467,12 @@ static_scalar_value: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.SkippedTokens) } | static_class_constant { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_CLASS_C { @@ -5216,15 +4482,11 @@ static_scalar_value: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | static_operation { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5237,10 +4499,8 @@ static_operation: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | static_scalar_value '+' static_scalar_value { @@ -5251,9 +4511,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, 
$2.SkippedTokens) } | static_scalar_value '-' static_scalar_value { @@ -5264,9 +4522,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '*' static_scalar_value { @@ -5277,9 +4533,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_POW static_scalar_value { @@ -5290,9 +4544,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '/' static_scalar_value { @@ -5303,9 +4555,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '%' static_scalar_value { @@ -5316,9 +4566,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | '!' static_scalar_value { @@ -5328,9 +4576,7 @@ static_operation: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '~' static_scalar_value { @@ -5340,9 +4586,7 @@ static_operation: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | static_scalar_value '|' static_scalar_value { @@ -5353,9 +4597,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '&' static_scalar_value { @@ -5366,9 +4608,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '^' static_scalar_value { @@ -5379,9 +4619,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_SL static_scalar_value { @@ -5392,9 +4630,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, 
$2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_SR static_scalar_value { @@ -5405,9 +4641,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '.' static_scalar_value { @@ -5418,9 +4652,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_LOGICAL_XOR static_scalar_value { @@ -5431,9 +4663,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_LOGICAL_AND static_scalar_value { @@ -5444,9 +4674,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_LOGICAL_OR static_scalar_value { @@ -5457,9 +4685,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_BOOLEAN_AND static_scalar_value { @@ -5470,9 +4696,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_BOOLEAN_OR static_scalar_value { @@ -5483,9 +4707,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_IS_IDENTICAL static_scalar_value { @@ -5496,9 +4718,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_IS_NOT_IDENTICAL static_scalar_value { @@ -5509,9 +4729,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_IS_EQUAL static_scalar_value { @@ -5522,9 +4740,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | 
static_scalar_value T_IS_NOT_EQUAL static_scalar_value { @@ -5535,10 +4751,8 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.Equal, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.Equal, $2.SkippedTokens) } | static_scalar_value '<' static_scalar_value { @@ -5549,9 +4763,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '>' static_scalar_value { @@ -5562,9 +4774,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_IS_SMALLER_OR_EQUAL static_scalar_value { @@ -5575,9 +4785,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_IS_GREATER_OR_EQUAL static_scalar_value { @@ -5588,9 +4796,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '?' ':' static_scalar_value { @@ -5601,10 +4807,8 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Cond, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.True, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.True, $3.SkippedTokens) } | static_scalar_value '?' 
static_scalar_value ':' static_scalar_value { @@ -5615,10 +4819,8 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Cond, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.True, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.True, $4.SkippedTokens) } | '+' static_scalar_value { @@ -5628,9 +4830,7 @@ static_operation: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '-' static_scalar_value { @@ -5640,19 +4840,15 @@ static_operation: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '(' static_scalar_value ')' { $$ = $2 // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.Tokens, $$.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.Tokens...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.SkippedTokens, $$.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.SkippedTokens...)) } ; @@ -5660,8 +4856,6 @@ general_constant: class_constant { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | namespace_name { @@ -5671,8 +4865,6 @@ general_constant: // save position name.GetNode().Position = position.NewNodeListPosition($1) $$.GetNode().Position = position.NewNodePosition(name) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_NAMESPACE T_NS_SEPARATOR namespace_name { @@ -5684,10 +4876,8 @@ general_constant: $$.GetNode().Position = position.NewNodePosition(name) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Namespace, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.Namespace, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -5699,9 +4889,7 @@ general_constant: $$.GetNode().Position = position.NewNodePosition(name) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -5716,27 +4904,19 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | general_constant { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_name_scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | common_scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '"' encaps_list '"' { @@ -5746,9 +4926,7 @@ scalar: $$.GetNode().Position = 
position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_START_HEREDOC encaps_list T_END_HEREDOC { @@ -5758,9 +4936,7 @@ scalar: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_CLASS_C { @@ -5770,9 +4946,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -5780,8 +4954,6 @@ static_array_pair_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_static_array_pair_list possible_comma { @@ -5789,10 +4961,8 @@ static_array_pair_list: // save comments if $2 != nil { - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5817,11 +4987,9 @@ non_empty_static_array_pair_list: arrayItem.GetNode().Position = position.NewNodesPosition($3, $5) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) yylex.(*Parser).MoveFreeFloating($3, arrayItem) - yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $4.SkippedTokens) } | non_empty_static_array_pair_list ',' static_scalar_value { @@ -5832,10 +5000,8 @@ non_empty_static_array_pair_list: arrayItem.GetNode().Position = position.NewNodePosition($3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) yylex.(*Parser).MoveFreeFloating($3, arrayItem) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | static_scalar_value T_DOUBLE_ARROW static_scalar_value { @@ -5847,9 +5013,7 @@ non_empty_static_array_pair_list: // save comments yylex.(*Parser).MoveFreeFloating($1, arrayItem) - yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $2.SkippedTokens) } | static_scalar_value { @@ -5861,8 +5025,6 @@ non_empty_static_array_pair_list: // save comments yylex.(*Parser).MoveFreeFloating($1, arrayItem) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5870,14 +5032,10 @@ expr: r_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr_without_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5890,10 +5048,8 @@ parenthesis_expr: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, 
token.End, $3.SkippedTokens) } | '(' yield_expr ')' { @@ -5903,10 +5059,8 @@ parenthesis_expr: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } ; @@ -5915,8 +5069,6 @@ r_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5925,8 +5077,6 @@ w_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5934,8 +5084,6 @@ rw_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5950,7 +5098,7 @@ variable: } // save comments - yylex.(*Parser).setFreeFloating($3[0], token.Var, $2.Tokens) + yylex.(*Parser).setFreeFloating($3[0], token.Var, $2.SkippedTokens) for _, n := range($3) { switch nn := n.(type) { @@ -5995,14 +5143,10 @@ variable: yylex.(*Parser).MoveFreeFloating(nn.Var, $$) } } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | base_variable_with_function_calls { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6010,14 +5154,10 @@ variable_properties: variable_properties variable_property { $$ = append($1, $2...) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6033,9 +5173,7 @@ variable_property: $$ = $2 // save comments - yylex.(*Parser).setFreeFloating($2[0], token.Var, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($2[0], token.Var, $1.SkippedTokens) } ; @@ -6049,10 +5187,8 @@ array_method_dereference: fetch.GetNode().Position = position.NewNodePosition($3) // save comments - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.SkippedTokens) } | method '[' dim_offset ']' { @@ -6063,10 +5199,8 @@ array_method_dereference: fetch.GetNode().Position = position.NewNodePosition($3) // save comments - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.SkippedTokens) } ; @@ -6077,8 +5211,6 @@ method: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6086,20 +5218,14 @@ method_or_not: method { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | array_method_dereference { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6107,8 +5233,6 @@ variable_without_objects: reference_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | simple_indirect_reference reference_variable { @@ -6119,8 +5243,6 @@ variable_without_objects: } $$ = $1.all[0] - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6134,9 +5256,7 @@ static_member: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | variable_class_name T_PAAMAYIM_NEKUDOTAYIM variable_without_objects { @@ -6147,9 +5267,7 @@ static_member: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } ; @@ -6157,8 +5275,6 @@ variable_class_name: reference_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6171,10 +5287,8 @@ array_function_dereference: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | function_call '[' dim_offset ']' { @@ -6184,10 +5298,8 @@ array_function_dereference: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } ; @@ -6195,20 +5307,14 @@ base_variable_with_function_calls: base_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | array_function_dereference { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | function_call { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6217,8 +5323,6 @@ base_variable: reference_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | simple_indirect_reference reference_variable { @@ -6229,14 +5333,10 @@ base_variable: } $$ = $1.all[0] - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | static_member { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6249,10 +5349,8 @@ reference_variable: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | reference_variable '{' expr '}' { @@ -6262,16 +5360,12 @@ reference_variable: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | compound_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ 
-6287,9 +5381,7 @@ compound_variable: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '$' '{' expr '}' { @@ -6299,11 +5391,9 @@ compound_variable: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($3, token.Start, append($2.Tokens, $3.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($3, token.End, append($3.GetNode().Tokens[token.End], $4.Tokens...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($3, token.Start, append($2.SkippedTokens, $3.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($3, token.End, append($3.GetNode().Tokens[token.End], $4.SkippedTokens...)) } ; @@ -6311,14 +5401,10 @@ dim_offset: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6327,8 +5413,6 @@ object_property: object_dim_list { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | variable_without_objects { @@ -6337,8 +5421,6 @@ object_property: // save position fetch.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6352,10 +5434,8 @@ object_dim_list: fetch.GetNode().Position = position.NewNodePosition($3) // save comments - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.SkippedTokens) } | object_dim_list '{' expr '}' { @@ -6366,10 +5446,8 @@ object_dim_list: fetch.GetNode().Position = position.NewNodePosition($3) // save comments - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.SkippedTokens) } | variable_name { @@ -6378,8 +5456,6 @@ object_dim_list: // save position fetch.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6392,9 +5468,7 @@ variable_name: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '{' expr '}' { @@ -6404,10 +5478,8 @@ variable_name: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.Tokens, $$.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.Tokens...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.SkippedTokens, 
$$.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.SkippedTokens...)) } ; @@ -6421,9 +5493,7 @@ simple_indirect_reference: n.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(n, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(n, token.Start, $1.SkippedTokens) } | simple_indirect_reference '$' { @@ -6438,9 +5508,7 @@ simple_indirect_reference: n.GetNode().Position = position.NewTokenPosition($2) // save comments - yylex.(*Parser).setFreeFloating(n, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(n, token.Start, $2.SkippedTokens) } ; @@ -6454,9 +5522,7 @@ assignment_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | assignment_list_element { @@ -6465,8 +5531,6 @@ assignment_list: } else { $$ = []ast.Vertex{$1} } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6481,8 +5545,6 @@ assignment_list_element: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_LIST '(' assignment_list ')' { @@ -6494,17 +5556,13 @@ assignment_list_element: $$.GetNode().Position = position.NewNodePosition(listNode) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.List, $2.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.List, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.SkippedTokens) } | /* empty */ { $$ = &ast.ExprArrayItem{ast.Node{}, false, nil, nil} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6513,8 +5571,6 @@ array_pair_list: /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_array_pair_list possible_comma { @@ -6526,10 +5582,8 @@ array_pair_list: // save comments if $2 != nil { - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6543,11 +5597,9 @@ non_empty_array_pair_list: arrayItem.GetNode().Position = position.NewNodesPosition($3, $5) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) yylex.(*Parser).MoveFreeFloating($3, arrayItem) - yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $4.SkippedTokens) } | non_empty_array_pair_list ',' expr { @@ -6558,10 +5610,8 @@ non_empty_array_pair_list: arrayItem.GetNode().Position = position.NewNodePosition($3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) yylex.(*Parser).MoveFreeFloating($3, arrayItem) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr T_DOUBLE_ARROW expr { @@ -6573,9 +5623,7 @@ non_empty_array_pair_list: // save comments yylex.(*Parser).MoveFreeFloating($1, arrayItem) - yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $2.SkippedTokens) } | expr { @@ -6587,8 +5635,6 @@ non_empty_array_pair_list: // save comments yylex.(*Parser).MoveFreeFloating($1, arrayItem) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_array_pair_list ',' expr T_DOUBLE_ARROW '&' w_variable { @@ -6601,12 +5647,10 @@ non_empty_array_pair_list: arrayItem.GetNode().Position = position.NewNodesPosition($3, $6) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) yylex.(*Parser).MoveFreeFloating($3, arrayItem) - yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $4.Tokens) - yylex.(*Parser).setFreeFloating(reference, token.Start, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating(reference, token.Start, $5.SkippedTokens) } | non_empty_array_pair_list ',' '&' w_variable { @@ -6619,10 +5663,8 @@ non_empty_array_pair_list: arrayItem.GetNode().Position = position.NewTokenNodePosition($3, $4) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(arrayItem, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(arrayItem, token.Start, $3.SkippedTokens) } | expr T_DOUBLE_ARROW '&' w_variable { @@ -6636,10 +5678,8 @@ non_empty_array_pair_list: // save comments yylex.(*Parser).MoveFreeFloating($1, arrayItem) - yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $2.Tokens) - yylex.(*Parser).setFreeFloating(reference, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(reference, token.Start, $3.SkippedTokens) } | '&' w_variable { @@ -6652,9 +5692,7 @@ non_empty_array_pair_list: arrayItem.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating(arrayItem, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(arrayItem, token.Start, $1.SkippedTokens) } ; @@ -6662,8 +5700,6 @@ encaps_list: encaps_list encaps_var { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | encaps_list T_ENCAPSED_AND_WHITESPACE { @@ -6674,15 +5710,11 @@ encaps_list: encapsed.GetNode().Position = position.NewTokenPosition($2) // save comments - yylex.(*Parser).setFreeFloating(encapsed, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(encapsed, token.Start, $2.SkippedTokens) } | encaps_var { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ENCAPSED_AND_WHITESPACE encaps_var { @@ -6693,9 +5725,7 @@ encaps_list: encapsed.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(encapsed, token.Start, $1.Tokens) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(encapsed, token.Start, $1.SkippedTokens) } ; @@ -6710,9 +5740,7 @@ encaps_var: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_VARIABLE '[' encaps_var_offset ']' { @@ -6726,10 +5754,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | T_VARIABLE T_OBJECT_OPERATOR T_STRING { @@ -6745,10 +5771,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloating(fetch, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(fetch, token.Start, $3.SkippedTokens) } | T_DOLLAR_OPEN_CURLY_BRACES expr '}' { @@ -6760,10 +5784,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setToken($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | T_DOLLAR_OPEN_CURLY_BRACES T_STRING_VARNAME '}' { @@ -6777,10 +5799,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setToken($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | T_DOLLAR_OPEN_CURLY_BRACES T_STRING_VARNAME '[' expr ']' '}' { @@ -6794,22 +5814,18 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $6) // save comments - yylex.(*Parser).setToken(variable, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $3.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $5.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken(variable, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $3.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $5.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $6.SkippedTokens) } | T_CURLY_OPEN variable '}' { $$ = $2; // save comments - yylex.(*Parser).setToken($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } ; @@ -6822,9 +5838,7 @@ encaps_var_offset: $$.GetNode().Position = 
position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NUM_STRING { @@ -6839,9 +5853,7 @@ encaps_var_offset: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_VARIABLE { @@ -6853,9 +5865,7 @@ encaps_var_offset: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -6868,11 +5878,9 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Isset, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.VarList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Isset, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $4.SkippedTokens) } | T_EMPTY '(' variable ')' { @@ -6884,11 +5892,9 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | T_EMPTY '(' expr ')' { @@ -6900,11 +5906,9 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | T_INCLUDE expr { @@ -6914,9 +5918,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_INCLUDE_ONCE expr { @@ -6926,9 +5928,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_EVAL '(' expr ')' { @@ -6940,11 +5940,9 @@ internal_functions_in_yacc: 
$$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | T_REQUIRE expr { @@ -6954,9 +5952,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_REQUIRE_ONCE expr { @@ -6966,9 +5962,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -6976,17 +5970,13 @@ isset_variables: isset_variable { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | isset_variables ',' isset_variable { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -6994,14 +5984,10 @@ isset_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr_without_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -7017,10 +6003,8 @@ class_constant: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $3.SkippedTokens) } | variable_class_name T_PAAMAYIM_NEKUDOTAYIM T_STRING { @@ -7033,10 +6017,8 @@ class_constant: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $3.SkippedTokens) } ; @@ -7052,10 +6034,8 @@ static_class_name_scalar: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $3.SkippedTokens) } ; @@ -7071,10 +6051,8 @@ class_name_scalar: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + 
yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $3.SkippedTokens) } ; diff --git a/internal/php5/php5_bench_test.go b/internal/php5/php5_bench_test.go index 49efe90..3cbd675 100644 --- a/internal/php5/php5_bench_test.go +++ b/internal/php5/php5_bench_test.go @@ -414,7 +414,7 @@ CAD; ` for n := 0; n < b.N; n++ { - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() } diff --git a/internal/php5/php5_test.go b/internal/php5/php5_test.go index b1a1ada..051917b 100644 --- a/internal/php5/php5_test.go +++ b/internal/php5/php5_test.go @@ -22212,11 +22212,12 @@ func TestPhp5(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22351,11 +22352,12 @@ func TestPhp5Strings(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22579,11 +22581,12 @@ CAD; }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22606,7 +22609,7 @@ func TestPhp5ControlCharsErrors(t *testing.T) { parserErrors = append(parserErrors, e) } - lexer := scanner.NewLexer([]byte(src), "5.6", false, errorHandlerFunc) + lexer := scanner.NewLexer([]byte(src), "5.6", errorHandlerFunc) php5parser := php5.NewParser(lexer, errorHandlerFunc) php5parser.Parse() assert.DeepEqual(t, expected, parserErrors) diff --git a/internal/php7/parser.go b/internal/php7/parser.go index a910f9c..32f3fa5 100644 --- a/internal/php7/parser.go +++ b/internal/php7/parser.go @@ -12,7 +12,7 @@ import ( // Parser structure type Parser struct { Lexer *scanner.Lexer - currentToken *scanner.Token + currentToken *token.Token rootNode ast.Vertex errHandlerFunc func(*errors.Error) } @@ -39,8 +39,7 @@ func (p *Parser) Error(msg string) { return } - var pos = p.currentToken.Position - p.errHandlerFunc(errors.NewError(msg, &pos)) + p.errHandlerFunc(errors.NewError(msg, p.currentToken.Position)) } // Parse the php7 Parser entrypoint @@ -82,7 +81,7 @@ func (p *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) { delete(src.GetNode().Tokens, token.Start) } -func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []token.Token) { +func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []*token.Token) { if len(tokens) == 0 { return } @@ -98,7 +97,7 @@ func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []to } } -func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []token.Token) { 
+func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []*token.Token) { if len(tokens) == 0 { return } @@ -108,14 +107,14 @@ func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, token *dstCollection = make(token.Collection) } - (*dstCollection)[pos] = make([]token.Token, 0) + (*dstCollection)[pos] = make([]*token.Token, 0) for _, v := range tokens { (*dstCollection)[pos] = append((*dstCollection)[pos], v) } } -func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []token.Token) { +func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []*token.Token) { if len(tokens) == 0 { return } @@ -141,7 +140,7 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast. } if semiColon[0].Value[0] == ';' { - p.setFreeFloatingTokens(prevNode, token.SemiColon, []token.Token{ + p.setFreeFloatingTokens(prevNode, token.SemiColon, []*token.Token{ { ID: token.ID(';'), Value: semiColon[0].Value[0:1], @@ -155,28 +154,18 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast. tlen = 3 } - phpCloseTag := []token.Token{} + phpCloseTag := []*token.Token{} if vlen-tlen > 1 { - phpCloseTag = append(phpCloseTag, token.Token{ + phpCloseTag = append(phpCloseTag, &token.Token{ ID: token.T_WHITESPACE, Value: semiColon[0].Value[1 : vlen-tlen], }) } - phpCloseTag = append(phpCloseTag, token.Token{ + phpCloseTag = append(phpCloseTag, &token.Token{ ID: T_CLOSE_TAG, Value: semiColon[0].Value[vlen-tlen:], }) p.setFreeFloatingTokens(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...)) } - -func (p *Parser) returnTokenToPool(yyDollar []yySymType, yyVAL *yySymType) { - for i := 1; i < len(yyDollar); i++ { - if yyDollar[i].token != nil { - p.Lexer.ReturnTokenToPool(yyDollar[i].token) - } - yyDollar[i].token = nil - } - yyVAL.token = nil -} diff --git a/internal/php7/parser_test.go b/internal/php7/parser_test.go index 9ef81b7..d909c98 100644 --- a/internal/php7/parser_test.go +++ b/internal/php7/parser_test.go @@ -60,11 +60,12 @@ func TestIdentifier(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -955,11 +956,12 @@ func TestPhp7ArgumentNode(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -1809,11 +1811,12 @@ func TestPhp7ParameterNode(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -1832,11 +1835,12 @@ func TestCommentEndFile(t *testing.T) { Stmts: []ast.Vertex{}, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + 
lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -1911,11 +1915,12 @@ func TestName(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -1988,11 +1993,12 @@ func TestFullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2065,11 +2071,12 @@ func TestRelative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2145,11 +2152,12 @@ func TestScalarEncapsed_SimpleVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2223,11 +2231,12 @@ func TestScalarEncapsed_SimpleVarOneChar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2312,11 +2321,12 @@ func TestScalarEncapsed_SimpleVarEndsEcapsed(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2411,11 +2421,12 @@ func TestScalarEncapsed_StringVarCurveOpen(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2521,11 +2532,12 @@ func 
TestScalarEncapsed_SimpleVarPropertyFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2599,11 +2611,12 @@ func TestScalarEncapsed_DollarOpenCurlyBraces(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2698,11 +2711,12 @@ func TestScalarEncapsed_DollarOpenCurlyBracesDimNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2807,11 +2821,12 @@ func TestScalarEncapsed_CurlyOpenMethodCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2900,11 +2915,12 @@ LBL; }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2993,11 +3009,12 @@ LBL; }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3054,11 +3071,12 @@ LBL; }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3101,11 +3119,12 @@ CAD; }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3162,11 +3181,12 @@ CAD; }, } - lexer := 
scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3208,11 +3228,12 @@ func TestScalarMagicConstant(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3253,11 +3274,12 @@ func TestScalarNumber_LNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3298,11 +3320,12 @@ func TestScalarNumber_DNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3343,11 +3366,12 @@ func TestScalarNumber_Float(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3388,11 +3412,12 @@ func TestScalarNumber_BinaryLNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3433,11 +3458,12 @@ func TestScalarNumber_BinaryDNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3478,11 +3504,12 @@ func TestScalarNumber_HLNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3523,11 
+3550,12 @@ func TestScalarNumber_HDNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3568,11 +3596,12 @@ func TestScalarString_DoubleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3613,11 +3642,12 @@ func TestScalarString_DoubleQuotedScalarStringWithEscapedVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3660,11 +3690,12 @@ func TestScalarString_MultilineDoubleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3705,11 +3736,12 @@ func TestScalarString_SingleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3752,11 +3784,12 @@ func TestScalarString_MultilineSingleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3823,11 +3856,12 @@ func TestStmtAltIf_AltIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3937,11 +3971,12 @@ func TestStmtAltIf_AltElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() 
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4028,11 +4063,12 @@ func TestStmtAltIf_AltElse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4207,11 +4243,12 @@ func TestStmtAltIf_AltElseElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4343,11 +4380,12 @@ func TestStmtClassConstList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4466,11 +4504,12 @@ func TestStmtClassConstList_WithoutModifiers(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4546,11 +4585,12 @@ func TestStmtClassMethod_SimpleClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4696,11 +4736,12 @@ func TestStmtClassMethod_PrivateProtectedClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4823,11 +4864,12 @@ func TestStmtClassMethod_Php7ClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4939,11 +4981,12 @@ func TestStmtClassMethod_AbstractClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := 
scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5067,11 +5110,12 @@ func TestStmtClassMethod_Php7AbstractClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5113,11 +5157,12 @@ func TestStmtClass_SimpleClass(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5172,11 +5217,12 @@ func TestStmtClass_AbstractClass(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5264,11 +5310,12 @@ func TestStmtClass_ClassExtends(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5358,11 +5405,12 @@ func TestStmtClass_ClassImplement(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5475,11 +5523,12 @@ func TestStmtClass_ClassImplements(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5631,11 +5680,12 @@ func TestStmtClass_AnonimousClass(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5731,11 +5781,12 @@ func 
TestStmtConstList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5798,11 +5849,12 @@ func TestStmtContinue_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5876,11 +5928,12 @@ func TestStmtContinue_Light(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5954,11 +6007,12 @@ func TestStmtContinue(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6032,11 +6086,12 @@ func TestStmtDeclare(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6143,11 +6198,12 @@ func TestStmtDeclare_Stmts(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6223,11 +6279,12 @@ func TestStmtDeclare_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6279,11 +6336,12 @@ func TestStmtDo(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6347,11 
+6405,12 @@ func TestStmtEcho(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6404,11 +6463,12 @@ func TestStmtEcho_Parenthesis(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6449,11 +6509,12 @@ func TestStmtExpression(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6646,11 +6707,12 @@ func TestStmtFor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6768,11 +6830,12 @@ func TestStmtFor_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6855,11 +6918,12 @@ func TestStmtForeach(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6932,11 +6996,12 @@ func TestStmtForeach_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7019,11 +7084,12 @@ func TestStmtForeach_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ 
-7127,11 +7193,12 @@ func TestStmtForeach_WithKey(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7225,11 +7292,12 @@ func TestStmtForeach_ExprWithKey(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7343,11 +7411,12 @@ func TestStmtForeach_WithRef(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7473,11 +7542,12 @@ func TestStmtForeach_WithList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7520,11 +7590,12 @@ func TestStmtFunction(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7578,11 +7649,12 @@ func TestStmtFunction_Return(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7743,11 +7815,12 @@ func TestStmtFunction_ReturnVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7812,11 +7885,12 @@ func TestStmtFunction_Ref(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7882,11 +7956,12 @@ func TestStmtFunction_ReturnType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7939,11 +8014,12 @@ func TestStmtGlobal(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8101,11 +8177,12 @@ func TestStmtGlobal_Vars(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8167,11 +8244,12 @@ func TestStmtGotoLabel(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8201,11 +8279,12 @@ func TestStmtHaltCompiler(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8267,11 +8346,12 @@ func TestStmtIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8377,11 +8457,12 @@ func TestStmtIf_ElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8464,11 +8545,12 @@ func TestStmtIf_Else(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() 
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8637,11 +8719,12 @@ func TestStmtIf_ElseElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8820,11 +8903,12 @@ func TestStmtIf_ElseIfElseIfElse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8865,11 +8949,12 @@ func TestStmtInlineHtml(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8911,11 +8996,12 @@ func TestStmtInterface(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8992,11 +9078,12 @@ func TestStmtInterface_Extend(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9096,11 +9183,12 @@ func TestStmtInterface_Extends(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9153,11 +9241,12 @@ func TestStmtNamespace(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9211,11 +9300,12 @@ func TestStmtNamespace_Stmts(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) 
php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9246,11 +9336,12 @@ func TestStmtNamespace_Anonymous(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9349,11 +9440,12 @@ func TestStmtProperty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9505,11 +9597,12 @@ func TestStmtProperty_Properties(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9661,11 +9754,12 @@ func TestStmtProperty_Properties2(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9787,11 +9881,12 @@ func TestStmtProperty_PropertyType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9854,11 +9949,12 @@ func TestStmtStaticVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9963,11 +10059,12 @@ func TestStmtStaticVar_Vars(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10072,11 +10169,12 @@ func TestStmtStaticVar_Vars2(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := 
scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10200,11 +10298,12 @@ func TestStmtSwitch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10328,11 +10427,12 @@ func TestStmtSwitch_Semicolon(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10446,11 +10546,12 @@ func TestStmtSwitch_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10552,11 +10653,12 @@ func TestStmtSwitch_AltSemicolon(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10607,11 +10709,12 @@ func TestStmtThrow(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10653,11 +10756,12 @@ func TestStmtTrait(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10745,11 +10849,12 @@ func TestStmtTraitUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10860,11 +10965,12 @@ func TestStmtTraitUse_Uses(t *testing.T) { }, } - lexer := 
scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10975,11 +11081,12 @@ func TestStmtTraitUse_EmptyAdaptations(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11134,11 +11241,12 @@ func TestStmtTraitUse_Modifier(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11304,11 +11412,12 @@ func TestStmtTraitUse_AliasModifier(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11588,11 +11697,12 @@ func TestStmtTraitUse_Adaptions(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11626,11 +11736,12 @@ func TestStmtTry_Try(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11722,11 +11833,12 @@ func TestStmtTry_TryCatch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11841,11 +11953,12 @@ func TestStmtTry_Php7TryCatch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ 
-11994,11 +12107,12 @@ func TestStmtTry_TryCatchCatch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12101,11 +12215,12 @@ func TestStmtTry_TryCatchFinally(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12309,11 +12424,12 @@ func TestStmtTry_TryCatchCatchCatch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12366,11 +12482,12 @@ func TestStmtUnset(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12444,11 +12561,12 @@ func TestStmtUnset_Vars(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12522,11 +12640,12 @@ func TestStmtUnset_TrailingComma(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12601,11 +12720,12 @@ func TestStmtUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12680,11 +12800,12 @@ func TestStmtUse_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12770,11 +12891,12 @@ func TestStmtUse_FullyQualifiedAlias(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12882,11 +13004,12 @@ func TestStmtUse_List(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13005,11 +13128,12 @@ func TestStmtUse_ListAlias(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13138,11 +13262,12 @@ func TestStmtUse_ListFunctionType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13293,11 +13418,12 @@ func TestStmtUse_ListFunctionTypeAliases(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13427,11 +13553,12 @@ func TestStmtUse_ListConstType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13582,11 +13709,12 @@ func TestStmtUse_ListConstTypeAliases(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13727,11 +13855,12 @@ func TestStmtUse_GroupUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := 
php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13883,11 +14012,12 @@ func TestStmtUse_GroupUseAlias(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14049,11 +14179,12 @@ func TestStmtUse_FunctionGroupUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14215,11 +14346,12 @@ func TestStmtUse_ConstGroupUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14402,11 +14534,12 @@ func TestStmtUse_MixedGroupUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14469,11 +14602,12 @@ func TestStmtBreak_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14547,11 +14681,12 @@ func TestStmtBreak_Light(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14625,11 +14760,12 @@ func TestStmtBreak(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14703,11 +14839,12 @@ func TestExprArrayDimFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) 
php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14800,11 +14937,12 @@ func TestExprArrayDimFetch_Nested(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14845,11 +14983,12 @@ func TestExprArray(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14912,11 +15051,12 @@ func TestExprArray_Item(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15032,11 +15172,12 @@ func TestExprArray_Items(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15110,11 +15251,12 @@ func TestExprArray_ItemUnpack(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15177,11 +15319,12 @@ func TestExprArrowFunction(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15267,11 +15410,12 @@ func TestExprArrowFunction_ReturnType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15332,11 +15476,12 @@ func TestExprBitwiseNot(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, 
nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15397,11 +15542,12 @@ func TestExprBooleanNot(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15475,11 +15621,12 @@ func TestExprClassConstFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15541,11 +15688,12 @@ func TestExprClassConstFetch_Static(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15606,11 +15754,12 @@ func TestExprClone_Brackets(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15671,11 +15820,12 @@ func TestExprClone(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15718,11 +15868,12 @@ func TestExprClosure(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15893,11 +16044,12 @@ func TestExprClosure_Use(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16068,11 +16220,12 @@ func TestExprClosure_Use2(t *testing.T) { }, } - lexer 
:= scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16138,11 +16291,12 @@ func TestExprClosure_ReturnType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16205,11 +16359,12 @@ func TestExprConstFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16272,11 +16427,12 @@ func TestExprConstFetch_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16339,11 +16495,12 @@ func TestExprConstFetch_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16404,11 +16561,12 @@ func TestExprEmpty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16469,11 +16627,12 @@ func TestExprErrorSuppress(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16534,11 +16693,12 @@ func TestExprEval(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16579,11 +16739,12 @@ 
func TestExprExit(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16624,11 +16785,12 @@ func TestExprExit_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16690,11 +16852,12 @@ func TestExprExit_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16735,11 +16898,12 @@ func TestExprDie(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16780,11 +16944,12 @@ func TestExprDie_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16846,11 +17011,12 @@ func TestExprDie_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16923,11 +17089,12 @@ func TestExprFunctionCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17000,11 +17167,12 @@ func TestExprFunctionCall_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } 
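Every php7 test hunk in this stretch repeats the same two-line change: the lexer is now built without the removed bool argument, and a second DFS pass with the new FilterTokens visitor strips free-floating tokens before the AST comparison. The snippet below is a minimal sketch of that shared sequence, not part of the patch; the calls mirror the diff, while the import paths, package name, and helper name are assumptions based on the repository layout under github.com/z7zmey/php-parser.

package php7_test // placement assumed: internal packages are importable only from inside the module

import (
	"github.com/z7zmey/php-parser/internal/php7"
	"github.com/z7zmey/php-parser/internal/scanner"
	"github.com/z7zmey/php-parser/pkg/ast"
	"github.com/z7zmey/php-parser/pkg/ast/traverser"
	"github.com/z7zmey/php-parser/pkg/ast/visitor"
)

// parseAndFilter mirrors the boilerplate shared by the updated tests:
// lex, parse, then filter parser-only nodes and the newly attached tokens.
func parseAndFilter(src string) ast.Vertex {
	// NewLexer no longer takes the dropped bool flag: source bytes, PHP
	// version, and an error handler (nil here) are all that remain.
	lexer := scanner.NewLexer([]byte(src), "7.4", nil)

	php7parser := php7.NewParser(lexer, nil)
	php7parser.Parse()
	actual := php7parser.GetRootNode()

	// FilterParserNodes was already in use; FilterTokens is the new pass
	// that removes token metadata so the expected fixtures stay token-free.
	traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
	traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)

	return actual
}

Factoring the sequence into a helper like this is only illustrative; the tests in the patch keep the boilerplate inline in each function.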
@@ -17102,11 +17270,12 @@ func TestExprFunctionCall_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17222,11 +17391,12 @@ func TestExprFunctionCall_Var(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17355,11 +17525,12 @@ func TestExprFunctionCall_ExprArg(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17420,11 +17591,12 @@ func TestExprPostDec(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17485,11 +17657,12 @@ func TestExprPostInc(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17550,11 +17723,12 @@ func TestExprPreDec(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17615,11 +17789,12 @@ func TestExprPreInc(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17680,11 +17855,12 @@ func TestExprInclude(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17745,11 +17921,12 @@ func TestExprInclude_Once(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17810,11 +17987,12 @@ func TestExprRequire(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17875,11 +18053,12 @@ func TestExprRequire_Once(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17963,11 +18142,12 @@ func TestExprInstanceOf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18051,11 +18231,12 @@ func TestExprInstanceOf_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18139,11 +18320,12 @@ func TestExprInstanceOf_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18206,11 +18388,12 @@ func TestExprIsset(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18294,11 +18477,12 @@ func TestExprIsset_Variables(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() 
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18370,11 +18554,12 @@ func TestExprList_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18478,11 +18663,12 @@ func TestExprList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18596,11 +18782,12 @@ func TestExprList_ArrayIndex(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18726,11 +18913,12 @@ func TestExprList_List(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18835,11 +19023,12 @@ func TestExprList_EmptyItem(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18946,11 +19135,12 @@ func TestExprList_EmptyItems(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19032,11 +19222,12 @@ func TestExprMethodCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19099,11 +19290,12 @@ func TestExprNew(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() 
actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19176,11 +19368,12 @@ func TestExprNew_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19253,11 +19446,12 @@ func TestExprNew_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19386,11 +19580,12 @@ func TestExprNew_Anonymous(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19451,11 +19646,12 @@ func TestExprPrint(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19527,11 +19723,12 @@ func TestExprPropertyFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19646,11 +19843,12 @@ func TestExprReference_ForeachWithRef(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19724,11 +19922,12 @@ func TestExprShellExec(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19769,11 +19968,12 @@ func TestExprShortArray(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) 
php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19836,11 +20036,12 @@ func TestExprShortArray_Item(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19956,11 +20157,12 @@ func TestExprShortArray_Items(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20064,11 +20266,12 @@ func TestExprShortList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20182,11 +20385,12 @@ func TestExprShortList_ArrayIndex(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20312,11 +20516,12 @@ func TestExprShortList_List(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20400,11 +20605,12 @@ func TestExprStaticCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20488,11 +20694,12 @@ func TestExprStaticCall_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20576,11 +20783,12 @@ func TestExprStaticCall_FullyQualified(t *testing.T) { }, } - lexer := 
scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20674,11 +20882,12 @@ func TestExprStaticCall_Var(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20770,11 +20979,12 @@ func TestExprStaticCall_VarVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20858,11 +21068,12 @@ func TestExprStaticPropertyFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20946,11 +21157,12 @@ func TestExprStaticPropertyFetch_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21034,11 +21246,12 @@ func TestExprStaticPropertyFetch_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21141,11 +21354,12 @@ func TestExprTernary(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21227,11 +21441,12 @@ func TestExprTernary_Simple(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, 
actual) } @@ -21386,11 +21601,12 @@ func TestExprTernary_NestedTrue(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21545,11 +21761,12 @@ func TestExprTernary_NestedCond(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21610,11 +21827,12 @@ func TestExprUnaryMinus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21675,11 +21893,12 @@ func TestExprUnaryPlus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21730,11 +21949,12 @@ func TestExprVariable(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21795,11 +22015,12 @@ func TestExprVariable_Variable(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21839,11 +22060,12 @@ func TestExprYield(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21904,11 +22126,12 @@ func TestExprYield_Val(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21990,11 +22213,12 @@ func TestExprYield_KeyVal(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22045,11 +22269,12 @@ func TestExprYield_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22121,11 +22346,12 @@ func TestExprYield_KeyExpr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22186,11 +22412,12 @@ func TestExprYieldFrom(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22274,11 +22501,12 @@ func TestExprAssign_Assign(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22360,11 +22588,12 @@ func TestExprAssign_Reference(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22458,11 +22687,12 @@ func TestExprAssign_ReferenceNew(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22601,11 +22831,12 @@ func TestExprAssign_ReferenceArgs(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() 
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22687,11 +22918,12 @@ func TestExprAssign_BitwiseAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22773,11 +23005,12 @@ func TestExprAssign_BitwiseOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22859,11 +23092,12 @@ func TestExprAssign_BitwiseXor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22945,11 +23179,12 @@ func TestExprAssign_Concat(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23031,11 +23266,12 @@ func TestExprAssign_Div(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23117,11 +23353,12 @@ func TestExprAssign_Minus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23203,11 +23440,12 @@ func TestExprAssign_Mod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23289,11 +23527,12 @@ func TestExprAssign_Mul(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) 
php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23375,11 +23614,12 @@ func TestExprAssign_Plus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23461,11 +23701,12 @@ func TestExprAssign_Pow(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23547,11 +23788,12 @@ func TestExprAssign_ShiftLeft(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23633,11 +23875,12 @@ func TestExprAssign_ShiftRight(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23719,11 +23962,12 @@ func TestExprAssign_Coalesce(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23807,11 +24051,12 @@ func TestExprBinary_BitwiseAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23893,11 +24138,12 @@ func TestExprBinary_BitwiseOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23979,11 +24225,12 @@ func TestExprBinary_BitwiseXor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := 
scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24065,11 +24312,12 @@ func TestExprBinary_BooleanAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24151,11 +24399,12 @@ func TestExprBinary_BooleanOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24237,11 +24486,12 @@ func TestExprBinary_Coalesce(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24323,11 +24573,12 @@ func TestExprBinary_Concat(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24409,11 +24660,12 @@ func TestExprBinary_Div(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24495,11 +24747,12 @@ func TestExprBinary_Equal(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24581,11 +24834,12 @@ func TestExprBinary_GreaterOrEqual(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24667,11 +24921,12 @@ func TestExprBinary_Greater(t *testing.T) { }, } - 
lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24753,11 +25008,12 @@ func TestExprBinary_Identical(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24839,11 +25095,12 @@ func TestExprBinary_LogicalAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24925,11 +25182,12 @@ func TestExprBinary_LogicalOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25011,11 +25269,12 @@ func TestExprBinary_LogicalXor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25097,11 +25356,12 @@ func TestExprBinary_Minus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25183,11 +25443,12 @@ func TestExprBinary_Mod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25269,11 +25530,12 @@ func TestExprBinary_Mul(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25355,11 
+25617,12 @@ func TestExprBinary_NotEqual(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25441,11 +25704,12 @@ func TestExprBinary_NotIdentical(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25527,11 +25791,12 @@ func TestExprBinary_Plus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25613,11 +25878,12 @@ func TestExprBinary_Pow(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25699,11 +25965,12 @@ func TestExprBinary_ShiftLeft(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25785,11 +26052,12 @@ func TestExprBinary_ShiftRight(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25871,11 +26139,12 @@ func TestExprBinary_SmallerOrEqual(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25957,11 +26226,12 @@ func TestExprBinary_Smaller(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26043,11 +26313,12 @@ func TestExprBinary_Spaceship(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26110,11 +26381,12 @@ func TestExprCast_Array(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26175,11 +26447,12 @@ func TestExprCast_Bool(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26240,11 +26513,12 @@ func TestExprCast_BoolShort(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26305,11 +26579,12 @@ func TestExprCast_Double(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26370,11 +26645,12 @@ func TestExprCast_CastFloat(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26435,11 +26711,12 @@ func TestExprCast_Int(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26500,11 +26777,12 @@ func TestExprCast_IntShort(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() 
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26565,11 +26843,12 @@ func TestExprCast_Object(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26630,11 +26909,12 @@ func TestExprCast_String(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26695,11 +26975,12 @@ func TestExprCast_BinaryString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26760,10 +27041,11 @@ func TestExprCast_Unset(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } diff --git a/internal/php7/php7.go b/internal/php7/php7.go index 89954b66035054f2ade09e65668ad66583eb60ef..7ed02ea323fabec80f8b9c38a16cd76601804ce7 100644 GIT binary patch delta 21076 zcmc&cYj~8!)z8c<2?-Dqt|7NwkR(7b?0q-LErJw8AQ7y9fI(425E4PH6}eRl0t$%a z2tp-Qp{|}Rd+`kMr#t*% zU-(6L#)hw9b2W7~@g*lE-3O%wq%E9aB$0PuB!RRTF=cXj`7IMAkr^<~K|D~LOq|3I zq!sbO7Y@={0jfG6U~Cf$P?3#*eG!1-jtCf*U}2=?B49@ZU_vtSLf=G^tB6X#%h3QI zc$+GW03e1BHrfGsNj8jic0fjQ6vilnqe(BNS=5(4SZD{(<~EF35dcpL%JjV*P~O5C zA~C+vfFK~i6hVIW!J!m|Z30HMv;p3?1FlK6VQjYp`lVSIasgk7251rFmz!Fd*zmjq zSG-H>s6YQ0{byF2sBbHy|4eNg_3fhQKOOBNzV*TV?MR+n4#C$4KScjo(cbv>TKKpA zbW6H|eWpK+?x0{F=ubm46l{n7RGg_`TlJ?vmV!MmKBc9QYoTkl`t+3kl%1nspg*NK z6>PEo)WoG=bHu03ZIRxO`qPY#3U*w7n$$_b4(m@hT&7_A^rzvS73^*ElamAh57sC+ z*_n)h*K<*|@>4iZeR@HE>YlG)>-8sB7ld`JrA}16JfN!;0IHfGtaVr9xUGx01bEsU zGKj!|Zp1CtD+f2pglFhw9l#CX-;R+mwig z1K_D489d)ZT98PBz2U%>B-=)Cmlp|c8^m13pq@U`+AjEFzYu&h3SI=C`B7E90>}#i zIXL6AwWJ-?-$~NoSOgS>GofpO3fBvyf|}54LM9Xr`bjd34$~Ysr(mcb4`=zXF|3B- zvaBUe$SNYI^@P%Tk_q~c?h8mq{m0M+B%@Q*HC|X)*j4=NiZ}lK$w@a)m~o4%2$q-Q zTq;(eE{_LE5_COCTELvqq#L-CXgj{<3NqMIA9BmA>`m^}Op2c(BRWJei6jnHVMhk^ zTTGfm?X##jH*`FRX2znvu@CumBh=@0qC?@+ek3WQK^8^0|8W284I_^>iQ;qXwd8J}Y$G0P6;m z6`I86E7%y@=*O-ywn|PyQCM2nvLU1oyf=`f!nn>f6KaOw5qfAKo}@@BrSRGK7(x0C zLQ=J7SPu1gz&Ql>UTR0$Nl<7F>qe3U-gGc2h$3Sb=H06e3QqW}KS|>GLx`7Nl0o#XQV@cRTWKxHgu1WD85_=br3xq5 zAkL~0CJqgS^!CZ&&N8xsY5wK=Ne@`&p^1<>9xsxTiKyudH6<8(pXW#$~F0aRLsgLdX>T+ z;S{#3oQhk{5D$$2LVU1jDw-||s%7<72yfc!d zbLV|ztlg^qZI&io1hv)FzB1?U$L~+hCOJ^D2k-RqV(Ns~XXA_njJk_VPh|OA1ww`6 
z`gKBAr0+bu_#~P)WLWI_c_tRpw5)K|d=m;c`B{6zieLLX1uM>UxL|H~+B9;yP(BvV zL0JG5Ef}EtqaA=KwA$BDEs<#2BZxw)XoqOhgh^@vA>4mu=i5fvM`h>Xxdo>3Xi$1X zIo!37V2o#CgSf+OJCi@8hJ=Dp8m4)0`y!>Zu;GwoE;a!)?1$1qdNq{2L>>Iq#pH}d zTHbNvmSe~#ZdT1vf!jdYM3NHE;B9;a1(2t=$c+*d(qi6!DOq7rjxs!o-sPHu+{jy& z6R&MzD33WjIN|GK8zN(UB+8F<#QZx34EQJCpG{o-(a?;|irh9R%W=8FZNTgNm`} zXxGg$Z*f`_GES?o@*6%{O)P#m+v~Laag6+{YfRB0X%R^`+ywX9$Srm;3LTzQ6s9aD z&7i1;wBrvyN&X8NW>z7E!;hddh4(Z%Yy8WqNIyj`Sl&}4Gin}wLmh)>hDtnT%uTrgoaO0{l%q*_pyLHKw>^zLX>)&KmHnx?#LoG|3iG5*xN zzbG8(b2<6xzhIcA#ooM-+P$?8S&hMaVTUXGa7liL>`0O@ZdeVk}yVzas6EYlsV%K^%3CNB#US zpg)P*Zvw%wU3grfOt{%*l=`y4)hV`wv@}VO^EheBm%L0m*tmcGx9HjRhWfjxJ#1b1 zib>G0Ku$>7O7?5b9^OhuH0I2e{2q;1%?M${)(|HR1o+NZNrB}Pc#2tH@V*hTvJHhx z2i`E{s9Z=~8-~|JZ=pu_b>Q!63SqwJ@8roSH59XFUb0N6kDjX|}*&*NTKr(m<9yMwmU(?)!+-sGH(F^!ezelDE^qOBdVbR1Xjd6P$MWm<%u&o6FfIf> zW-p;Oovo^k*qaUQi9zpc1|irsn+72H11(Z0p079bLp=+0+@LOuSK8c25*#epbyG*DH$inzpgz~SDkY^8R7^B_43=Ahl=SNS9zw<9raw+p^2_i$J zrzw{_j!hYHOmXoUs^h4-CoYYFNOIX2;OWnbwjWVm9GQRdVKm$H#T92mfeFT$693Is6O7sUcB?$e8T*jtNo(e}!;U1N=LT+C4A2Pxoy}k3D znsV3&uG8r4&pK_g=X*|*Qak>*9}r(&N&&YKOr)Jbd<9v46i;u5-(q};X|{UX9k=60 zJ=OyfbLx2vn@W~4TY~SNvk`99^7gb39C{S3ZQU|v&+rtSGfBd{85sGlJA+}&ljlei zYt7Vfmra;{Kbs>(sv*U*PqE_F!zEvBNjk+e!PR;yd^|SEt0r>ti&18A10$9 zz4+1{niHFm+)k;XIu&xPkZgqb+y0q&Hc#WC)eeo_e&#qo_M=3%w*wi{84AUIFjA<`VYB}#I<6Z(7oweQy6T{CZcH5tphczEM%X? zJ+z<$9d5Cg>Z#5^C5otpmTnK8k{>zR5IMSk8Ui;lEW>6?v9_*6EHip4s^=h@U_ zld?3&Bt=3np$*<`ALr0~!x0F1{Gw>;gI_x_J0Y)*5EfNLx=94E@S2k@fq5=-t5W=K zoP$o_fsXVVOPpW0N#D3k%`oJb$%&y*?RK;Rk(A@V=}hmpI9z(DZkzh?JhecWfhj4Z zS3=?UtT|tnPj9y@zC~T6>-3^1uu8AAr;U_Oyil^1DGkbmT|TTYP)9rz4)C7}=$rN{ z{Osi@-nKNnsEhMye|UZ|RtBCZr0>R)?P95)Xt|e$Xs&dTZ3=nKYjTvHI3|P)g_QZ| z0ILF^X5Km4%~RkAg~Tu|wDCI|QvCcuj$Y^_Yf)3*XKmn(-XuX2k+E)h5mp}B^rYQ1 zI2LhKPdc{YU<_+K(}@|3p>kcs%LiRSf3hi6YA9;KtNPIKn(2i;G(-P^8LnY*!|F|a z&FKj@OXRhakFzwmsUI%9SXNpj6R2&$^)x215{peR7M}332*3^jkcSTOYNy6z(v$lm zb@4qZ;wJ~w#aefK&k$-AV)=n4q|_+IF-Fwti6nPhLmb}vVhO)CQK;Lzy5b;hyk67M`GPbk8zp2w>QH)! 
zrOQb)8Et~78{2ct>T(kFJ<5Kg^aJ0Hra>4tMkC6YFB8^HpiSYxSegcV=A*NHdfQ^HK+ zZW3s2l~FKEl&M)+F%X(y9(3X{$$;s!#jG{dZK1qoRPm!4_a8*QFz=k~8!WoJQ z)@o!;tNbt;kqtBG)7DwX`21?NU1{NZks3x+Vo^RaX!or75q z<4(=EhqkmUZ_;nD<|kF|^#zO=vG8izx^layv>qf)c*lF`X6rg`K)k>#I#)|3aW2fF zR$%~h5jW_saVn)UHK`$TRa(rZ9c`;Jb+%R*IvwH`E}Ww_AC|V_+`<)e^$4R9SGg6- z89&aY)tZ?|4%+zioUccr+pis}P4nq-Jxm5fAO9{!#_$8iSdoQ~8tQ^&4%!M1JwSKH z5J`vQSH-yU{eN#za-r8JDzOojGPIJ#s?LaOCx(F^xrsFNdEUXAJI04zPBR5^)VtRvp z)9qMdDvbuh8?qm!*8Y;FuAy@5#-(al$opRmTmGmT77hY(3xe3;+`WRnWt}_4IM22> zezy__S4_f5;rfb*5GU0gQuonR?yJO=u%Ryp6P=>BE;BAx685UcY1odxb(O;Rnq|l7 zt8qC6L+bOvsu$T<>0|>A&g6Zbpby$aKCy=8H|Ab-ydGU$DL-01A%9G!+%^OElWM|2 zYeQeWetJcfISk64LEA5Vt8c5!sk8_ciFP1i?(UdA5z400V!U7yME6@8`12p>9-9nX zo>DSko7N-@aHLN*_GyzGJTP6cNO=Zfo^o6WGLOK zilpQ@GQ5|@6uUVO^OORe;=L(4?&8s0^sJ(J>Ta4955xVO!l?QXFY7q&&_|n046~RW zm(i?hg@i2#ajfsQ&F1%bbqF&aX?)E*n7jq5W@Ebk+nuDBHoVi~+T31Ske)OfL9AA5 zOU=BoC$g1DXB3alKR$&%U8aV82Wp&DYnM2o#+YX~bUGW(rP&Vav&mUJ>aI z$;_mfq3GU0YiFK=q&M~WBC(ieB(qh5m*W1#j*%B_GyVB3Aq9Skts_RAq~;%j%2MiT z5|pia#oe%oGj|xqK7E~zBwn(eUa&1BX4Ee&!ySHGG4loZBX84WYLn)pcTEI5wde>r zVfWkU!cN^qzpx=byjxKUdiWQ+>CPyT3Lu9$x@co3uW*E<)cCh)$Tv?VKX>;xum3C{5t^eMqlvBoXZ` z65ekv6hC8F8R38|J49RZ`#+*1EO~GqeIR8&wQu5y`;oC6>k(0jlkeY8sol<`f1;4k zmp~*#utz_k!|a$jpPHDk>?qCWt3HiI$vtGEU_}OMPtnf&$wPFgolv(rg@qM0iS=|{ zELM-t6}vjog)H^;=g(vD{6`Etm)I~as6Ry$dG(Q4*?JvCJhC2ddOR{hw3m*?5-a{1 zk;Eeq!>C8D)z+`^;;=34&~Hq1Bdirmv*R0F8XMZEV~Q&382hD-$-I9omf*w_CZ=$> z@X{9)51ojmIK5s`#E|JI&E@~9k0o~BDMie%AcENGQ}kN9cn^MO;EB?I?f2BlfBKI0 zwUMTUI)apR^^>kR$|8+)e7-SvyrJ>l+OJ9RbcCqNUrz>>Svsx8?Bs~_hJVRk(+?d6>@ozI?=RGS6k?`pV z&RZ+gyY_zSXdpSUY=-OV z;?p}JrG_0LT)KBu5GY=&7}34ZC(rL3OMYsuBJUIAbNSKS*lqKhd=rUcT3_y)Q~B(A zyQB-cDpG-nokP0C?uexY3Nsk7caplZ2s?&}iy*YvkJ-#P-6#LLToJ(dUDmxbSTmUB z#(UxRLN>%M*?)Q{bTe>Q+bfadm{E3ghZoWDC65bU_yN5sg`B`=ds)nVnc_E)#roiu z$pu78rW<}75Yf&ncT|soG4B0?1ChQMOZ>ue^ZJ1-)sniQQA-AyqbL?W#jWzv zU_4#g%hbafJE{02&T0!6ZPL=tBmWk>*$@`dR-~!f^tb97Erc}v&dR~e!PADaOPKb$ zVdhv^_aQc?O0Ma{SwwGILtLLJF-I~#wnd7YOR~)_y%dq3y;h5C`VP(!?5zC^jJi&< zz#Ei|K-XaRbu7t_*5-PJ7QpC==J#JyKnk*nm*7P{YJrqFX7x?)>zV7qMuw~sYZ@iISLW4LQc9I<0=^f}`-QNKmB6{HNn zbnfnf*yiiHk$t6i5<2wuW3#%<;#Ktx+Juw|XcHRF;Q2yO@gSz&WOOV~2|jg@ujdkO zHnl|&qMGz&ZKOGW;AWO!56O1k!f=;L?JCRG`&Kr=+V`sA>;_1^Eu!00C3{=LvS|@u zd&c)l!2E7{oV0J_O=TRL%JvfYVG2%Wvv@mcoJc--Dq3%?NF~}t4QW`aYhmbB5rt!! 
zSC^tib?eHQ5k+g8YH#8Kl&A}Q`YPsXT8_t5qHK8ES8mFOk3Ur>TM7)Vz*meyYKj4n zd|&)d1t#Mp1d|xA-^ns%x6$M~nej4|216X70c&O`hX-cQWUU}LlSQ@OOR(RzRqJQs z_Cfltc#V&NyUj1rTazz+ez@C|8V=^LWH_@EdkaLil0SJ58*D#TdH3RI@$ys@sbszf zo4jOYW{A;0gzZbKqXaL2x`DK-O~R{YDKePvl&;Z%`S?^0TFypUBzd2NJNff-*>CMa z^qgyqHXK;KX)e3ZjxuS!$pC6FPUHr4&jTiP?5Z%VUu|Afuez~hJMLdE;Tca~&)MDHd z@;HXE4fnalY#+R`#3anZcx5fGqIod&VG{%s{ANuw?GY0sY6R*DPo!N{$p&Fk@g0kd z1nS;oO`zveW_9|@a>tBi%u4u3NJ9-}`OD2IAlH-K@!^o@Cfl%Xd4#o6z`$vV5rl93HGna&V~&vhHckk_EqQy8Ddd83u^wz65!d6j_k^>$$s0Wd$>|77%hS* z+)8W)?6eIt_&8$Xb7&Ef3Pg^G#Jc7@RlKufNS>0#= z;D^lx>h+bB(ciF)RqGVx%~_64u<>zr);86WKN=XWhKXEwO3gtAP4VZj=@}D=hJD0_A_yU?MH7o)sOq*kn{xoaCSN(~l#OV3YNO?0lU=&>d3-yM-pc7=9%$jEv zswV;!km}{ocOyGtL!a`Ti7sNOF5ujRXGSu@gC%{9S`9I-qa5h}Jo^I1S1Uy9kipsv zu@$Zky}%kN#`y8EAYT7sLP2*!!)=mwd&!g(Z!SElrZ<1+B|{GE{1rFVGOqr-a}$&O z>=6eZo!AL$QRjj$-u%|p_0`L2GI({#S+Dw=0m*?5;}|-Q;#vQYS4^0AhblNUU56YB~~YfJ(nq~>^O z7jM%P10sWl4%K#4gZ2}!O=kJq^AZQH`iCBfU2Olw!0kJ< z*36_@&z)!h8+QHtn0i@@QK_Lu*_LCfas4~yq?uRw&ZFM7K;~j6?^3Ek1A}*+lh4`} z8~Ug1QSJMo@Nc|OgJaP?u z@JTHTY6v!wy`2`-DI;EXLgoibMOe7Df>@;8q>;3VztU}-I(*O)H2SB}Z%E+FK4<-i zEDAMdXVSdpVCD_UT#ggaF=-Mpx{(#al9H&4OXplH6Z2vhi9BiRKi0~Z7Rkxc1iLQ2 zFKZ7d75MPc-=**8pAV{K^x*|5f+9Y9S8R4Gu}-5I?wK{)n50m<&$TF6SrYj~rPdhK z+#?!QEnKYs_3oD%Nv0^Aar(H9A7%66Y>eu!wAkXAdvqcJOB6DZV6B1=Pi8{}+os=W zQrIle@hGd*yHa+NH6X$9e{10*`{t^S;WCS#7!mG-sNadQc;ww2FtR+d&?kS2r0+ru zFy|6SK`OpOz~3s-zgOaiWh<5O7nqe5SOS+f^hYO5CYT%N-M(c#>_-ynQ8-Ec>>~8@ zPuH_|;mk=TnVAAdo_TP`_pBAKcChY^ohL9_3dF3;TzJX%EE^vCj@822(@K`0m!CY% z(rpIPe};9lS-3T4Oz9{t3l8ZGuE8vchks-b*(Y-0Cp8iD$fcT`fIaxl;IXr;zL9f? zxk&M3M85b#J-Ak$aZHEIG5G!ue=VSuQK-W?^x*$I&#cBx<41U8J#p;?W;MfLOePDD zPWiGzZx02Q)kh=JZB!&#@{_khz!TW=qvI#*^ea*gl{Re}tYZ%8C*e;I`63<=ZE`rI zSA+QyOcS7IRu=JVL-U(B>`fPsB$#9%yqcxK(i<_rC`r^b@Mn#lPISZ+9)d~cD14rQ z2Sz-ck{vG%8i?zf=>gcPpgRXdkDn1q0$zm~(CaGN6jJV>XQ7NbCc)mfm|Ls5A*;PI zMUyVFwGS5YJ6k%gvhBvt(;W78pMkBAL@oYiPlCrS%i@P8(?YaY=%O5H<@mQ2Ax`3T zQX6w99G;2Uf!D>WD4bL`-lMIfueG>wMkcg3M^bELh!a~!Yor;^OLzR%B8b}PFM1kN zp!c65G8;-sDbcSlk$owYkBVt-J)WVY$10*r+`T?n)*G$YvR_5KDvBfgxGW{S@siqb z=&dNuwpp9x80co(L1jmWpRdSqyl6k`54aTBA`kz><#00_p+TJyVR+z?}&ZIm!a<{ z)Q2yZvy2SRjzULM`!cj}E7gU?n~Y%`pMBgDwfBCRyiT8@i>VBJ{l-`LP-5E>@AD&4 z6;_YYd`hIjfmjKb22CVLf1P31>}*U7l!oxyk^?dLj_uWdEUbp8=jdrLsK^oeA37Vd AmH+?% delta 36179 zcmcg#d3cmX(to-;2}!sTAcP~wAV+|3%w(>N5D17MH{p~;Ib1+MB!D1gww1 zs_N?M!^dh@{E$%67Ong(uG)Qn9W!Ohxam*UKr@l&$;GIXzvb0Wk*h(J+v9e=n%JoL zJdTT_i8(ke#^;KUiLpFDZY^ftB@1qDO9^r6+Gkb~oGh(_u3WF6$9E22=dK+BdSXo-n7wZJz{IxiYE z4}2q9u7tEqiV6PaY)H%P)q=k{Tn)7dAU~E2ef5Y9ZrL5u;*AUbW?M*0Mttx$>qA6{S!4GU>$ zpBVg2aY##g9r?}W?{GT(JsoJKAuYA*27mKNNJ~XX%dC3Azq%CCa(7bjH)ldxhSd-L z=CHX1jYr9JbTmq#J(5iys^;BQH+vgY`SrG{n==|#`Stp$o0A(=@pURKY$R8L;I*H? 
zR2tj3%I1YtH(xb1qb#~78vZXoMfK-z`?`v@QK_Nd9jl_Pe_H5w`$F2BG&fyQF8g)T z*UfECPM?$7ZKm&xwil~tvo#6*ZgmxH2{(m)_h@L_s-~fz%?@eHqL-QmhHIn$Xa)pN zXeNI-tpZksf2LCCnP&3CpypxCt86}8b#spvRers@>Sk|-`WkuUbZ;}-w#Slm>WITm zn$c2z#aob<-P;n~hQ#*J{{P+{TEYG@KR)iSW9}Pg%chst;D)rY4AtNYdV725AaN3^ zPZQomwFO|)FaT-G)^fvv-`i-dHmJj@fut=x+Zr9RzInZ!iL#8%A`N@97{CogH18#p zNgvKcHhN?@inEUT@3sn+I(Fg|`luJB(foT+939aP-Az{$R4<&pwX~f9GzvJMP0?J` zh_z{tY6=o_=zKd$wN72JU4ovS%O^MoH)eGmsBX~CZ+U9BKSjib4ybALFD?{Mb7!N} z@Mm>%mYmhRBB1KQWKy5zPQ*#n-3cXxB^BvT)9(lN1gWz2`=^cHbNazzNo;Q!p}I>_ zaC9e(;ED6tDg}#7zeiq*`jHmlv+oz<$6Y>#&e2LR#nR`ZpSUEqriJ@qN@}hk33NeI z97hj$P!rmv8*Y>!ZM}Bw+O^{UvtaWz@diW)mW_utIY5$V=@gLh!k!@GWj~^Z?B8D0 zQ^;v!TJkz>MEm;C?eyq;+>0K0LT6FcxLNgyeAIRTM5J<;6uL~IrN>bmJAN~Agmezf z$f5SrD1oMK)S;kSj6t;`fLvRigR5qPWhMU^6hy# zUH0y5?lOxeevcc|0o_nNdL#*^G9iwFZVISIQY_UOznNh zRqJw)+D}0dPgZtdRkvo_`k_Y%oi-NTDRx9DZAD{Yy_YXWwN?!RR){?_e;lZrg$Y<= z&zw6*^^QI6p!Ln-2czUDdik%YfRiykhyvPizlj8=^|7BDu6ie)Y}fzq*t?A~_3pL% z0=-Lp`ezS*+c?!d_PV&ymrO^!4>noGcxm~8Z=cHlODVUv*N;aT{v|@Zpa-WdhK;(g zHC|(#@6X?D?%ESD2W!&uXK@``bPz{{ZvwQQVA2m>k1rC!Zl0(JBVKPd-Lnq6EYedt z(M(UaHxhb|Oj7h&6H}uwY?!D-YuzbS7xP6Mkz)rbv8Nh7EQY9V8$1SYOK0Z?o6Z#A!=Z8TI>rHV_{+;9AI@~i%kf`2h6oELl3bB7NR|dxjS_+c&x(J zgTEScn=IM-IO{!VD4X>t9XouylP>JV2ho!uRtmNA>)F z?^#Y#sO=uyhMpRVV&mFbK3@3lVG~*4oa8=&>I>DLL;t=6ye{Eu{j>xeKk?U)rT)ME zxfC_F&V^G;1Krt>ivbhv7rnKnH-yP^mj~d}K^OuaWbwu2$j@gkVi7nN+X@3+q#*~* zI}Z~eJQ&+bL~c+Y=blh~5C>>qh)kuiVNaqPHHgMfs}AU;c_dX1;qj-99&%}|XChMB8pe6X~Mc@h!A$BWfu-@0h2hMDm|gy_2jg2L&yX%F3QYdyNG#do$=w|5Vhb zIqwkR(e|b#n{-C|-X?Q@E)VaoE4!l@9sl3^%c0Jfq)r=LS#?=8@!TTR{vZZFAfBOeI&L?nC zS7G=;qCryB7#6%wO14w*id-I1+Vz!H_`>%@ihgHoNUEwJXE$}2Wdmc>;St~NaleZ zutp--7B*fir|_4L#{Yn78R4H@D9^flZQEs{IzqqN zvs=>7*Bf!!v##f`g(LSpN+n%l?YbO1@>`#ll%go%FTxI7}z;HF1rkml632Su}2-$hR#xrHtC!tT-8Lt^L|I|S)%W8oYOg7kIi%lp7M zJ+u$Rxzi||O-KG4mNsl~U1|T(xHjGLrZlE=Xv_WZ3$f-0IN|69u2$m! 
zx)8pn`||1w{AUJccKv%WXHELVx|fjP3~;cH1HFWqhANhkRf}4Y+u_ zA&db3DT^Nb!2APn9N;j>U<@}qY2gSQP5U2(;q#Ljx@;(pV`~qh(?Z%ZB@@I51rz*% zTp49g^xfHR22V(`?l-JHVwx{F)!<~^M*A-#b%ZG?$YMa&V~UOx5xSP_24eiS8E5>O zmvl7Z)&H1jaBD4s1s|#T!)^~>1YN8}*%CfhRA3+iB2v0X1P^|qp!}yJdWQ`^A5%!g z?GsXCOb=}5aqJ(*QSa-{3>avuNi1q-S{W55bS5b2GZPgaDZ3^iVf#Kq)BhJL)QSa7 z+0eRH8hYB?z0*fi&!E((DHA7K2!tItt>lAvTy6#!oi$>O6U)rBc#K~dd zm^KZoTQF$S=cR4T;V@{{F0Vk0s-!D`E{KACVqqrv+B%KwoTL#|$2PH_GuziFRtRkl zoqQfV%soTMNb9&ipEr%&$<2>+`t4@ik`{i0PI1g3v|DfSwZj zS~%v>7Z(%?cKhg>AJLucsf%csG4epbk8b}N4U=Z`nISp{lQl1ydV-UeF_+Meu*pm+ zS3d^hrAc)>hV3s$x8HCoZ@D5ny4+IA77xhKYd=Dw!2T;}QP}j}_M?J94)*Mi=#?8z zYW^>(4-6Y~?ysnWDR0Df6^#T#>TT4PuKpc0qo)SwNZG|}rcNOF06KX~jHES%bL(o< z`8xz<{GGI|ygaO~Y>1T;qKs=k1)u^u|B5=QVLkmaUPLRZkpH6){nLw2xge@Bl-SSdFbIRy`=<;6bWG=`s~80xyjPu4T{1d>rNs#oNRCu zmJ2tNo{EN8aYBqvs9=ut73wtddiZ}P>@kkxFpgQhw!gmONoto`_8m_DoZLiT^C4mXKKMv-N4c zKzFvF13rr@9O9L5^PnLeFhQR1i2*nr?1dz}SHrBBnH(T29fR}5nN3*=cCzjb@apRy zH*j;;P+#jaaB~q3vCfu;&=Gh2#2~y5ISX4;@Bkw5f$i6ygXu@j9*5X8=6T&+8_O_C$ z6J`$v;u1Ll4aLu`%Cv*7{6zZm5=+lLZP{AUgB`*>)s4~kGR^Gqx+#SJI?&wva9#FU zChmY(YFk_rF-JQ{lw-ZyRgw!P>u$Q%Kx#{nrzm7#ffu>)N?mq=Y>?ukNAL)hIO zoN1xEmD#F0y7D}n_y!_(Va@XIrr&dy-C8d244>*93=wPTD;Lc_mcarx{AL`jq37=D zU`!}yI%Jvf^mWbL#M2!mRF|>X20`Blkn%c7q&zqa0Ta+#lE#5flC$E+M)=g)%(^=( z1X@%8K3icCnN4eNAfB)dgWA=tpF5X{KE4GuyM=i;kULqp1U22Jm(G!$g_%)F5*K1if z6Pvp11u3?c3ON2&^?U4%DD!SscN0IvWe(s+68*J1KB1*|Au(t@gyPFjOIu=5fru?Q z_&p2&uA{NeM)i~w3Me=!;HiOn>6p`c1(3j*&`Ars+DkHC&?3SHrjSWkx^HT46GB8m z|3!TQqX9b$!qwJOIAi-NVz9^9H{i^yF5Uem=%JhyxIVku7cbM8wZi@;>bQvVEvYgz z1!x=lbtPUd%<$fH0Ml8~bfD^qo>+{NR}F&9FiV-5tp-U5$0I^oAqZ{t)>hFV$w`3# zt~U=ti?o!)!FaxrTA&x8`DuuWwk(mzA*1_~hvMfEKWgd4HC2L)?!$S@s7S&=J%+2^ z=-x*`Jwm*?2Hl&)7hO+?ESw&Wf3R+#p85-RajTrOhwu2}5GD6MIGly^lN~sC6l^43 z$lJ#Ad{~B!XF9stn*klZhyiy@SAg`Hjkr1;J{7lPx%c5? zM!*aRO%?f_*2R9_Oa%@X8HEiglw`%dxY?@LaRO_7KbY~>frrgfz{Cp|z_^xVuxF0Z z0P9zvz<6@KHBVq)fm%59jV-{ybJNIRvAEfSIrv#bo6N%ltq*bGJVh1BHt9Lt{3*u+ z3J_--Doz~C-?$X%goUaP?vU(;fSrz8bu6|9e=~E!NHs29hX1fGlgl15LHWcv zsBQwme;+nOiL54FsA-Ryp@iM13-#p^1x0*L!==_sY#6poVeyL)BE?&e?*|@JP{ap! 
zA|tS$PCjnNf~$O$<(c7viRdVMNN#;DH5?0 zX7RJ4_Hap8Bw|;}_p3!um%~BxuccE z=S?thUsxvOJO}%IpBM0H>v-Nx0lZf1$xRrlC_oN(2ipHl@WRDkmtMq8c!ajM$o90} z!|ctn2VNrx+a2tK&G?96upfFE!^KVx&2KMQhn}#-1PymcfEmqL*%sVOLxFp@DlB@$ zg_2p(R(zMCuez6Dja<3OB__zkIbdOy&mh%{%Sx~Yecx?YV4(aGQ%O1-^$H{lSZ7=5 zs|pHB8EVF|UxPz>>sU|yQ(=MOCt_q}n{Yg<$jz#Gp3c#s>$%PqkmtFbc-_t8Mh zenX=1GEhePS~FJih8AV3#K`5PHK&7OMN6!oy(62~IA1Pm8vT`y|%o6sU$}gpkGZ82fKLFo>7o z(6)EK2@g*5V1Y3A0o+Xk>f!el6b1*vv4lY2{(X!zuwoA?Ec)R*256weCYuEFT=vT5K&oG=XN*q9eEsd*MUe z&pLgpK9Wehv>X_0pjMi**&k^M+ovC!poF8^oaKE2Mt5bkZqYYzX3-|^$;>cLBfpfl z;Rx)rna4wr!;#>}KUG9h#|5}uvhZ6RPfvdalTqV@)?|$TOhLldDC!AuPzIpEgAa$q z+*1mVCZvLT67nKQ+R20P)b)&~C0=MoM4 zwKOQ|q}B)P_=VQK_RN=NKnGtN%~|4CT5Owq&Wz=htK-XaT3DUWD=avi7xvhZKX7fv z&WGSxT_VoF65(tl9ykpXGraWM5a@6O@T&`oR17g=5s(2|6WGX$T3gz#?-U^NtXgTT zI_CLaYk{o3WQL=c2N?>VuDA?E&1yqc<{*_5&Y)8-D^Qxyl~iX1mvO2F&~ra1Ah?nl z;9fkde+9>A0Ji#30m2SaJ`e82!;*_(Yk$;AGTQc2B|IgG89!_3{YQUMcu?I+%HEEv zke^&x@rN@DW3MSJo`vEUz1r;LH7#}E`%M8NuM)lOzkww46EP13?@IRq@7;eWbmE2c zTCwcT{GnxBb*fNc7%uVn$6lZ9tk42XLBwB~4{8=cyq8zH6UzE2M0Cu*D_8;2;e)_M z7aHsyLWXP1_0cGW1}7NCuGBM{=s1-x#V9a-1|d8_VYj7JBRaeA1F>d6Ss;R4j3u>& zPcG^_Y7}Ig7_U&lS*YksTI!@kIo;xRvcFn^*gUi0DmGj>VuPxau}0jWZEYei{%KT^ zvDGvvg+jW4t5<;Cg1ugo$V#C(6gmhBq*~p&z+U3HU4EBr!zeI!_}sD5+T>0Rp=zC| z$foFLbLAlGr1`I(0afXA(M#MCOC?do4eAg>b=Z@F#~+T8n^RZO$YUw|q8JBe<#kD9 z$U~AO^2%|uv*(gYbVS^UZ)ie>cr#a033W&;Ti7r{ zn!jo!k$Hh)Y=>xcDrw3_G$v4jLMVQ{+1r((P>CIGz=DIG#$f(pB7$9=reMJlle>Hq z63wcolV5`tf>lZOR8F9j{D6Wde4Q<9LUi7+c=jemF1D-u6$|yJ>oq0vPLr4(W3Pv+ zm~rf(rX(t8$lW+(Ag{5)MUfL&hyHgtr0AWrr zB>O|@x*}48O=?SGG>$dqwykvU)qzus6rJZXuDA`Uyw?8 zC*{dhRpt|lEW&))!2DP4uTz#{CjB)Bl;}Vg$&(KRSY6t%Z1%(c`Q0X&8!eVLT)q<4&-0_(`8+3TXhPEEszf0aRey_9Xcy49@z31017TqYdOrl;d6d(#Hg;xWdaII z$=g9}KrXpUW3}$?qDUoK{se;opAePT4aJAQf^KW|gEYWHUCBJ)dN&eTRW67NgLsQv zL;lJEFS00h+fBXT$pImW;p_{!-6icD0;(|!rM2ZYGFO8QH{Yh9WDG}aSej_AUJ8w; zFb9pzsxHXpcP<0W^6N3Zq#So&MH;W^ZnXmE*1mEb3Z6qcilxe`E&8b+^CJ*zm$}v% zzvgmJ-`)gXss0tNeuT_Ed1q4>?TW~$xTDHl>O4Jw@ zB%GVa)AAA$MeiC+e&cmFy3$QUNIlJ}9&LsyenHJxSPC|_ZYcM8BBD@LjWD4ZFJ}m}86z~%=>2z?;NT8d5rlp7 zM}Sc^at!&cGI16K#{~wCaT8ZO9W!YsNub+~!tNdl9nPAVGD=Y@>W(rPwoVZF+tCWh z(7(D+X;!%yNfUNUvgH!Ghf^QHxPrcrhgu}Oy<0Osz2^}6I0x7m>PQi)s3JTI2 zMC`M0GH6M?867&o1m~oecS0pIVXTO9W{1~X5-1tmM5n;g7e!lCiydleqYVt}IzXl$%8<{tSbJmFkhy{A~3 zFEmo3^Q1k zI})%}vlJFg6P(7#h;UeJDVSy2jv;j(CfUMfemUsZ<^4#b@ZGyRW}Ewl2vDU50jz2! 
z$Y?S&x-bRc=*NF~$wEY_Pl1^vsPy2sIxRX-F8MrxKPb&9f~ny*nIpFYx6YOGop(Vb zTtKM7>N3ihCr2p|tlr08Z~8Gk{tfJ#wdbN5kq>Lh1A$?QqH0nMtxAD2@XiJd&PFvJ zrJJ+B#2mN_%~Sa7{{p!#oeE<;@&>+xPF;su@p6dMco;w&JYeoI)WcO!zrBicX(1}K`CK+A_?xEB9c)Pj}|Cie!!GmiFK z4iz<)EhqbH$PyG5UpO4MLiG#J+o%&*>hD{(VBTc1(Rn^%>>1 zJF8qj;fVE>zbhbK0u1iR+89h(cSNWD6KlNQa(Imbgn1K3Sm_ZVzPzpyqH#*9catrx zs1lZsFQ@v{wgAo>{N-<$-a0+D!Hnz(F47v7-8o>RS!f*&<_LX@(dyo>u9eHB0M)X= zeoK#ZxN~1n!{twqPMrkR68Rgs*6H&{S2=xAZiikuvdNni>)`Oe@H}op%U*-$T5?|` zaZsz=(FxVFN*XBJH8M(WkOr}2YuoJHVnr1U&EsK*ib;D7^%}ofu^WIO*L37&@}Ka% z3j0fn5U7|gkLr%?atnly1$ddz9M+0t_>}UAeXziIBM; zRDy&Dro@f)paW&U-ZxDy8*-35sJd>*t-o5-epo&~Wp?NNZw-jH4F2S@b=i(PqR7N< zc6T%+_`Va^-F-M&XvAH)q$t?U^EQV&5DX}Zb(alX*dqQCmH>rcu8m4=@baeXD8vvNK`w6f8HeCP)juXz7|U`u0CbftZtW6)+vUf z&`A{1?4nu!(`1Yxx;Lae^h0H8;8AWj6!pJ+0&e78ErY~K>r)c@xk3Xr$|w_?^Etd5 zVjXP77YYnWgF+XgX0zf&k1xsS8_A1#=PGq5FHWYh@6Tza5>|Yz(1;t#7>bA+fK_9& t&ubkGKKxB3L^xiJ $unk %token T_INCLUDE %token T_INCLUDE_ONCE %token T_EXIT @@ -297,9 +296,7 @@ start: // save position yylex.(*Parser).rootNode.GetNode().Position = position.NewNodeListPosition($1) - yylex.(*Parser).setFreeFloating(yylex.(*Parser).rootNode, token.End, yylex.(*Parser).currentToken.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(yylex.(*Parser).rootNode, token.End, yylex.(*Parser).currentToken.SkippedTokens) } ; @@ -343,14 +340,10 @@ top_statement_list: if $2 != nil { $$ = append($1, $2) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -364,9 +357,7 @@ namespace_name: namePart.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(namePart, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(namePart, token.Start, $1.SkippedTokens) } | namespace_name T_NS_SEPARATOR T_STRING { @@ -377,10 +368,8 @@ namespace_name: namePart.GetNode().Position = position.NewTokenPosition($3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(namePart, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(namePart, token.Start, $3.SkippedTokens) } ; @@ -391,8 +380,6 @@ name: // save position $$.GetNode().Position = position.NewNodeListPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_NAMESPACE T_NS_SEPARATOR namespace_name { @@ -402,10 +389,8 @@ name: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -415,9 +400,7 @@ name: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -426,38 +409,26 @@ top_statement: { // error $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | statement { $$ = $1 - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | function_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | trait_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | interface_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_HALT_COMPILER '(' ')' ';' { @@ -467,10 +438,8 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.Tokens, append($3.Tokens, $4.Tokens...)...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.SkippedTokens, append($3.SkippedTokens, $4.SkippedTokens...)...)) } | T_NAMESPACE namespace_name ';' { @@ -482,12 +451,10 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) yylex.(*Parser).MoveFreeFloating($2[0], name) - yylex.(*Parser).setFreeFloating(name, token.End, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(name, token.End, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_NAMESPACE namespace_name '{' top_statement_list '}' { @@ -499,12 +466,10 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) yylex.(*Parser).MoveFreeFloating($2[0], name) - yylex.(*Parser).setFreeFloating(name, token.End, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(name, token.End, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $5.SkippedTokens) } | T_NAMESPACE '{' top_statement_list '}' { @@ -514,11 +479,9 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.SkippedTokens) } | T_USE mixed_group_use_declaration ';' { @@ -528,10 +491,8 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | T_USE use_type group_use_declaration ';' { @@ -543,10 +504,8 @@ top_statement: $$.GetNode().Position = 
position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.SkippedTokens) } | T_USE use_declarations ';' { @@ -558,10 +517,8 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | T_USE use_type use_declarations ';' { @@ -575,10 +532,8 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.SkippedTokens) } | T_CONST const_list ';' { @@ -588,11 +543,9 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } ; @@ -605,9 +558,7 @@ use_type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_CONST { @@ -617,9 +568,7 @@ use_type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -641,13 +590,11 @@ group_use_declaration: // save comments if $5 != nil { - yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $5.Tokens) + yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $5.SkippedTokens) } - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $6.Tokens) - yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name T_NS_SEPARATOR '{' unprefixed_use_declarations possible_comma '}' { @@ -667,15 +614,13 @@ group_use_declaration: $$.GetNode().Position = position.NewTokensPosition($1, $7) // save comments - 
yylex.(*Parser).setFreeFloating(prefixNsSeparator, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating(prefixNsSeparator, token.Start, $1.SkippedTokens) if $6 != nil { - yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $6.Tokens) + yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $6.SkippedTokens) } - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $4.Tokens) - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $7.Tokens) - yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $4.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $7.SkippedTokens) + yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $3.SkippedTokens) } ; @@ -697,13 +642,11 @@ mixed_group_use_declaration: // save comments if $5 != nil { - yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $5.Tokens) + yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $5.SkippedTokens) } - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $6.Tokens) - yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name T_NS_SEPARATOR '{' inline_use_declarations possible_comma '}' { @@ -723,15 +666,13 @@ mixed_group_use_declaration: $$.GetNode().Position = position.NewTokensPosition($1, $7) // save comments - yylex.(*Parser).setFreeFloating(prefixNsSeparator, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating(prefixNsSeparator, token.Start, $1.SkippedTokens) if $6 != nil { - yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $6.Tokens) + yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $6.SkippedTokens) } - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $4.Tokens) - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $7.Tokens) - yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $4.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $7.SkippedTokens) + yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $3.SkippedTokens) } ; @@ -752,15 +693,11 @@ inline_use_declarations: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | inline_use_declaration { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -770,15 +707,11 @@ unprefixed_use_declarations: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | unprefixed_use_declaration { $$ = []ast.Vertex{$1} - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -788,15 +721,11 @@ use_declarations: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | use_declaration { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -804,8 +733,6 @@ inline_use_declaration: unprefixed_use_declaration { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | use_type unprefixed_use_declaration { @@ -813,8 +740,6 @@ inline_use_declaration: // save position $$.GetNode().Position = position.NewNodesPosition($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -827,8 +752,6 @@ unprefixed_use_declaration: // save position name.GetNode().Position = position.NewNodeListPosition($1) $$.GetNode().Position = position.NewNodePosition(name) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | namespace_name T_AS T_STRING { @@ -844,10 +767,8 @@ unprefixed_use_declaration: $$.GetNode().Position = position.NewNodeListTokenPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $3.SkippedTokens) } ; @@ -855,8 +776,6 @@ use_declaration: unprefixed_use_declaration { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_NS_SEPARATOR unprefixed_use_declaration { @@ -867,9 +786,7 @@ use_declaration: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -879,15 +796,11 @@ const_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | const_decl { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -902,14 +815,10 @@ inner_statement_list: if $2 != nil { $$ = append($1, $2) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -918,38 +827,26 @@ inner_statement: { // error $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | function_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | trait_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | interface_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_HALT_COMPILER '(' ')' ';' { @@ -959,10 +856,8 @@ inner_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.Tokens, append($3.Tokens, $4.Tokens...)...)) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.SkippedTokens, append($3.SkippedTokens, $4.SkippedTokens...)...)) } statement: @@ -974,22 +869,16 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) } | if_stmt { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | alt_if_stmt { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_WHILE '(' expr ')' while_statement { @@ -1009,11 +898,9 @@ statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | T_DO statement T_WHILE '(' expr ')' ';' { @@ -1025,13 +912,11 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $7) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $4.Tokens) - yylex.(*Parser).setFreeFloating(exprBrackets, token.End, append($6.Tokens, $7.Tokens...)) - yylex.(*Parser).setToken($$, token.SemiColon, $7.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating(exprBrackets, token.End, append($6.SkippedTokens, $7.SkippedTokens...)) + yylex.(*Parser).setToken($$, token.SemiColon, $7.SkippedTokens) } | T_FOR '(' for_exprs ';' for_exprs ';' for_exprs ')' for_statement { @@ -1052,13 +937,11 @@ statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $9) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.For, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.InitExpr, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.CondExpr, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.IncExpr, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.For, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.InitExpr, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.CondExpr, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.IncExpr, $8.SkippedTokens) } | T_SWITCH '(' expr ')' switch_case_list { @@ -1080,11 +963,9 @@ statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $5) // save comments - 
yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | T_BREAK optional_expr ';' { @@ -1094,11 +975,9 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_CONTINUE optional_expr ';' { @@ -1108,11 +987,9 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_RETURN optional_expr ';' { @@ -1122,11 +999,9 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_GLOBAL global_var_list ';' { @@ -1136,11 +1011,9 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.VarList, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_STATIC static_var_list ';' { @@ -1150,11 +1023,9 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.VarList, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_ECHO echo_expr_list ';' { @@ -1164,12 +1035,10 @@ statement: $$.GetNode().Position = 
position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Echo, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Echo, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_INLINE_HTML { @@ -1179,9 +1048,7 @@ statement: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | expr ';' { @@ -1192,10 +1059,8 @@ statement: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | T_UNSET '(' unset_variables possible_comma ')' ';' { @@ -1205,17 +1070,15 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Unset, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Unset, $2.SkippedTokens) if $4 != nil { - yylex.(*Parser).setFreeFloating($$, token.VarList, append($4.Tokens, $5.Tokens...)) + yylex.(*Parser).setFreeFloating($$, token.VarList, append($4.SkippedTokens, $5.SkippedTokens...)) } else { - yylex.(*Parser).setFreeFloating($$, token.VarList, $5.Tokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $5.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.CloseParenthesisToken, $6.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.CloseParenthesisToken, $6.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $6.SkippedTokens) } | T_FOREACH '(' expr T_AS foreach_variable ')' foreach_statement { @@ -1234,13 +1097,10 @@ statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $7) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $6.Tokens) - - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $6.SkippedTokens) } | T_FOREACH '(' expr T_AS variable T_DOUBLE_ARROW foreach_variable ')' foreach_statement { @@ -1261,13 +1121,11 @@ statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $9) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.Tokens) - 
yylex.(*Parser).setFreeFloating($$, token.Expr, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Key, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Key, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $8.SkippedTokens) } | T_DECLARE '(' const_list ')' declare_statement { @@ -1278,11 +1136,9 @@ statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Declare, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ConstList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Declare, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ConstList, $4.SkippedTokens) } | ';' { @@ -1292,10 +1148,8 @@ statement: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $1.SkippedTokens) } | T_TRY '{' inner_statement_list '}' catch_list finally_statement { @@ -1308,11 +1162,9 @@ statement: } // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Try, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Try, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.SkippedTokens) } | T_THROW expr ';' { @@ -1322,11 +1174,9 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_GOTO T_STRING ';' { @@ -1338,12 +1188,10 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(label, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Label, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(label, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Label, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_STRING ':' { @@ -1355,18 +1203,14 @@ statement: 
$$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Label, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Label, $2.SkippedTokens) } catch_list: /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | catch_list T_CATCH '(' catch_name_list T_VARIABLE ')' '{' inner_statement_list '}' { @@ -1381,31 +1225,25 @@ catch_list: catch.GetNode().Position = position.NewTokensPosition($2, $9) // save comments - yylex.(*Parser).setFreeFloating(catch, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(catch, token.Catch, $3.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.Start, $5.Tokens) - yylex.(*Parser).setFreeFloating(catch, token.Var, $6.Tokens) - yylex.(*Parser).setFreeFloating(catch, token.Cond, $7.Tokens) - yylex.(*Parser).setFreeFloating(catch, token.Stmts, $9.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(catch, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(catch, token.Catch, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating(catch, token.Var, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating(catch, token.Cond, $7.SkippedTokens) + yylex.(*Parser).setFreeFloating(catch, token.Stmts, $9.SkippedTokens) } ; catch_name_list: name { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | catch_name_list '|' name { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -1413,8 +1251,6 @@ finally_statement: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_FINALLY '{' inner_statement_list '}' { @@ -1424,11 +1260,9 @@ finally_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Finally, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Finally, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.SkippedTokens) } ; @@ -1436,17 +1270,13 @@ unset_variables: unset_variable { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | unset_variables ',' unset_variable { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -1454,8 +1284,6 @@ unset_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1471,24 +1299,22 @@ function_declaration_statement: // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) if $2 != nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $2.Tokens) - 
yylex.(*Parser).setFreeFloating(name, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $3.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating(name, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $3.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.Name, $5.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ParamList, $7.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ReturnType, $9.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $11.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ParamList, $7.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ReturnType, $9.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $11.SkippedTokens) // normalize if $8 == nil { yylex.(*Parser).setFreeFloatingTokens($$, token.Params, $$.GetNode().Tokens[token.ReturnType]); delete($$.GetNode().Tokens, token.ReturnType) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1526,12 +1352,10 @@ class_declaration_statement: // save comments yylex.(*Parser).MoveFreeFloating($1[0], $$) - yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $7.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $9.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $7.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $9.SkippedTokens) } | T_CLASS T_STRING extends_from implements_list backup_doc_comment '{' class_statement_list '}' { @@ -1543,12 +1367,10 @@ class_declaration_statement: $$.GetNode().Position = position.NewTokensPosition($1, $8) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $8.SkippedTokens) } ; @@ -1556,14 +1378,10 @@ class_modifiers: class_modifier { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_modifiers class_modifier { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1576,9 +1394,7 @@ class_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FINAL { @@ -1588,9 +1404,7 @@ class_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -1605,12 +1419,10 @@ 
trait_declaration_statement: $$.GetNode().Position = position.NewTokensPosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $6.SkippedTokens) } ; @@ -1625,12 +1437,10 @@ interface_declaration_statement: $$.GetNode().Position = position.NewTokensPosition($1, $7) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $5.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $7.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $7.SkippedTokens) } ; @@ -1638,8 +1448,6 @@ extends_from: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_EXTENDS name { @@ -1649,9 +1457,7 @@ extends_from: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -1659,8 +1465,6 @@ interface_extends_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_EXTENDS name_list { @@ -1670,9 +1474,7 @@ interface_extends_list: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -1680,8 +1482,6 @@ implements_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_IMPLEMENTS name_list { @@ -1691,9 +1491,7 @@ implements_list: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -1701,8 +1499,6 @@ foreach_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '&' variable { @@ -1712,9 +1508,7 @@ foreach_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_LIST '(' array_pair_list ')' { @@ -1724,11 +1518,9 @@ foreach_variable: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.List, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, 
token.ArrayPairList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.List, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.SkippedTokens) } | '[' array_pair_list ']' { @@ -1738,10 +1530,8 @@ foreach_variable: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save commentsc - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.SkippedTokens) } ; @@ -1752,8 +1542,6 @@ for_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDFOR ';' { @@ -1765,12 +1553,10 @@ for_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -1781,8 +1567,6 @@ foreach_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDFOREACH ';' { @@ -1794,12 +1578,10 @@ foreach_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -1810,8 +1592,6 @@ declare_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDDECLARE ';' { @@ -1823,12 +1603,10 @@ declare_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -1843,10 +1621,8 @@ switch_case_list: 
$$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.SkippedTokens) } | '{' ';' case_list '}' { @@ -1858,11 +1634,9 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.SkippedTokens) } | ':' case_list T_ENDSWITCH ';' { @@ -1874,12 +1648,10 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } | ':' ';' case_list T_ENDSWITCH ';' { @@ -1892,13 +1664,11 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $5.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $5.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $5.SkippedTokens) } ; @@ -1906,8 +1676,6 @@ case_list: /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | case_list T_CASE expr case_separator inner_statement_list { @@ -1918,11 +1686,9 @@ case_list: _case.GetNode().Position = position.NewTokenNodeListPosition($2, $5) // save comments - yylex.(*Parser).setFreeFloating(_case, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(_case, token.Expr, append($4.Tokens)) - yylex.(*Parser).setToken(_case, token.CaseSeparator, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_case, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(_case, token.Expr, append($4.SkippedTokens)) + yylex.(*Parser).setToken(_case, token.CaseSeparator, 
$4.SkippedTokens) } | case_list T_DEFAULT case_separator inner_statement_list { @@ -1933,11 +1699,9 @@ case_list: _default.GetNode().Position = position.NewTokenNodeListPosition($2, $4) // save comments - yylex.(*Parser).setFreeFloating(_default, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(_default, token.Default, $3.Tokens) - yylex.(*Parser).setToken(_default, token.CaseSeparator, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_default, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(_default, token.Default, $3.SkippedTokens) + yylex.(*Parser).setToken(_default, token.CaseSeparator, $3.SkippedTokens) } ; @@ -1959,8 +1723,6 @@ while_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDWHILE ';' { @@ -1972,12 +1734,10 @@ while_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -1992,11 +1752,9 @@ if_stmt_without_else: $$.GetNode().Position = position.NewTokenNodePosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | if_stmt_without_else T_ELSEIF '(' expr ')' statement { @@ -2012,11 +1770,9 @@ if_stmt_without_else: $$.GetNode().Position = position.NewNodesPosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $5.SkippedTokens) } ; @@ -2024,8 +1780,6 @@ if_stmt: if_stmt_without_else %prec T_NOELSE { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | if_stmt_without_else T_ELSE statement { @@ -2039,9 +1793,7 @@ if_stmt: $$.GetNode().Position = position.NewNodesPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(_else, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_else, token.Start, $2.SkippedTokens) } ; @@ -2060,12 +1812,10 @@ alt_if_stmt_without_else: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $6) // save comments - 
yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $5.SkippedTokens) } | alt_if_stmt_without_else T_ELSEIF '(' expr ')' ':' inner_statement_list { @@ -2084,12 +1834,10 @@ alt_if_stmt_without_else: _elseIf.GetNode().Position = position.NewTokenNodeListPosition($2, $7) // save comments - yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $5.Tokens) - yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $5.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $6.SkippedTokens) } ; @@ -2104,13 +1852,11 @@ alt_if_stmt: // save comments altif := $$.(*ast.StmtAltIf) if len(altif.ElseIf) > 0 { - yylex.(*Parser).setFreeFloating(altif.ElseIf[len(altif.ElseIf)-1], token.End, append($2.Tokens, $3.Tokens...)) + yylex.(*Parser).setFreeFloating(altif.ElseIf[len(altif.ElseIf)-1], token.End, append($2.SkippedTokens, $3.SkippedTokens...)) } else { - yylex.(*Parser).setFreeFloating(altif.Stmt, token.End, append($2.Tokens, $3.Tokens...)) + yylex.(*Parser).setFreeFloating(altif.Stmt, token.End, append($2.SkippedTokens, $3.SkippedTokens...)) } - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | alt_if_stmt_without_else T_ELSE ':' inner_statement_list T_ENDIF ';' { @@ -2128,12 +1874,10 @@ alt_if_stmt: $$.GetNode().Position = position.NewNodeTokenPosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating(_else, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(stmtsBrackets, token.End, append($5.Tokens, $6.Tokens...)) - yylex.(*Parser).setToken($$, token.SemiColon, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_else, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(stmtsBrackets, token.End, append($5.SkippedTokens, $6.SkippedTokens...)) + yylex.(*Parser).setToken($$, token.SemiColon, $6.SkippedTokens) } ; @@ -2141,14 +1885,10 @@ parameter_list: non_empty_parameter_list { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2156,17 +1896,13 @@ non_empty_parameter_list: parameter { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | 
non_empty_parameter_list ',' parameter { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -2179,18 +1915,18 @@ parameter: var variable ast.Vertex variable = &ast.ExprVariable{ast.Node{}, identifier} variable.GetNode().Position = position.NewTokenPosition($4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $4.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens) if $3 != nil { variable = &ast.Variadic{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($3, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $3.SkippedTokens) } if $2 != nil { variable = &ast.Reference{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($2, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $2.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $2.SkippedTokens) } $$ = &ast.Parameter{ast.Node{}, $1, variable, nil} @@ -2204,8 +1940,6 @@ parameter: } else { $$.GetNode().Position = position.NewTokenPosition($4) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | optional_type is_reference is_variadic T_VARIABLE '=' expr { @@ -2215,19 +1949,19 @@ parameter: var variable ast.Vertex variable = &ast.ExprVariable{ast.Node{}, identifier} variable.GetNode().Position = position.NewTokenPosition($4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $4.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.End, $5.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.End, $5.SkippedTokens) if $3 != nil { variable = &ast.Variadic{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($3, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $3.SkippedTokens) } if $2 != nil { variable = &ast.Reference{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($2, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $2.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $2.SkippedTokens) } $$ = &ast.Parameter{ast.Node{}, $1, variable, $6} @@ -2241,8 +1975,6 @@ parameter: } else { $$.GetNode().Position = position.NewTokenNodePosition($4, $6) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2250,14 +1982,10 @@ optional_type: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | type_expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2265,8 +1993,6 @@ type_expr: type { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '?' 
type { @@ -2276,9 +2002,7 @@ type_expr: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -2291,9 +2015,7 @@ type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_CALLABLE { @@ -2303,15 +2025,11 @@ type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | name { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2319,17 +2037,13 @@ return_type: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' type_expr { $$ = $2; // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.Tokens, $$.GetNode().Tokens[token.Start]...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.SkippedTokens, $$.GetNode().Tokens[token.Start]...)) } ; @@ -2342,10 +2056,8 @@ argument_list: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $2.SkippedTokens) } | '(' non_empty_argument_list possible_comma ')' { @@ -2355,14 +2067,12 @@ argument_list: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) if $3 != nil { - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($3.Tokens, $4.Tokens...)) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($3.SkippedTokens, $4.SkippedTokens...)) } else { - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.Tokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.SkippedTokens) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2370,17 +2080,13 @@ non_empty_argument_list: argument { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_argument_list ',' argument { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -2394,8 +2100,6 @@ argument: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ELLIPSIS expr { @@ -2405,9 +2109,7 @@ argument: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -2417,15 +2119,11 @@ global_var_list: $$ = 
append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | global_var { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2433,8 +2131,6 @@ global_var: simple_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2444,15 +2140,11 @@ static_var_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | static_var { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2469,9 +2161,7 @@ static_var: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_VARIABLE '=' expr { @@ -2485,10 +2175,8 @@ static_var: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } ; @@ -2496,14 +2184,10 @@ class_statement_list: class_statement_list class_statement { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2517,10 +2201,8 @@ class_statement: // save comments yylex.(*Parser).MoveFreeFloating($1[0], $$) - yylex.(*Parser).setFreeFloating($$, token.PropertyList, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.PropertyList, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } | method_modifiers T_CONST class_const_list ';' { @@ -2532,14 +2214,12 @@ class_statement: // save comments if len($1) > 0 { yylex.(*Parser).MoveFreeFloating($1[0], $$) - yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Start, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $2.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.ConstList, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.ConstList, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } | T_USE name_list trait_adaptations { @@ -2549,9 +2229,7 @@ class_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | method_modifiers T_FUNCTION returns_ref identifier backup_doc_comment '(' parameter_list ')' return_type method_body { @@ -2569,20 +2247,18 @@ 
class_statement: // save comments if len($1) > 0 { yylex.(*Parser).MoveFreeFloating($1[0], $$) - yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Start, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $2.SkippedTokens) } if $3 == nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $4.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Function, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.Name, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ParameterList, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ParameterList, $8.SkippedTokens) } ; @@ -2590,17 +2266,13 @@ name_list: name { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | name_list ',' name { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -2612,11 +2284,8 @@ trait_adaptations: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $1.Tokens) - - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $1.SkippedTokens) } | '{' '}' { @@ -2625,10 +2294,8 @@ trait_adaptations: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AdaptationList, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AdaptationList, $2.SkippedTokens) } | '{' trait_adaptation_list '}' { @@ -2637,10 +2304,8 @@ trait_adaptations: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AdaptationList, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AdaptationList, $3.SkippedTokens) } ; @@ -2648,14 +2313,10 @@ trait_adaptation_list: trait_adaptation { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | trait_adaptation_list trait_adaptation { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2665,20 +2326,16 @@ trait_adaptation: $$ = $1; // save comments - yylex.(*Parser).setFreeFloating($$, token.NameList, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.NameList, $2.SkippedTokens) + yylex.(*Parser).setToken($$, 
token.SemiColon, $2.SkippedTokens) } | trait_alias ';' { $$ = $1; // save comments - yylex.(*Parser).setFreeFloating($$, token.Alias, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Alias, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } ; @@ -2692,9 +2349,7 @@ trait_precedence: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) } ; @@ -2710,10 +2365,8 @@ trait_alias: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $3.SkippedTokens) } | trait_method_reference T_AS reserved_non_modifiers { @@ -2726,10 +2379,8 @@ trait_alias: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $3.SkippedTokens) } | trait_method_reference T_AS member_modifier identifier { @@ -2742,10 +2393,8 @@ trait_alias: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $4.SkippedTokens) } | trait_method_reference T_AS member_modifier { @@ -2756,9 +2405,7 @@ trait_alias: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) } ; @@ -2773,15 +2420,11 @@ trait_method_reference: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | absolute_trait_method_reference { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2797,10 +2440,8 @@ absolute_trait_method_reference: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $2.SkippedTokens) } ; @@ -2813,10 +2454,8 @@ method_body: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, 
token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $1.SkippedTokens) } | '{' inner_statement_list '}' { @@ -2826,10 +2465,8 @@ method_body: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) } ; @@ -2837,8 +2474,6 @@ variable_modifiers: non_empty_member_modifiers { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_VAR { @@ -2849,9 +2484,7 @@ variable_modifiers: modifier.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(modifier, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(modifier, token.Start, $1.SkippedTokens) } ; @@ -2859,14 +2492,10 @@ method_modifiers: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_member_modifiers { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2874,14 +2503,10 @@ non_empty_member_modifiers: member_modifier { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_member_modifiers member_modifier { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2894,9 +2519,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_PROTECTED { @@ -2906,9 +2529,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_PRIVATE { @@ -2918,9 +2539,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_STATIC { @@ -2930,9 +2549,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_ABSTRACT { @@ -2942,9 +2559,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FINAL { @@ -2954,9 +2569,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -2966,15 +2579,11 @@ property_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | property { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2991,9 +2600,7 @@ property: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_VARIABLE '=' expr backup_doc_comment { @@ -3007,10 +2614,8 @@ property: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } ; @@ -3020,15 +2625,11 @@ class_const_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | class_const_decl { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3043,10 +2644,8 @@ class_const_decl: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } ; @@ -3061,10 +2660,8 @@ const_decl: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } ; @@ -3074,15 +2671,11 @@ echo_expr_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | echo_expr { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3090,8 +2683,6 @@ echo_expr: expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3099,14 +2690,10 @@ for_exprs: /* empty */ { $$ = nil; - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_for_exprs { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3116,15 +2703,11 @@ non_empty_for_exprs: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | expr { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3141,11 +2724,9 @@ anonymous_class: $$.GetNode().Position = position.NewTokensPosition($1, $8) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, 
$6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $8.SkippedTokens) } ; @@ -3161,9 +2742,7 @@ new_expr: } // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NEW anonymous_class { @@ -3173,9 +2752,7 @@ new_expr: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -3190,12 +2767,10 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.List, $2.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.List, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $5.SkippedTokens) } | '[' array_pair_list ']' '=' expr { @@ -3207,11 +2782,9 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(shortList, token.ArrayPairList, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(shortList, token.ArrayPairList, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $4.SkippedTokens) } | variable '=' expr { @@ -3222,9 +2795,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable '=' '&' expr { @@ -3235,10 +2806,8 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Equal, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Equal, $3.SkippedTokens) } | T_CLONE expr { @@ -3248,9 +2817,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | variable T_PLUS_EQUAL expr { @@ -3261,9 +2828,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, 
token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_MINUS_EQUAL expr { @@ -3274,9 +2839,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_MUL_EQUAL expr { @@ -3287,9 +2850,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_POW_EQUAL expr { @@ -3300,9 +2861,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_DIV_EQUAL expr { @@ -3313,9 +2872,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_CONCAT_EQUAL expr { @@ -3326,9 +2883,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_MOD_EQUAL expr { @@ -3339,9 +2894,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_AND_EQUAL expr { @@ -3352,9 +2905,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_OR_EQUAL expr { @@ -3365,9 +2916,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_XOR_EQUAL expr { @@ -3378,9 +2927,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_SL_EQUAL expr { @@ -3391,9 +2938,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_SR_EQUAL expr { @@ -3404,9 +2949,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_COALESCE_EQUAL expr { @@ -3417,9 +2960,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_INC { @@ -3430,9 +2971,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | T_INC variable { @@ -3442,9 +2981,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | variable T_DEC { @@ -3455,9 +2992,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | T_DEC variable { @@ -3467,9 +3002,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | expr T_BOOLEAN_OR expr { @@ -3480,9 +3013,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_BOOLEAN_AND expr { @@ -3493,9 +3024,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_LOGICAL_OR expr { @@ -3506,9 +3035,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_LOGICAL_AND expr { @@ -3519,9 +3046,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_LOGICAL_XOR expr { @@ -3532,9 +3057,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '|' expr { @@ -3545,9 +3068,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + 
yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '&' expr { @@ -3558,9 +3079,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '^' expr { @@ -3571,9 +3090,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '.' expr { @@ -3584,9 +3101,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '+' expr { @@ -3597,9 +3112,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '-' expr { @@ -3610,9 +3123,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '*' expr { @@ -3623,9 +3134,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_POW expr { @@ -3636,9 +3145,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '/' expr { @@ -3649,9 +3156,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '%' expr { @@ -3662,9 +3167,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_SL expr { @@ -3675,9 +3178,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_SR expr { @@ -3688,9 +3189,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | '+' expr %prec T_INC { @@ -3700,9 +3199,7 @@ expr_without_variable: $$.GetNode().Position = 
position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '-' expr %prec T_INC { @@ -3712,9 +3209,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '!' expr { @@ -3724,9 +3219,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '~' expr { @@ -3736,9 +3229,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | expr T_IS_IDENTICAL expr { @@ -3749,9 +3240,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_NOT_IDENTICAL expr { @@ -3762,9 +3251,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_EQUAL expr { @@ -3775,9 +3262,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_NOT_EQUAL expr { @@ -3788,10 +3273,8 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.Equal, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.Equal, $2.SkippedTokens) } | expr '<' expr { @@ -3802,9 +3285,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_SMALLER_OR_EQUAL expr { @@ -3815,9 +3296,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '>' expr { @@ -3828,9 +3307,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, 
$2.SkippedTokens) } | expr T_IS_GREATER_OR_EQUAL expr { @@ -3841,9 +3318,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_SPACESHIP expr { @@ -3854,9 +3329,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_INSTANCEOF class_name_reference { @@ -3867,9 +3340,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | '(' expr ')' { @@ -3879,16 +3350,12 @@ expr_without_variable: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | new_expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr '?' expr ':' expr { @@ -3899,10 +3366,8 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Cond, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.True, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.True, $4.SkippedTokens) } | expr '?' 
':' expr { @@ -3913,10 +3378,8 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Cond, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.True, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.True, $3.SkippedTokens) } | expr T_COALESCE expr { @@ -3927,15 +3390,11 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | internal_functions_in_yacc { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_INT_CAST expr { @@ -3945,10 +3404,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_DOUBLE_CAST expr { @@ -3958,10 +3415,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_STRING_CAST expr { @@ -3971,10 +3426,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_ARRAY_CAST expr { @@ -3984,10 +3437,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_OBJECT_CAST expr { @@ -3997,10 +3448,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_BOOL_CAST expr { @@ -4010,10 +3459,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | 
T_UNSET_CAST expr { @@ -4023,10 +3470,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_EXIT exit_expr { @@ -4044,9 +3489,7 @@ expr_without_variable: } // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '@' expr { @@ -4056,15 +3499,11 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '`' backticks_expr '`' { @@ -4074,9 +3513,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_PRINT expr { @@ -4086,9 +3523,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_YIELD { @@ -4098,9 +3533,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_YIELD expr { @@ -4110,9 +3543,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_YIELD expr T_DOUBLE_ARROW expr { @@ -4122,10 +3553,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) } | T_YIELD_FROM expr { @@ -4135,15 +3564,11 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | inline_function { $$ = $1; - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_STATIC inline_function { @@ -4161,9 +3586,7 @@ expr_without_variable: // save comments yylex.(*Parser).setFreeFloatingTokens($$, token.Static, $$.GetNode().Tokens[token.Start]); 
delete($$.GetNode().Tokens, token.Start) - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens); - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens); } ; @@ -4176,16 +3599,16 @@ inline_function: $$.GetNode().Position = position.NewTokensPosition($1, $11) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) if $2 == nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $4.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Function, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.ParameterList, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ReturnType, $9.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $11.Tokens) + yylex.(*Parser).setFreeFloating($$, token.ParameterList, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ReturnType, $9.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $11.SkippedTokens) // normalize if $8 == nil { @@ -4194,8 +3617,6 @@ inline_function: if $7 == nil { yylex.(*Parser).setFreeFloatingTokens($$, token.Params, $$.GetNode().Tokens[token.LexicalVarList]); delete($$.GetNode().Tokens, token.LexicalVarList) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_FN returns_ref '(' parameter_list ')' return_type backup_doc_comment T_DOUBLE_ARROW expr { @@ -4205,22 +3626,20 @@ inline_function: $$.GetNode().Position = position.NewTokenNodePosition($1, $9) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) if $2 == nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $3.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $3.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Function, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Ampersand, $3.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Ampersand, $3.SkippedTokens) }; - yylex.(*Parser).setFreeFloating($$, token.ParameterList, $5.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ReturnType, $8.Tokens) + yylex.(*Parser).setFreeFloating($$, token.ParameterList, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ReturnType, $8.SkippedTokens) // normalize if $6 == nil { yylex.(*Parser).setFreeFloatingTokens($$, token.Params, $$.GetNode().Tokens[token.ReturnType]); delete($$.GetNode().Tokens, token.ReturnType) }; - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4243,8 +3662,6 @@ lexical_vars: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_USE '(' lexical_var_list ')' { @@ -4254,11 +3671,9 @@ lexical_vars: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Use, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.LexicalVarList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + 
yylex.(*Parser).setFreeFloating($$, token.Use, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.LexicalVarList, $4.SkippedTokens) } ; @@ -4268,15 +3683,11 @@ lexical_var_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | lexical_var { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4291,9 +3702,7 @@ lexical_var: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '&' T_VARIABLE { @@ -4307,10 +3716,8 @@ lexical_var: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $2.SkippedTokens) } ; @@ -4324,8 +3731,6 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_name T_PAAMAYIM_NEKUDOTAYIM member_name argument_list { @@ -4336,9 +3741,7 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | variable_class_name T_PAAMAYIM_NEKUDOTAYIM member_name argument_list { @@ -4349,9 +3752,7 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | callable_expr argument_list { @@ -4362,8 +3763,6 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4376,15 +3775,11 @@ class_name: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | name { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4392,14 +3787,10 @@ class_name_reference: class_name { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | new_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4407,8 +3798,6 @@ exit_expr: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '(' optional_expr ')' { @@ -4418,10 +3807,8 @@ exit_expr: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } ; @@ -4429,8 +3816,6 @@ backticks_expr: /* empty */ 
{ $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ENCAPSED_AND_WHITESPACE { @@ -4439,14 +3824,10 @@ backticks_expr: // save position part.GetNode().Position = position.NewTokenPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | encaps_list { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4454,14 +3835,10 @@ ctor_arguments: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | argument_list { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4474,11 +3851,9 @@ dereferencable_scalar: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Array, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Array, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.SkippedTokens) } | '[' array_pair_list ']' { @@ -4488,10 +3863,8 @@ dereferencable_scalar: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.SkippedTokens) } | T_CONSTANT_ENCAPSED_STRING { @@ -4501,9 +3874,7 @@ dereferencable_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -4516,9 +3887,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_DNUMBER { @@ -4528,9 +3897,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_LINE { @@ -4540,9 +3907,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FILE { @@ -4552,9 +3917,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_DIR { @@ -4564,9 +3927,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_TRAIT_C { @@ -4576,9 +3937,7 @@ scalar: $$.GetNode().Position = 
position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_METHOD_C { @@ -4588,9 +3947,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FUNC_C { @@ -4600,9 +3957,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NS_C { @@ -4612,9 +3967,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_CLASS_C { @@ -4624,9 +3977,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_START_HEREDOC T_ENCAPSED_AND_WHITESPACE T_END_HEREDOC { @@ -4638,9 +3989,7 @@ scalar: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_START_HEREDOC T_END_HEREDOC { @@ -4650,9 +3999,7 @@ scalar: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '"' encaps_list '"' { @@ -4662,9 +4009,7 @@ scalar: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_START_HEREDOC encaps_list T_END_HEREDOC { @@ -4674,21 +4019,15 @@ scalar: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | dereferencable_scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | constant { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4702,8 +4041,6 @@ constant: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_name T_PAAMAYIM_NEKUDOTAYIM identifier { @@ -4716,10 +4053,8 @@ constant: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, 
token.Start, $3.SkippedTokens) } | variable_class_name T_PAAMAYIM_NEKUDOTAYIM identifier { @@ -4732,10 +4067,8 @@ constant: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $3.SkippedTokens) } ; @@ -4743,14 +4076,10 @@ expr: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr_without_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4758,14 +4087,10 @@ optional_expr: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4773,8 +4098,6 @@ variable_class_name: dereferencable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4782,8 +4105,6 @@ dereferencable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '(' expr ')' { @@ -4793,16 +4114,12 @@ dereferencable: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | dereferencable_scalar { $$ = $1; - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4810,8 +4127,6 @@ callable_expr: callable_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '(' expr ')' { @@ -4821,16 +4136,12 @@ callable_expr: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | dereferencable_scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4838,8 +4149,6 @@ callable_variable: simple_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | dereferencable '[' optional_expr ']' { @@ -4849,10 +4158,8 @@ callable_variable: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | constant '[' optional_expr ']' { @@ -4862,10 +4169,8 @@ callable_variable: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | dereferencable '{' expr 
'}' { @@ -4875,10 +4180,8 @@ callable_variable: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | dereferencable T_OBJECT_OPERATOR property_name argument_list { @@ -4889,15 +4192,11 @@ callable_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | function_call { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4905,14 +4204,10 @@ variable: callable_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | static_member { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | dereferencable T_OBJECT_OPERATOR property_name { @@ -4923,9 +4218,7 @@ variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } ; @@ -4940,9 +4233,7 @@ simple_variable: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '$' '{' expr '}' { @@ -4952,11 +4243,9 @@ simple_variable: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($3, token.Start, append($2.Tokens, $3.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($3, token.End, append($3.GetNode().Tokens[token.End], $4.Tokens...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($3, token.Start, append($2.SkippedTokens, $3.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($3, token.End, append($3.GetNode().Tokens[token.End], $4.SkippedTokens...)) } | '$' simple_variable { @@ -4966,9 +4255,7 @@ simple_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -4982,9 +4269,7 @@ static_member: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | variable_class_name T_PAAMAYIM_NEKUDOTAYIM simple_variable { @@ -4995,9 +4280,7 @@ static_member: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } ; @@ -5005,8 +4288,6 @@ new_variable: simple_variable { $$ = $1 - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | new_variable '[' optional_expr ']' { @@ -5016,10 +4297,8 @@ new_variable: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | new_variable '{' expr '}' { @@ -5029,10 +4308,8 @@ new_variable: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | new_variable T_OBJECT_OPERATOR property_name { @@ -5043,9 +4320,7 @@ new_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | class_name T_PAAMAYIM_NEKUDOTAYIM simple_variable { @@ -5056,9 +4331,7 @@ new_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | new_variable T_PAAMAYIM_NEKUDOTAYIM simple_variable { @@ -5069,9 +4342,7 @@ new_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } ; @@ -5084,25 +4355,19 @@ member_name: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '{' expr '}' { $$ = $2; // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.Tokens, $$.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.Tokens...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.SkippedTokens, $$.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.SkippedTokens...)) } | simple_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5115,25 +4380,19 @@ property_name: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '{' expr '}' { $$ = $2; // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.Tokens, $$.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.Tokens...)) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.SkippedTokens, $$.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.SkippedTokens...)) } | simple_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5141,8 +4400,6 @@ array_pair_list: non_empty_array_pair_list { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5150,14 +4407,10 @@ possible_array_pair: /* empty */ { $$ = &ast.ExprArrayItem{ast.Node{}, false, nil, nil} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | array_pair { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5171,9 +4424,7 @@ non_empty_array_pair_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | possible_array_pair { @@ -5182,8 +4433,6 @@ non_empty_array_pair_list: } else { $$ = []ast.Vertex{$1} } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5197,9 +4446,7 @@ array_pair: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr { @@ -5210,8 +4457,6 @@ array_pair: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr T_DOUBLE_ARROW '&' variable { @@ -5224,10 +4469,8 @@ array_pair: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setFreeFloating(reference, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(reference, token.Start, $3.SkippedTokens) } | '&' variable { @@ -5239,9 +4482,7 @@ array_pair: reference.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_ELLIPSIS expr { @@ -5251,9 +4492,7 @@ array_pair: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | expr T_DOUBLE_ARROW T_LIST '(' array_pair_list ')' { @@ -5267,12 +4506,10 @@ array_pair: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.List, $4.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.List, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $6.SkippedTokens) } | 
T_LIST '(' array_pair_list ')' { @@ -5285,11 +4522,9 @@ array_pair: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.List, $2.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.List, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.SkippedTokens) } ; @@ -5297,8 +4532,6 @@ encaps_list: encaps_list encaps_var { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | encaps_list T_ENCAPSED_AND_WHITESPACE { @@ -5309,15 +4542,11 @@ encaps_list: encapsed.GetNode().Position = position.NewTokenPosition($2) // save comments - yylex.(*Parser).setFreeFloating(encapsed, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(encapsed, token.Start, $2.SkippedTokens) } | encaps_var { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ENCAPSED_AND_WHITESPACE encaps_var { @@ -5328,9 +4557,7 @@ encaps_list: encapsed.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(encapsed, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(encapsed, token.Start, $1.SkippedTokens) } ; @@ -5345,9 +4572,7 @@ encaps_var: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_VARIABLE '[' encaps_var_offset ']' { @@ -5361,10 +4586,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | T_VARIABLE T_OBJECT_OPERATOR T_STRING { @@ -5380,10 +4603,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloating(fetch, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(fetch, token.Start, $3.SkippedTokens) } | T_DOLLAR_OPEN_CURLY_BRACES expr '}' { @@ -5395,10 +4616,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setToken($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | T_DOLLAR_OPEN_CURLY_BRACES T_STRING_VARNAME '}' { @@ -5412,10 +4631,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setToken($$, token.Start, $1.Tokens) - 
yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | T_DOLLAR_OPEN_CURLY_BRACES T_STRING_VARNAME '[' expr ']' '}' { @@ -5429,22 +4646,18 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $6) // save comments - yylex.(*Parser).setToken(variable, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $3.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $5.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken(variable, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $3.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $5.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $6.SkippedTokens) } | T_CURLY_OPEN variable '}' { $$ = $2; // save comments - yylex.(*Parser).setToken($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } ; @@ -5457,9 +4670,7 @@ encaps_var_offset: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NUM_STRING { @@ -5474,9 +4685,7 @@ encaps_var_offset: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '-' T_NUM_STRING { @@ -5500,9 +4709,7 @@ encaps_var_offset: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_VARIABLE { @@ -5514,9 +4721,7 @@ encaps_var_offset: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -5529,15 +4734,13 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokensPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Isset, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Isset, $2.SkippedTokens) if $4 == nil { - yylex.(*Parser).setFreeFloating($$, token.VarList, $5.Tokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $5.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.VarList, append($4.Tokens, $5.Tokens...)) + yylex.(*Parser).setFreeFloating($$, token.VarList, append($4.SkippedTokens, $5.SkippedTokens...)) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_EMPTY '(' expr ')' { @@ -5549,11 +4752,9 @@ internal_functions_in_yacc: 
$$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | T_INCLUDE expr { @@ -5563,9 +4764,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_INCLUDE_ONCE expr { @@ -5575,9 +4774,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_EVAL '(' expr ')' { @@ -5589,11 +4786,9 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | T_REQUIRE expr { @@ -5603,9 +4798,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_REQUIRE_ONCE expr { @@ -5615,9 +4808,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -5625,17 +4816,13 @@ isset_variables: isset_variable { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | isset_variables ',' isset_variable { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -5643,8 +4830,6 @@ isset_variable: expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; diff --git a/internal/php7/php7_bench_test.go b/internal/php7/php7_bench_test.go index 558c54b..d5f2636 100644 --- a/internal/php7/php7_bench_test.go +++ b/internal/php7/php7_bench_test.go @@ -382,7 +382,7 @@ CAD; ` for n := 0; n < b.N; n++ { - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) 
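Across the php5.y hunks above the change is uniform: every action that used to copy $N.Tokens now reads $N.SkippedTokens, and the trailing returnTokenToPool(yyDollar, &yyVAL) bookkeeping is gone because the parser no longer recycles tokens through the lexer. The sketch below reconstructs the token shape those actions rely on; it is inferred only from the usage visible in this patch (an integer ID, the raw value, a pooled *position.Position and the attached skipped tokens) and is not a copy of pkg/token/token.go:

    // Sketch of the token shape implied by this patch; only the members exercised
    // by the grammar actions and the lexer hunks are shown.
    package token

    import "github.com/z7zmey/php-parser/pkg/position"

    // ID is the integer token kind, e.g. T_ECHO, or ID(int(';')) for one-character tokens.
    type ID int

    type Token struct {
        ID            ID
        Value         []byte             // raw source bytes of the token
        Position      *position.Position // handed out by a position pool in the lexer
        SkippedTokens []*Token           // whitespace, comments and open tags skipped before this token
    }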
php7parser.Parse() } diff --git a/internal/php7/php7_test.go b/internal/php7/php7_test.go index 68364f5..8d52038 100644 --- a/internal/php7/php7_test.go +++ b/internal/php7/php7_test.go @@ -19595,11 +19595,12 @@ func TestPhp7(t *testing.T) { }, } - lexer := scanner.NewLexer(src, "7.4", false, nil) + lexer := scanner.NewLexer(src, "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19734,11 +19735,12 @@ func TestPhp5Strings(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19962,11 +19964,12 @@ CAD; }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19989,7 +19992,7 @@ func TestPhp7ControlCharsErrors(t *testing.T) { parserErrors = append(parserErrors, e) } - lexer := scanner.NewLexer([]byte(src), "7.4", false, errorHandlerFunc) + lexer := scanner.NewLexer([]byte(src), "7.4", errorHandlerFunc) php7parser := php7.NewParser(lexer, errorHandlerFunc) php7parser.Parse() assert.DeepEqual(t, expected, parserErrors) diff --git a/internal/position/position.go b/internal/position/position.go index cbcc309..2603826 100644 --- a/internal/position/position.go +++ b/internal/position/position.go @@ -1,9 +1,9 @@ package position import ( - "github.com/z7zmey/php-parser/internal/scanner" "github.com/z7zmey/php-parser/pkg/ast" "github.com/z7zmey/php-parser/pkg/position" + "github.com/z7zmey/php-parser/pkg/token" ) type startPos struct { @@ -95,7 +95,7 @@ func NewNodePosition(n ast.Vertex) *position.Position { } // NewTokenPosition returns new Position -func NewTokenPosition(t *scanner.Token) *position.Position { +func NewTokenPosition(t *token.Token) *position.Position { return &position.Position{ StartLine: t.Position.StartLine, EndLine: t.Position.EndLine, @@ -105,7 +105,7 @@ func NewTokenPosition(t *scanner.Token) *position.Position { } // NewTokensPosition returns new Position -func NewTokensPosition(startToken *scanner.Token, endToken *scanner.Token) *position.Position { +func NewTokensPosition(startToken *token.Token, endToken *token.Token) *position.Position { return &position.Position{ StartLine: startToken.Position.StartLine, EndLine: endToken.Position.EndLine, @@ -115,7 +115,7 @@ func NewTokensPosition(startToken *scanner.Token, endToken *scanner.Token) *posi } // NewTokenNodePosition returns new Position -func NewTokenNodePosition(t *scanner.Token, n ast.Vertex) *position.Position { +func NewTokenNodePosition(t *token.Token, n ast.Vertex) *position.Position { return &position.Position{ StartLine: t.Position.StartLine, EndLine: getNodeEndPos(n).endLine, @@ -125,7 +125,7 @@ func NewTokenNodePosition(t *scanner.Token, n ast.Vertex) *position.Position { } // NewNodeTokenPosition returns new Position 
-func NewNodeTokenPosition(n ast.Vertex, t *scanner.Token) *position.Position { +func NewNodeTokenPosition(n ast.Vertex, t *token.Token) *position.Position { return &position.Position{ StartLine: getNodeStartPos(n).startLine, EndLine: t.Position.EndLine, @@ -145,7 +145,7 @@ func NewNodesPosition(startNode ast.Vertex, endNode ast.Vertex) *position.Positi } // NewNodeListTokenPosition returns new Position -func NewNodeListTokenPosition(list []ast.Vertex, t *scanner.Token) *position.Position { +func NewNodeListTokenPosition(list []ast.Vertex, t *token.Token) *position.Position { return &position.Position{ StartLine: getListStartPos(list).startLine, EndLine: t.Position.EndLine, @@ -155,7 +155,7 @@ func NewNodeListTokenPosition(list []ast.Vertex, t *scanner.Token) *position.Pos } // NewTokenNodeListPosition returns new Position -func NewTokenNodeListPosition(t *scanner.Token, list []ast.Vertex) *position.Position { +func NewTokenNodeListPosition(t *token.Token, list []ast.Vertex) *position.Position { return &position.Position{ StartLine: t.Position.StartLine, EndLine: getListEndPos(list).endLine, @@ -185,7 +185,7 @@ func NewNodeListNodePosition(list []ast.Vertex, n ast.Vertex) *position.Position } // NewOptionalListTokensPosition returns new Position -func NewOptionalListTokensPosition(list []ast.Vertex, t *scanner.Token, endToken *scanner.Token) *position.Position { +func NewOptionalListTokensPosition(list []ast.Vertex, t *token.Token, endToken *token.Token) *position.Position { if list == nil { return &position.Position{ StartLine: t.Position.StartLine, diff --git a/internal/position/position_test.go b/internal/position/position_test.go index f932f2d..1fd6c83 100644 --- a/internal/position/position_test.go +++ b/internal/position/position_test.go @@ -5,15 +5,15 @@ import ( "testing" builder "github.com/z7zmey/php-parser/internal/position" - "github.com/z7zmey/php-parser/internal/scanner" "github.com/z7zmey/php-parser/pkg/ast" "github.com/z7zmey/php-parser/pkg/position" + "github.com/z7zmey/php-parser/pkg/token" ) func TestNewTokenPosition(t *testing.T) { - tkn := &scanner.Token{ + tkn := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 0, @@ -29,18 +29,18 @@ func TestNewTokenPosition(t *testing.T) { } func TestNewTokensPosition(t *testing.T) { - token1 := &scanner.Token{ + token1 := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 0, EndPos: 3, }, } - token2 := &scanner.Token{ + token2 := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 2, EndLine: 2, StartPos: 4, @@ -71,9 +71,9 @@ func TestNewNodePosition(t *testing.T) { } func TestNewTokenNodePosition(t *testing.T) { - tkn := &scanner.Token{ + tkn := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 0, @@ -108,9 +108,9 @@ func TestNewNodeTokenPosition(t *testing.T) { }, } - tkn := &scanner.Token{ + tkn := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 2, EndLine: 2, StartPos: 10, @@ -202,9 +202,9 @@ func TestNewNodeListTokenPosition(t *testing.T) { }, } - tkn := &scanner.Token{ + tkn := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 3, EndLine: 3, StartPos: 20, @@ -218,9 +218,9 @@ func 
TestNewNodeListTokenPosition(t *testing.T) { } func TestNewTokenNodeListPosition(t *testing.T) { - tkn := &scanner.Token{ + tkn := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 0, @@ -332,18 +332,18 @@ func TestNewNodeListNodePosition(t *testing.T) { } func TestNewOptionalListTokensPosition(t *testing.T) { - token1 := &scanner.Token{ + token1 := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 0, EndPos: 3, }, } - token2 := &scanner.Token{ + token2 := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 2, EndLine: 2, StartPos: 4, @@ -378,18 +378,18 @@ func TestNewOptionalListTokensPosition2(t *testing.T) { }, } - token1 := &scanner.Token{ + token1 := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 4, EndLine: 4, StartPos: 27, EndPos: 29, }, } - token2 := &scanner.Token{ + token2 := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 5, EndLine: 5, StartPos: 30, @@ -426,9 +426,9 @@ func TestNilNodeListPos(t *testing.T) { } func TestNilNodeListTokenPos(t *testing.T) { - token := &scanner.Token{ + token := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 0, @@ -459,9 +459,9 @@ func TestEmptyNodeListPos(t *testing.T) { } func TestEmptyNodeListTokenPos(t *testing.T) { - token := &scanner.Token{ + token := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 0, diff --git a/internal/scanner/lexer.go b/internal/scanner/lexer.go index 7456961..b21ff7c 100644 --- a/internal/scanner/lexer.go +++ b/internal/scanner/lexer.go @@ -13,31 +13,32 @@ import ( type Lexer struct { data []byte phpVersion string - withTokens bool errHandlerFunc func(*errors.Error) + sts, ste int p, pe, cs int ts, te, act int stack []int top int heredocLabel []byte - tokenPool *TokenPool + tokenPool *token.Pool + positionPool *position.Pool newLines NewLines } -func NewLexer(data []byte, phpVersion string, withTokens bool, errHandlerFunc func(*errors.Error)) *Lexer { +func NewLexer(data []byte, phpVersion string, errHandlerFunc func(*errors.Error)) *Lexer { lex := &Lexer{ data: data, phpVersion: phpVersion, - withTokens: withTokens, errHandlerFunc: errHandlerFunc, pe: len(data), stack: make([]int, 0), - tokenPool: &TokenPool{}, - newLines: NewLines{make([]int, 0, 128)}, + tokenPool: token.NewPool(position.DefaultBlockSize), + positionPool: position.NewPool(position.DefaultBlockSize), + newLines: NewLines{make([]int, 0, 128)}, } initLexer(lex) @@ -45,26 +46,37 @@ func NewLexer(data []byte, phpVersion string, withTokens bool, errHandlerFunc fu return lex } -func (lex *Lexer) ReturnTokenToPool(t *Token) { - lex.tokenPool.Put(t) +func (lex *Lexer) setTokenPosition(token *token.Token) { + pos := lex.positionPool.Get() + + pos.StartLine = lex.newLines.GetLine(lex.ts) + pos.EndLine = lex.newLines.GetLine(lex.te - 1) + pos.StartPos = lex.ts + pos.EndPos = lex.te + + token.Position = pos } -func (lex *Lexer) setTokenPosition(token *Token) { - token.Position.StartLine = lex.newLines.GetLine(lex.ts) - token.Position.EndLine = lex.newLines.GetLine(lex.te - 1) - token.Position.StartPos = lex.ts - token.Position.EndPos = lex.te -} 
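Both the position builders and the lexer now traffic in the public pkg/token type with a pointer position, which is why the tests above switch from scanner.Token with an embedded position.Position to &token.Token with a *position.Position. A minimal usage sketch, assuming only the NewTokenPosition signature shown in internal/position/position.go; the surrounding test scaffolding is illustrative rather than copied from the repository:

    // Lives inside the php-parser module, since internal/position cannot be
    // imported from outside it.
    package position_test

    import (
        "fmt"

        builder "github.com/z7zmey/php-parser/internal/position"
        "github.com/z7zmey/php-parser/pkg/position"
        "github.com/z7zmey/php-parser/pkg/token"
    )

    func ExampleNewTokenPosition() {
        tkn := &token.Token{
            Value: []byte(`foo`),
            Position: &position.Position{ // now a *position.Position, not an embedded value
                StartLine: 1,
                EndLine:   1,
                StartPos:  0,
                EndPos:    3,
            },
        }

        pos := builder.NewTokenPosition(tkn)
        fmt.Printf("%d:%d %d-%d\n", pos.StartLine, pos.EndLine, pos.StartPos, pos.EndPos)
    }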
- -func (lex *Lexer) addHiddenToken(t *Token, id TokenID, ps, pe int) { - if !lex.withTokens { - return +func (lex *Lexer) addSkippedToken(t *token.Token, id token.ID, ps, pe int) { + if lex.sts == 0 { + lex.sts = lex.ts } - t.Tokens = append(t.Tokens, token.Token{ - ID: token.ID(id), - Value: lex.data[ps:pe], - }) + lex.ste = lex.te + + // TODO remove after parser refactoring + + skippedTkn := lex.tokenPool.Get() + skippedTkn.ID = id + skippedTkn.Value = lex.data[ps:pe] + + lex.setTokenPosition(skippedTkn) + + if t.SkippedTokens == nil { + t.SkippedTokens = make([]*token.Token, 0, 2) + } + + t.SkippedTokens = append(t.SkippedTokens, skippedTkn) } func (lex *Lexer) isNotStringVar() bool { diff --git a/internal/scanner/lexer_tokens.go b/internal/scanner/lexer_tokens.go deleted file mode 100644 index bd54024..0000000 --- a/internal/scanner/lexer_tokens.go +++ /dev/null @@ -1,145 +0,0 @@ -package scanner - -type TokenID int - -//go:generate stringer -type=TokenID -output ./tokenid_string.go -const ( - T_INCLUDE TokenID = iota + 57346 - T_INCLUDE_ONCE - T_EXIT - T_IF - T_LNUMBER - T_DNUMBER - T_STRING - T_STRING_VARNAME - T_VARIABLE - T_NUM_STRING - T_INLINE_HTML - T_CHARACTER - T_BAD_CHARACTER - T_ENCAPSED_AND_WHITESPACE - T_CONSTANT_ENCAPSED_STRING - T_ECHO - T_DO - T_WHILE - T_ENDWHILE - T_FOR - T_ENDFOR - T_FOREACH - T_ENDFOREACH - T_DECLARE - T_ENDDECLARE - T_AS - T_SWITCH - T_ENDSWITCH - T_CASE - T_DEFAULT - T_BREAK - T_CONTINUE - T_GOTO - T_FUNCTION - T_FN - T_CONST - T_RETURN - T_TRY - T_CATCH - T_FINALLY - T_THROW - T_USE - T_INSTEADOF - T_GLOBAL - T_VAR - T_UNSET - T_ISSET - T_EMPTY - T_HALT_COMPILER - T_CLASS - T_TRAIT - T_INTERFACE - T_EXTENDS - T_IMPLEMENTS - T_OBJECT_OPERATOR - T_DOUBLE_ARROW - T_LIST - T_ARRAY - T_CALLABLE - T_CLASS_C - T_TRAIT_C - T_METHOD_C - T_FUNC_C - T_LINE - T_FILE - T_COMMENT - T_DOC_COMMENT - T_OPEN_TAG - T_OPEN_TAG_WITH_ECHO - T_CLOSE_TAG - T_WHITESPACE - T_START_HEREDOC - T_END_HEREDOC - T_DOLLAR_OPEN_CURLY_BRACES - T_CURLY_OPEN - T_PAAMAYIM_NEKUDOTAYIM - T_NAMESPACE - T_NS_C - T_DIR - T_NS_SEPARATOR - T_ELLIPSIS - T_EVAL - T_REQUIRE - T_REQUIRE_ONCE - T_LOGICAL_OR - T_LOGICAL_XOR - T_LOGICAL_AND - T_INSTANCEOF - T_NEW - T_CLONE - T_ELSEIF - T_ELSE - T_ENDIF - T_PRINT - T_YIELD - T_STATIC - T_ABSTRACT - T_FINAL - T_PRIVATE - T_PROTECTED - T_PUBLIC - T_INC - T_DEC - T_YIELD_FROM - T_INT_CAST - T_DOUBLE_CAST - T_STRING_CAST - T_ARRAY_CAST - T_OBJECT_CAST - T_BOOL_CAST - T_UNSET_CAST - T_COALESCE - T_SPACESHIP - T_NOELSE - T_PLUS_EQUAL - T_MINUS_EQUAL - T_MUL_EQUAL - T_POW_EQUAL - T_DIV_EQUAL - T_CONCAT_EQUAL - T_MOD_EQUAL - T_AND_EQUAL - T_OR_EQUAL - T_XOR_EQUAL - T_SL_EQUAL - T_SR_EQUAL - T_COALESCE_EQUAL - T_BOOLEAN_OR - T_BOOLEAN_AND - T_POW - T_SL - T_SR - T_IS_IDENTICAL - T_IS_NOT_IDENTICAL - T_IS_EQUAL - T_IS_NOT_EQUAL - T_IS_SMALLER_OR_EQUAL - T_IS_GREATER_OR_EQUAL -) diff --git a/internal/scanner/scanner.go b/internal/scanner/scanner.go index c6ebf1c01cc16db50a01950d23e82b5e6d4ae3c5..f90db2331179561005b52c12de75d42bed8a805e 100644 GIT binary patch delta 13091 zcmb7LcU)D+68`PIXAuwtW5t4~!5~Gd6crQ!BZekLL!OBe#qul|yGTbsV?kJPf@l=P z8Vg3@7^6mwqDDor1{;FWXku*dB^rIkH+#;#6g`KSe{}Ee?9A-Ux3jaehtoM0TUJ;s z`?!~}QMYjuW{jWtX|K_7lO5wd;wQ(>b(}hWs>9TnY13n;IZmB8&T&TE#Mmj_jC&dz zL*YfOHR-)G^SOq2uvG0J;>wO|qsjT4QH$IhHG+RA29>>MlGpx8OF(|TIre>Od> zY$XQ0Sd3t0DmMfDjg5_FOk~-DMF(ReoY8CgjOkY1R!;gKvHSKb=Ed zp>PuI!A2&z9FM6_*Ne&%j#*B%W zIAQA4*fEiO9X2y2PO-PrX|(T7Pxfo_QR2pSro13t_-`k6A~jg!=FE03E@h|F<`8EX zx|O=I`1DA*w~S@ZPa*ECS;hyXH^gL6oA#>gmuAdEriBZ;ub|Y3xo381iqwL2USz{I 
zXFB4P)MPuh?2a+BTF|-G#3?Y#r*hfQ7O%bu<&|A(Ta zX>$s_E%h2U#8~7=-+>X4;l6&6EN6!YGbr-uph6ZN?#ptvc(U9gAH@~1n?-XwD$gAl z9^m_d#DC*D9~CwHe1n2~2L=U5WbVakYWVp^$Rvk~Rn#zBuk0mOus%UWKfj>R5SeT2 z1|Kz=(2xk(MvphBNbbKeS=llb8DlFcgW{Ixkz zS?Z!K3Hq(y2&}mGL;Ts4pyu)L~)8HdPZ0^9_FQiWPp_qq3&NZ&Y@b@!mugNa--| zyU&wd-s`Qhe2)LE)jnj1#(l=dEJ5 zz!1NnD1T;o(1)G)PQ@l|52h<)Z}UOb&>C2`km-k1aB=96O5g}OthOhE0(ECd+wawk z91<~-wcP8;3cpto`|x{}6C~+KlJYW5j^-&3>^vH;JkYm1O?lvSxyt_jbov~+qE0uXXJd*}7Swa)agbo}MfEgKQWL9Y}T`qbns*_Tg4x`^8 z9*Tp3=h85zZpwIU$)zYZ?z}x-8VVP7VY|++>a2Fli5%%4>c{rq>BQ16v{Aoa?7P#M z9lziwJFP2oxOhz&Y>)pNtElsmOF8m=OV#0Q+w%|E;miHl$#b5HEckfQj}8r%u2-Rv z$KgdJwP9zk*t3aMc6d>d@14pZV>hajkwlMNeR|V~-^d1WE#_7bdvV19QDlWM-brxF zFI04(gkEO#r#)+a-Ht82r|igt>nk9|h8V)!L8PVX)){dlgRQDohMZmXXY7xkCB!M( z>`wIrR&%2@yP4F!$(wY}%}=DunC)9qyRl_8d^5EMr*N9XM%B8(Srg);q(i+aw%@Xc zbz0Io_r9(2J`0Q(84?=F$ZruW?I&Ay>7{2=bZ}owhOwcy`(ZJ`9!6~v6ypf1zCE9~ za2HEfz6kOdfjZlfuTlf&)Wz%;FyPRgbLDb*9jU36E#8*8fpn+JVPeJc=t~ znK?Wi#_m3`gLCG@LwVu;fy1OF?fj2DhKJPT_Za=}Ka7oAWNO=p|{Vy_jWKcPNbz!0FUy>c076G#adq!UkRHEE9?3F2jfmAuP<&}`7a9$$|($_y;(I~Y zBEfK#U_P_`Cq?8zkcMP{u>om|0a8gBu+oxLm@E**^AYZ9NfvAIQgzj^TOdn`7gU-M zZ$Khag;Rj?O?Z&e~2Z%Ewwya%+A~d1eh%A(LuO9J@xJjvjlv9FnbAtJoWDDhe91s+t z6WW5TW^@7>_c zR8B$Z8YL-->5d&3n-K@hvABDcCxizKjS38}pHO9nD|0s^{gu#wsZj)MFeeTy#DLRSlh<=si6D)VkA zewV~!RroO$h~YsGVg}LH#97bSw+C)8tR35nIFw5;2}rdetB8|Sl!m4D=-*^}{JE$n zNx(UfQ$R}LfGwE|LqC@n(U>Sq#4uj0)w2(CA}+YLbSKiNN?r;$Ai#QOk_hG&v>dYg z5F63lm0YfyS>lNVMINMA<7H!aYU3&XT5+^DIYFhWIgIin3uNJCu-2a>$SZLb!vaV- zL3RJR;kOn&=a5N+c&tby9pT_yZe-Cno*cmU zBkU@`r<{~$o z)GeWMAuhoQa0%X!{W&>?7?OL2|2iA_qu-g z-vVMJI^~j)8rfWK5WRvFb1NFalspVD9#D{n`pK^)eDFfu;(me#$dniN`xj^p`2hqr z=99}h+CAV|(7^E`g3g&}Ts>tBKU_ryq8yjiD8~iJU!)Uci=+kISxwGp+&#E$d&B)N z$w$bIzly=jH6#@&lh=}dTtLh12;pgkDA`$JD0VEu@XhVA4w3J|P1Qr}P)xqlV&B2t z#1fuuL}F=6SVQm&#LmV|WD~()8n%VxOQ6DkEWfIk=(rVOFAcW_lO5z;y(UAqp(d_y zep{U;cU~jo+U29F= z0e8P8|I*8Sz7OR(LBf7nZt4|nDa0MX9qr;J3u*ryDbu1+&m1PR!Mq%UMAY|WJg(}% z_hc6K_dY^qLH<$f-+P3Nhpck6x6@HF0rgsW6o>8~!w$=GGG62C1PjWsXGbLxn;s+M zpz1g=hxxp9>;$%6976{Ef56dc$0ZS0kCRzu9{6O)pH*W<&%jqh7jQmGTEX-aBnli( zBI)H5Na_v)en8Gc&Z4X%KadH~@f5cDop4DMK%# z13ci{%Vegew-dPhhyyFCaG>l*RHXe)oYSicg+9N6Lf7-w-K*H@bOrgiLdF#&A9W3h zjIW{=A=j~$%3GIt-!p6>@0e?37W?FiAyi)@aR}AW>nO|J3Cgb{EWE(`2LI^{yKf+` z?3*asvl>Mw)?jM~Z%wSlR*#$LArB{5b`z;iT|(|{YH+n4&X81tJyEVz(!9LU!wr(Jf@aQ)FW4W2U1k-24STwm!lG@0a?Fbv5Bf_P)YMq5! 
zR1AKhnRf*@I6oixX)6A8QHy!22b}v+`?4Q-1+t1 z_8-o)oPk#atafl&SV53p%8l#vz~c_bVed@2^rw_f@oQ&34B=$FP@$Zk=}MKlH+m0< zo+TXCogHG>Y~d((Slf7E3Bq3QNWgvzcoHw1;m+_~E|kIC1mUB4&q-ao6NMChe97K# z0TYshgLTfknJm2SyvHd*64FaKQbDS~-}hzSd1->29|3loi{a@)Ppv-FZge??uFa_> zY|cbwbp}5SRJ}+8m6wIia577n4{t{|2wp7^YVmk#X+e*OYnBOX!ST8(~4We~(a#Fu0BJpAPW24ZoNiq@1(uv01 zQ#SjfL&7YrIAFUlQX@_C-zjv1BPBv)1Buu-8@;s;|KkQVguY)3c^Z#;4)?W?CJH=T z=l&{%fA0}6pKjp7Qb_nlNY(VNpOU48AqRvNbyMKGH>PYph*L0~lHh@Q=C|OX2EX|k z0#DBhQ^4_-z%OFG6kRL%<^{LzjY{DIh^_n^9HhYN8eG}p%Yun&bz;N~3#guiF%G}f z=>x^*g&bwC1zc##=OX&tnkdh-y(BDG9>IM(3VL1Ufl_bR!a_my-i!M-7ygD|G_Lb_;qRY*3#efDJyIvUMk4%W58I6V-YrGf5nwN{9PHnlj~|CTVGd%+w; z-rC4pjz0-Y`OW93xc0sZvfm6e62Z4val;O`z|xgVl-^XTIZ-VGpHEM}!XWz<`d0M{OwP6V0hQ9I9_Ogu~Nj zR0R%S7}CE90nNn_Bicb36RgFF4y=EbA*?Z`(^RpsNcIvY0B1~;>jpfO_l1!rbh!G9 zfS$w##+uSY7$Zd7dv<~t`3_yhuNjZq(AnHDW-z-g#l(vvQS0U+4RiyCb~Fmcx25Kg z+m4Rst}p~mdpZ@Kx5K`)_H-%7+EGX{r}H7{n9x>?>p*AdV}2>Dw4|>`jjdg1n!=>( z8u<6Sv>sP`tlyaVLl4E7cnyDOM?+1U+8g2`FWQc1dwWSYO%XFY3oRyX{9O-TdDA?Y zH;}f5Wd>CFg$XkX7qIM4Ka?K%y4R&f(V!VX9pIM%RK1WlcL4oN`k;`1r&yoOL_?A< zPF?OpRX037WTb;5^2V~19wl$o*lfY7KYb56_|e07=a)1ND zNRk+d+^Qm|;=2U5Q#6D|AxZR5TK5sc8;%(Y9fr}vQndq|QmC2Oaya$p!4NA0yP^3A znvMX>9zl6!K|@-V5H zJB8xgpZvP1$FVJ#$8pq28t}`K_#%!Tz&ykQ?=7hJtLa!RXf}s#>{%PYUIi z1%yKxl%>-8yMoknJdN^N3Ie1rWTaCUJ%`Ejk%J5GkCWZq#CO*GhIcFj*TcV7XlUq> znP_O^%q9$dI};6EkolH|zMNHO=={vLG<4AdNwG$T9S?uv{fbtbM_^16vzXVWyEs7kfZliBq)_RD$Q#!#nI_I1107NXsy;AaoZim0z1+gaX>LZm3g5~FA$MiYY)(O{t|s2CfpI*L7F z>;(%)V^73hP_UsWSQ2CGChyBLCi!O1Z9LC`n19|suy@){+uhmOvvHB#**v@XgSF1q z){s}BeV2F^6}{9DZ@yXEi@5U_+DRIBe?Eq`?i@Gfv&1CF$<|hhV@5c3?d~`_F}3s9 zL~E-F!@h7#8S|N=j1LdBwzk4wiTn_?>n@Ly^qk`yy9=^JFug0Eoza~4d0>mbN8{KP|9f)3 zj84Q0$Wh`%+##)5xGJ9hn%iF=y7}KIhI-Rzh<>j)h8m?jz@px#7m}kLE>hpcT4U#qoa-Rmb0~$ zm2e|^hmW240pB&l#bBkz-Scm#7xLtnLYlf4B68DUY6wGu2k$h0ZK}A0u#oqn`ReIS zA)_1fg75UgM%N; z%^*I)@7~CgVj@31AIU{BL7a?WuA5y8wgZB<8}qT(sc! z%cj_xJZOv-Srx<|E-PtlMi?=zR}WMr-on)28o6>Tj%IVeJm6JMrouLG7 z`&z}C9&08lDbK7?q5QB=Mdp*j$x8P0wJPk5zEJ_LPy=>er$TvfoyzQe*QeW=!=PVH zaS5Sey@KK+;&^hAFK@RYu$c-)w2a)oA+UuKDX$R0+hb#((zd*fX>Y0UMD~drD9aqW zDbQX89~BxJ77`H@!-Mwt@n1Ko2>#dREahxBHmkT|^p;EoW&YL-HBvswC1#IzXSPxi7Zn#4(<>+h6TqLB6sy@15gw~oC{_wI zZ`_8>B`zTmArbmSD9Bl^IH}6#{1IL z0-wC4JFniS=IprlVxs%YeoWr4ijN7BQ_+``18TO%#pqYpyaOs`S07L@Ci&nbrP+U} z0p}c2VQ+mnOC=q{qe3F$LwVmLf&90_DoZB($Ta1oRY%g*wxDo+;e;RG`km6C{O?uv z)Op{l#KG~X3gtF6U|@;L_IaU1C0{;1woqw9gX1dREI2+@N$GY%Wl5Brn5N=_*a&!b z1@n3FNu>|?gOlmXH6WH11T|o-prH-3+v_;n6V{CLdp#2AF~S1atCx{2Bh$qWaIm^D1qNuWJKMh_b| z&Pnh)J9pz*S6i9wszJ^3Diit0vpRTTMdUh6p5%QwtcnRdhozI9n!o_-}rRgaEW1I)#|y=t5w!IBK%^{xeI+>QBB7va5Y zt{NW`p<>d(6`90af1hzIZ(`0~V-yC4hehzb2a$Zrbsb*B(68o7p(ageLa&(UNZz+J zg1>*m6-XI%#2n7y4wX7M5-bfLgC+`-N8el|R-n3bAD2@* zIB<ZvUZC*)cQ~H8{w>G! z+d83@xJfkw*Ep8Og`9*tvX`CyM;;!*gYW9Z%?qkF&`t^mAG(_ZEwx0s(&pVO;F?={ zE_L@0vSM~(pS0iwKMdzX&xw1pxPMynmiKLHS$J-I_KyZPI=>;yhkRC5Ey_QH zQ3f_4GCCA8El3QvdEm<1-Bj)`IS;0b-A-XxLQs@}bJNeRy#CKh&bAM;!PA1&;+TX! 
zSca^=7s=myB%H2fKkz6U+BYF8GvnDWbKZFD!ms@7tB6$JCpmoiQ>DZ!5=;><_iZ6P{JuPX%>jLd+8 zR9UjX6-MSl5+Q-+yMj@2EHrLLdPAuNp;gv2Ro{BswS}EH(s=G$4^sU`=$q!Jnczsq}zteUi(U-d8TY z!4+{3*nqggveU#8x?ZEo5bSS2GQ=8CFvIMIBone4u^tMK4{3xLRh}+3BwZczrwy43 zv5l4c16;5rSsHJD$g(BfYp+@0{~8llwPG7SZ-Uts@W991-!gg-#x^1SAipWWW?GrT zbg5o5@)jZbP9HpOL*9}`H7DO$VE}b)$X8llHxIc9C{RBbx_f(JUR%;%A7X46O4!l$ zCK_*<&=V%NBZm!yyWm-SQlqnoguAd?NG6Gfunu}V5gRD(K-`U8Ls;1ned@S^=p@AD z6D%QaI+JCFCMRs`M0T4bx6+{)>^*u5aIO^NN(!|Gyt6H zOau$$W;|qOXlEImVfX^y$eI6;$c)LUOI^qCNBWWXP^?ZSGgM;~GDJo({ zWOFi71GWc{s~AY3aBn?0y@=>g#orc@T;rNRWVy-0o=aW}{w_ceh4=!@Mp+hJO0Jm5 z^Oh;dzg|w#G+yqa5&k*OG;amY_836`ya$lBH8q=OW zgnsmt9~4}q@rOyV78TPSC0`<8;ZZUH_YV6KG68zr#Z6{a2^oox4_3zz(5eIhbB~dc zV0jEj>^P!)An7<659tE%{5Tl}>n`Bv(-Y(q_*KBFP9XW{<2dSnl8nM^I&xB;!S56b z^gV^}1E-K8{3MRNPUGmX7;VB4if(oWiQYYpfF);8;1)q{a~6qm&){e}j_?f-o}NXj z)UyZ~e-3d^#ONW8(46@5h#GMY0Tt&FRVhY&OHtIR^Ef(Qiu&GcgmL$~K$0{b{;>W6 z(l->44j0K7c>5w!6<$QnK__rzUxp($VPAe3%BWw4Bg=A>VOxcx%yROH#?v2uDn}WS zml2Y53F%)7s(Y6N&KIICBkq1VLdq^9F5wc6-oGNNy7LP9#Tz@qAfP4W_ATC?&Sx+RA3NRUPY4B>!{FNLHzVO3U-HqH&C!A zl;S`5^g6=hD^c)(8#p>6M!hO=6mkg8i0du>BllefwEizu?jr)HU%JG7k z_zyB|aqfE6=%OhVNSR%YJkx~}URLAmC+^_ri`xkML%{CeM%cq@91XpLZt(GklXp-? z#chN{-6eCxDs+HX>##n`9HQw03G0ZpH2NNy+(Mt1_Mm-5vP5!sn1KsW+W1N{xkBS% zJY6fa;Zorh?JNx{*QG)`3vCq-d+@kI``EPF^KWRyv1UGccEOrTZMw$8T~FNqruMms z*tS|rL0hHm{2B+sGjMMcYZLZWYeNwISG7E0o~x#VwEebr#QZmWor4A`)C#^DM_r)P zm$VtUjb&}%r*X8usDA3~6$nmC_%?+aueo?MzZuW0@l+>f(~0*{8!?*;I84BwJyS#v zL8UOn>)R4|ZvyJ4cDI)%qvoapC~^w5lPXi`W)X*3lj$6dzac`1xi5TsWFakvM``o} zvs-15QwGh{co}M-$_#3*fnu}h>rop%m14aqY9O0wRDQLUWmsj?EV=vR`yb5O4aK0XMhv9`Z3C{1tB&)4! z25Fb++u*;J9#gvb;U($-UB02ilqlrt1n0h?A1P7j>sByi9er-FA0DltuF%GsD0P=f zeK*qeFn0qDfQM^n=h_J+BpgLB2vu0HN~LG%G_Ca0HX5T5eAe*e4r&iWchI;R(^?7W zSm6L$_+%#)$581`x(rvDQN{!d6nYfeI_{<|tzN2DO^no&f7i z_o{1jJghCRV@CayQt&sJZ#!cCZ^Iq>lxEnyGHS4DgET)FD)U-ZpMa*~hn)xAK$rdQt zjQJ1(;SHHM0sF3}&d}J3HLoKno;G5=Ahr?f0!^%#`dKDtkA#F4xLZiL3UyR!-;6EO zf?rEKE6=~hL@DIol8F*Xp3}D_vlZ}W@JCA)FT8IFAK0<6h(B$|L@gcCiY*6Bu@Z=C z!m{D%OWH!};lNUioBeM1)REOV-{cTaY0IXnFjZ=xYX@ddj+q@5oCQv*oS|CjkSmLB zC_1D%)typG=*He6S|2z0^5-P24qy$m8ZYBJkrk{+MHk=!F%Y`-WUlhJS)yQRr+T(v z&ENs0q0AZD1fj<*g7sM`3m619f|v{R3)O$zrJhfHf?2X)6VDpv!)dx|TZlir@1mcq zV<=8GvS%&f<%27n1>MgW7WvA4wK-`dJqcqY8UHTR0e;>?8cLbH**dMp&uF*uAuS;) zK9UcqaDtW5Yyf-|!^E>z8g!1q&@bzQ-u@dp6N7(l z#??U!n_}2VV;+9mU!}u_^Suw(Vo?xkX?hKc>TWzkVetf>_f_I_S|Kl~C*T$ss{;&d_K8`%W|@yd*l1g;99}9Kx`9Fob2{ zu{&)D6SsB=KZWxZM>&??ouqa{nFqznD6cu|AWa$01`|HmW;0~tNS33~ zUo}Jr2lfxgg6EWy;lZAc!=0>n>K275sLWOfL157p@;{hG}76Va%+FrGz7e~n{jwB}dY+CJJ8?}VPv zBb9Z7`V;j_$`dZ6vaWD#VqK{cH<9HSdh)7YTMD^D47r4F<@)ZuI_1+zEJ;jW!>s+2 z(JZ+I;w5dL!g>;v^em0d#mLBaFod;&bLs4yA*j0{Hj@oPBEv;YeBKCiv)EL@FqOR! 
zWO7e$|1?&kLXtt}vN0Vz^|!CdIZR*s3;vYp$nR}J*n-Cl=2at#b-bhMplk*UhHqx- zJ?a6U%|r*Mj?wCVh4?0 zjGoI{N*~W=PQtyj=dd{%cf++6G5V{O@WbPI3fDU4u`G>`Oza7_@|b+JLT}c%XW*~B zFnR$?H@fh^B0ShSEMy+0nAt2s2g?i31rQr*-XrVERvjb(3&xhayl~usEoR*o0b{)U zmWN`~5GA4bn86O#EM}v{=7PJwdK${^MbQ?JyTnui5WhF%8CZY;7WZXw?Rl#4GdS|W za%Ly}wUl)fF7CJ-GuY5YK!n-DxhoYOKDr8hBhz}qq}3(|7sKZ^S94%*)J-mwt0)@j!N4|J$a<^TWy diff --git a/internal/scanner/scanner.rl index cd29221..18874ae 100644 --- a/internal/scanner/scanner.rl +++ b/internal/scanner/scanner.rl @@ -4,6 +4,8 @@ import ( "fmt" "strconv" "strings" + + "github.com/z7zmey/php-parser/pkg/token" ) %%{ @@ -18,13 +20,14 @@ func initLexer(lex *Lexer) { %% write init; } -func (lex *Lexer) Lex() *Token { +func (lex *Lexer) Lex() *token.Token { eof := lex.pe - var tok TokenID + var tok token.ID - token := lex.tokenPool.Get() - token.Tokens = token.Tokens[:0] - token.Value = lex.data[0:0] + tkn := lex.tokenPool.Get() + + lex.sts = 0 + lex.ste = 0 lblStart := 0 lblEnd := 0 @@ -124,7 +127,7 @@ func (lex *Lexer) Lex() *Token { main := |* "#!" any* :>> newline => { - lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te) + lex.addSkippedToken(tkn, token.T_COMMENT, lex.ts, lex.te) }; any => { fnext html; @@ -135,42 +138,42 @@ func (lex *Lexer) Lex() *Token { html := |* any_line+ -- '<?' => { lex.ungetStr("<") - lex.setTokenPosition(token) - tok = T_INLINE_HTML; + lex.setTokenPosition(tkn) + tok = token.T_INLINE_HTML; fbreak; }; '<?' => { - lex.addHiddenToken(token, T_OPEN_TAG, lex.ts, lex.te) + lex.addSkippedToken(tkn, token.T_OPEN_TAG, lex.ts, lex.te) fnext php; }; '<?php'i ( [ \t] | newline ) => { lex.ungetCnt(lex.te - lex.ts - 5) - lex.addHiddenToken(token, T_OPEN_TAG, lex.ts, lex.ts+5) + lex.addSkippedToken(tkn, token.T_OPEN_TAG, lex.ts, lex.ts+5) fnext php; }; '<?='i => { - lex.setTokenPosition(token); - tok = T_ECHO; + lex.setTokenPosition(tkn); + tok = token.T_ECHO; fnext php; fbreak; }; *|; php := |* - whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)}; - '?>' newline? => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext html; fbreak;}; - ';' whitespace_line* '?>' newline? => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext html; fbreak;}; + whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)}; + '?>' newline? => {lex.setTokenPosition(tkn); tok = token.ID(int(';')); fnext html; fbreak;}; + ';' whitespace_line* '?>' newline?
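The remaining scanner rules below follow the same conversion: token kinds come from pkg/token (token.T_* constants, or token.ID(int(c)) for single-character tokens), positions are set on the pooled *token.Token, and whitespace, comments and open tags are attached to the next significant token via addSkippedToken. A hedged end-to-end sketch of what a caller inside this module sees, assuming only the NewLexer and Lex signatures from this patch; the tail of Lex() that fills ID and Value, and the behaviour at end of input, are outside the hunks shown here:

    package scanner_test

    import (
        "fmt"

        "github.com/z7zmey/php-parser/internal/scanner"
        "github.com/z7zmey/php-parser/pkg/token"
    )

    func ExampleLexer_Lex() {
        // The withTokens flag is gone from the constructor in this patch.
        lex := scanner.NewLexer([]byte("<?php echo $a;"), "7.4", nil)

        // Per the html/php machines above, the "<?php" open tag and the following
        // whitespace are expected to arrive as SkippedTokens on the first token.
        tkn := lex.Lex()
        fmt.Println(tkn.ID == token.T_ECHO, string(tkn.Value), len(tkn.SkippedTokens))
    }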
=> {lex.setTokenPosition(tkn); tok = token.ID(int(';')); fnext html; fbreak;}; - (dnum | exponent_dnum) => {lex.setTokenPosition(token); tok = T_DNUMBER; fbreak;}; + (dnum | exponent_dnum) => {lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak;}; bnum => { s := strings.Replace(string(lex.data[lex.ts+2:lex.te]), "_", "", -1) _, err := strconv.ParseInt(s, 2, 0) if err == nil { - lex.setTokenPosition(token); tok = T_LNUMBER; fbreak; + lex.setTokenPosition(tkn); tok = token.T_LNUMBER; fbreak; } - lex.setTokenPosition(token); tok = T_DNUMBER; fbreak; + lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak; }; lnum => { base := 10 @@ -182,142 +185,142 @@ func (lex *Lexer) Lex() *Token { _, err := strconv.ParseInt(s, base, 0) if err == nil { - lex.setTokenPosition(token); tok = T_LNUMBER; fbreak; + lex.setTokenPosition(tkn); tok = token.T_LNUMBER; fbreak; } - lex.setTokenPosition(token); tok = T_DNUMBER; fbreak; + lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak; }; hnum => { s := strings.Replace(string(lex.data[lex.ts+2:lex.te]), "_", "", -1) _, err := strconv.ParseInt(s, 16, 0) if err == nil { - lex.setTokenPosition(token); tok = T_LNUMBER; fbreak; + lex.setTokenPosition(tkn); tok = token.T_LNUMBER; fbreak; } - lex.setTokenPosition(token); tok = T_DNUMBER; fbreak; + lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak; }; - 'abstract'i => {lex.setTokenPosition(token); tok = T_ABSTRACT; fbreak;}; - 'array'i => {lex.setTokenPosition(token); tok = T_ARRAY; fbreak;}; - 'as'i => {lex.setTokenPosition(token); tok = T_AS; fbreak;}; - 'break'i => {lex.setTokenPosition(token); tok = T_BREAK; fbreak;}; - 'callable'i => {lex.setTokenPosition(token); tok = T_CALLABLE; fbreak;}; - 'case'i => {lex.setTokenPosition(token); tok = T_CASE; fbreak;}; - 'catch'i => {lex.setTokenPosition(token); tok = T_CATCH; fbreak;}; - 'class'i => {lex.setTokenPosition(token); tok = T_CLASS; fbreak;}; - 'clone'i => {lex.setTokenPosition(token); tok = T_CLONE; fbreak;}; - 'const'i => {lex.setTokenPosition(token); tok = T_CONST; fbreak;}; - 'continue'i => {lex.setTokenPosition(token); tok = T_CONTINUE; fbreak;}; - 'declare'i => {lex.setTokenPosition(token); tok = T_DECLARE; fbreak;}; - 'default'i => {lex.setTokenPosition(token); tok = T_DEFAULT; fbreak;}; - 'do'i => {lex.setTokenPosition(token); tok = T_DO; fbreak;}; - 'echo'i => {lex.setTokenPosition(token); tok = T_ECHO; fbreak;}; - 'else'i => {lex.setTokenPosition(token); tok = T_ELSE; fbreak;}; - 'elseif'i => {lex.setTokenPosition(token); tok = T_ELSEIF; fbreak;}; - 'empty'i => {lex.setTokenPosition(token); tok = T_EMPTY; fbreak;}; - 'enddeclare'i => {lex.setTokenPosition(token); tok = T_ENDDECLARE; fbreak;}; - 'endfor'i => {lex.setTokenPosition(token); tok = T_ENDFOR; fbreak;}; - 'endforeach'i => {lex.setTokenPosition(token); tok = T_ENDFOREACH; fbreak;}; - 'endif'i => {lex.setTokenPosition(token); tok = T_ENDIF; fbreak;}; - 'endswitch'i => {lex.setTokenPosition(token); tok = T_ENDSWITCH; fbreak;}; - 'endwhile'i => {lex.setTokenPosition(token); tok = T_ENDWHILE; fbreak;}; - 'eval'i => {lex.setTokenPosition(token); tok = T_EVAL; fbreak;}; - 'exit'i | 'die'i => {lex.setTokenPosition(token); tok = T_EXIT; fbreak;}; - 'extends'i => {lex.setTokenPosition(token); tok = T_EXTENDS; fbreak;}; - 'final'i => {lex.setTokenPosition(token); tok = T_FINAL; fbreak;}; - 'finally'i => {lex.setTokenPosition(token); tok = T_FINALLY; fbreak;}; - 'for'i => {lex.setTokenPosition(token); tok = T_FOR; fbreak;}; - 'foreach'i => {lex.setTokenPosition(token); tok = T_FOREACH; 
fbreak;}; - 'function'i | 'cfunction'i => {lex.setTokenPosition(token); tok = T_FUNCTION; fbreak;}; - 'fn'i => {lex.setTokenPosition(token); tok = T_FN; fbreak;}; - 'global'i => {lex.setTokenPosition(token); tok = T_GLOBAL; fbreak;}; - 'goto'i => {lex.setTokenPosition(token); tok = T_GOTO; fbreak;}; - 'if'i => {lex.setTokenPosition(token); tok = T_IF; fbreak;}; - 'isset'i => {lex.setTokenPosition(token); tok = T_ISSET; fbreak;}; - 'implements'i => {lex.setTokenPosition(token); tok = T_IMPLEMENTS; fbreak;}; - 'instanceof'i => {lex.setTokenPosition(token); tok = T_INSTANCEOF; fbreak;}; - 'insteadof'i => {lex.setTokenPosition(token); tok = T_INSTEADOF; fbreak;}; - 'interface'i => {lex.setTokenPosition(token); tok = T_INTERFACE; fbreak;}; - 'list'i => {lex.setTokenPosition(token); tok = T_LIST; fbreak;}; - 'namespace'i => {lex.setTokenPosition(token); tok = T_NAMESPACE; fbreak;}; - 'private'i => {lex.setTokenPosition(token); tok = T_PRIVATE; fbreak;}; - 'public'i => {lex.setTokenPosition(token); tok = T_PUBLIC; fbreak;}; - 'print'i => {lex.setTokenPosition(token); tok = T_PRINT; fbreak;}; - 'protected'i => {lex.setTokenPosition(token); tok = T_PROTECTED; fbreak;}; - 'return'i => {lex.setTokenPosition(token); tok = T_RETURN; fbreak;}; - 'static'i => {lex.setTokenPosition(token); tok = T_STATIC; fbreak;}; - 'switch'i => {lex.setTokenPosition(token); tok = T_SWITCH; fbreak;}; - 'throw'i => {lex.setTokenPosition(token); tok = T_THROW; fbreak;}; - 'trait'i => {lex.setTokenPosition(token); tok = T_TRAIT; fbreak;}; - 'try'i => {lex.setTokenPosition(token); tok = T_TRY; fbreak;}; - 'unset'i => {lex.setTokenPosition(token); tok = T_UNSET; fbreak;}; - 'use'i => {lex.setTokenPosition(token); tok = T_USE; fbreak;}; - 'var'i => {lex.setTokenPosition(token); tok = T_VAR; fbreak;}; - 'while'i => {lex.setTokenPosition(token); tok = T_WHILE; fbreak;}; - 'yield'i whitespace_line* 'from'i => {lex.setTokenPosition(token); tok = T_YIELD_FROM; fbreak;}; - 'yield'i => {lex.setTokenPosition(token); tok = T_YIELD; fbreak;}; - 'include'i => {lex.setTokenPosition(token); tok = T_INCLUDE; fbreak;}; - 'include_once'i => {lex.setTokenPosition(token); tok = T_INCLUDE_ONCE; fbreak;}; - 'require'i => {lex.setTokenPosition(token); tok = T_REQUIRE; fbreak;}; - 'require_once'i => {lex.setTokenPosition(token); tok = T_REQUIRE_ONCE; fbreak;}; - '__CLASS__'i => {lex.setTokenPosition(token); tok = T_CLASS_C; fbreak;}; - '__DIR__'i => {lex.setTokenPosition(token); tok = T_DIR; fbreak;}; - '__FILE__'i => {lex.setTokenPosition(token); tok = T_FILE; fbreak;}; - '__FUNCTION__'i => {lex.setTokenPosition(token); tok = T_FUNC_C; fbreak;}; - '__LINE__'i => {lex.setTokenPosition(token); tok = T_LINE; fbreak;}; - '__NAMESPACE__'i => {lex.setTokenPosition(token); tok = T_NS_C; fbreak;}; - '__METHOD__'i => {lex.setTokenPosition(token); tok = T_METHOD_C; fbreak;}; - '__TRAIT__'i => {lex.setTokenPosition(token); tok = T_TRAIT_C; fbreak;}; - '__halt_compiler'i => {lex.setTokenPosition(token); tok = T_HALT_COMPILER; fnext halt_compiller_open_parenthesis; fbreak;}; - 'new'i => {lex.setTokenPosition(token); tok = T_NEW; fbreak;}; - 'and'i => {lex.setTokenPosition(token); tok = T_LOGICAL_AND; fbreak;}; - 'or'i => {lex.setTokenPosition(token); tok = T_LOGICAL_OR; fbreak;}; - 'xor'i => {lex.setTokenPosition(token); tok = T_LOGICAL_XOR; fbreak;}; - '\\' => {lex.setTokenPosition(token); tok = T_NS_SEPARATOR; fbreak;}; - '...' 
=> {lex.setTokenPosition(token); tok = T_ELLIPSIS; fbreak;}; - '::' => {lex.setTokenPosition(token); tok = T_PAAMAYIM_NEKUDOTAYIM; fbreak;}; - '&&' => {lex.setTokenPosition(token); tok = T_BOOLEAN_AND; fbreak;}; - '||' => {lex.setTokenPosition(token); tok = T_BOOLEAN_OR; fbreak;}; - '&=' => {lex.setTokenPosition(token); tok = T_AND_EQUAL; fbreak;}; - '|=' => {lex.setTokenPosition(token); tok = T_OR_EQUAL; fbreak;}; - '.=' => {lex.setTokenPosition(token); tok = T_CONCAT_EQUAL; fbreak;}; - '*=' => {lex.setTokenPosition(token); tok = T_MUL_EQUAL; fbreak;}; - '**=' => {lex.setTokenPosition(token); tok = T_POW_EQUAL; fbreak;}; - '/=' => {lex.setTokenPosition(token); tok = T_DIV_EQUAL; fbreak;}; - '+=' => {lex.setTokenPosition(token); tok = T_PLUS_EQUAL; fbreak;}; - '-=' => {lex.setTokenPosition(token); tok = T_MINUS_EQUAL; fbreak;}; - '^=' => {lex.setTokenPosition(token); tok = T_XOR_EQUAL; fbreak;}; - '%=' => {lex.setTokenPosition(token); tok = T_MOD_EQUAL; fbreak;}; - '--' => {lex.setTokenPosition(token); tok = T_DEC; fbreak;}; - '++' => {lex.setTokenPosition(token); tok = T_INC; fbreak;}; - '=>' => {lex.setTokenPosition(token); tok = T_DOUBLE_ARROW; fbreak;}; - '<=>' => {lex.setTokenPosition(token); tok = T_SPACESHIP; fbreak;}; - '!=' | '<>' => {lex.setTokenPosition(token); tok = T_IS_NOT_EQUAL; fbreak;}; - '!==' => {lex.setTokenPosition(token); tok = T_IS_NOT_IDENTICAL; fbreak;}; - '==' => {lex.setTokenPosition(token); tok = T_IS_EQUAL; fbreak;}; - '===' => {lex.setTokenPosition(token); tok = T_IS_IDENTICAL; fbreak;}; - '<<=' => {lex.setTokenPosition(token); tok = T_SL_EQUAL; fbreak;}; - '>>=' => {lex.setTokenPosition(token); tok = T_SR_EQUAL; fbreak;}; - '>=' => {lex.setTokenPosition(token); tok = T_IS_GREATER_OR_EQUAL; fbreak;}; - '<=' => {lex.setTokenPosition(token); tok = T_IS_SMALLER_OR_EQUAL; fbreak;}; - '**' => {lex.setTokenPosition(token); tok = T_POW; fbreak;}; - '<<' => {lex.setTokenPosition(token); tok = T_SL; fbreak;}; - '>>' => {lex.setTokenPosition(token); tok = T_SR; fbreak;}; - '??' 
=> {lex.setTokenPosition(token); tok = T_COALESCE; fbreak;}; - '??=' => {lex.setTokenPosition(token); tok = T_COALESCE_EQUAL; fbreak;}; + 'abstract'i => {lex.setTokenPosition(tkn); tok = token.T_ABSTRACT; fbreak;}; + 'array'i => {lex.setTokenPosition(tkn); tok = token.T_ARRAY; fbreak;}; + 'as'i => {lex.setTokenPosition(tkn); tok = token.T_AS; fbreak;}; + 'break'i => {lex.setTokenPosition(tkn); tok = token.T_BREAK; fbreak;}; + 'callable'i => {lex.setTokenPosition(tkn); tok = token.T_CALLABLE; fbreak;}; + 'case'i => {lex.setTokenPosition(tkn); tok = token.T_CASE; fbreak;}; + 'catch'i => {lex.setTokenPosition(tkn); tok = token.T_CATCH; fbreak;}; + 'class'i => {lex.setTokenPosition(tkn); tok = token.T_CLASS; fbreak;}; + 'clone'i => {lex.setTokenPosition(tkn); tok = token.T_CLONE; fbreak;}; + 'const'i => {lex.setTokenPosition(tkn); tok = token.T_CONST; fbreak;}; + 'continue'i => {lex.setTokenPosition(tkn); tok = token.T_CONTINUE; fbreak;}; + 'declare'i => {lex.setTokenPosition(tkn); tok = token.T_DECLARE; fbreak;}; + 'default'i => {lex.setTokenPosition(tkn); tok = token.T_DEFAULT; fbreak;}; + 'do'i => {lex.setTokenPosition(tkn); tok = token.T_DO; fbreak;}; + 'echo'i => {lex.setTokenPosition(tkn); tok = token.T_ECHO; fbreak;}; + 'else'i => {lex.setTokenPosition(tkn); tok = token.T_ELSE; fbreak;}; + 'elseif'i => {lex.setTokenPosition(tkn); tok = token.T_ELSEIF; fbreak;}; + 'empty'i => {lex.setTokenPosition(tkn); tok = token.T_EMPTY; fbreak;}; + 'enddeclare'i => {lex.setTokenPosition(tkn); tok = token.T_ENDDECLARE; fbreak;}; + 'endfor'i => {lex.setTokenPosition(tkn); tok = token.T_ENDFOR; fbreak;}; + 'endforeach'i => {lex.setTokenPosition(tkn); tok = token.T_ENDFOREACH; fbreak;}; + 'endif'i => {lex.setTokenPosition(tkn); tok = token.T_ENDIF; fbreak;}; + 'endswitch'i => {lex.setTokenPosition(tkn); tok = token.T_ENDSWITCH; fbreak;}; + 'endwhile'i => {lex.setTokenPosition(tkn); tok = token.T_ENDWHILE; fbreak;}; + 'eval'i => {lex.setTokenPosition(tkn); tok = token.T_EVAL; fbreak;}; + 'exit'i | 'die'i => {lex.setTokenPosition(tkn); tok = token.T_EXIT; fbreak;}; + 'extends'i => {lex.setTokenPosition(tkn); tok = token.T_EXTENDS; fbreak;}; + 'final'i => {lex.setTokenPosition(tkn); tok = token.T_FINAL; fbreak;}; + 'finally'i => {lex.setTokenPosition(tkn); tok = token.T_FINALLY; fbreak;}; + 'for'i => {lex.setTokenPosition(tkn); tok = token.T_FOR; fbreak;}; + 'foreach'i => {lex.setTokenPosition(tkn); tok = token.T_FOREACH; fbreak;}; + 'function'i | 'cfunction'i => {lex.setTokenPosition(tkn); tok = token.T_FUNCTION; fbreak;}; + 'fn'i => {lex.setTokenPosition(tkn); tok = token.T_FN; fbreak;}; + 'global'i => {lex.setTokenPosition(tkn); tok = token.T_GLOBAL; fbreak;}; + 'goto'i => {lex.setTokenPosition(tkn); tok = token.T_GOTO; fbreak;}; + 'if'i => {lex.setTokenPosition(tkn); tok = token.T_IF; fbreak;}; + 'isset'i => {lex.setTokenPosition(tkn); tok = token.T_ISSET; fbreak;}; + 'implements'i => {lex.setTokenPosition(tkn); tok = token.T_IMPLEMENTS; fbreak;}; + 'instanceof'i => {lex.setTokenPosition(tkn); tok = token.T_INSTANCEOF; fbreak;}; + 'insteadof'i => {lex.setTokenPosition(tkn); tok = token.T_INSTEADOF; fbreak;}; + 'interface'i => {lex.setTokenPosition(tkn); tok = token.T_INTERFACE; fbreak;}; + 'list'i => {lex.setTokenPosition(tkn); tok = token.T_LIST; fbreak;}; + 'namespace'i => {lex.setTokenPosition(tkn); tok = token.T_NAMESPACE; fbreak;}; + 'private'i => {lex.setTokenPosition(tkn); tok = token.T_PRIVATE; fbreak;}; + 'public'i => {lex.setTokenPosition(tkn); tok = token.T_PUBLIC; fbreak;}; + 'print'i => 
{lex.setTokenPosition(tkn); tok = token.T_PRINT; fbreak;}; + 'protected'i => {lex.setTokenPosition(tkn); tok = token.T_PROTECTED; fbreak;}; + 'return'i => {lex.setTokenPosition(tkn); tok = token.T_RETURN; fbreak;}; + 'static'i => {lex.setTokenPosition(tkn); tok = token.T_STATIC; fbreak;}; + 'switch'i => {lex.setTokenPosition(tkn); tok = token.T_SWITCH; fbreak;}; + 'throw'i => {lex.setTokenPosition(tkn); tok = token.T_THROW; fbreak;}; + 'trait'i => {lex.setTokenPosition(tkn); tok = token.T_TRAIT; fbreak;}; + 'try'i => {lex.setTokenPosition(tkn); tok = token.T_TRY; fbreak;}; + 'unset'i => {lex.setTokenPosition(tkn); tok = token.T_UNSET; fbreak;}; + 'use'i => {lex.setTokenPosition(tkn); tok = token.T_USE; fbreak;}; + 'var'i => {lex.setTokenPosition(tkn); tok = token.T_VAR; fbreak;}; + 'while'i => {lex.setTokenPosition(tkn); tok = token.T_WHILE; fbreak;}; + 'yield'i whitespace_line* 'from'i => {lex.setTokenPosition(tkn); tok = token.T_YIELD_FROM; fbreak;}; + 'yield'i => {lex.setTokenPosition(tkn); tok = token.T_YIELD; fbreak;}; + 'include'i => {lex.setTokenPosition(tkn); tok = token.T_INCLUDE; fbreak;}; + 'include_once'i => {lex.setTokenPosition(tkn); tok = token.T_INCLUDE_ONCE; fbreak;}; + 'require'i => {lex.setTokenPosition(tkn); tok = token.T_REQUIRE; fbreak;}; + 'require_once'i => {lex.setTokenPosition(tkn); tok = token.T_REQUIRE_ONCE; fbreak;}; + '__CLASS__'i => {lex.setTokenPosition(tkn); tok = token.T_CLASS_C; fbreak;}; + '__DIR__'i => {lex.setTokenPosition(tkn); tok = token.T_DIR; fbreak;}; + '__FILE__'i => {lex.setTokenPosition(tkn); tok = token.T_FILE; fbreak;}; + '__FUNCTION__'i => {lex.setTokenPosition(tkn); tok = token.T_FUNC_C; fbreak;}; + '__LINE__'i => {lex.setTokenPosition(tkn); tok = token.T_LINE; fbreak;}; + '__NAMESPACE__'i => {lex.setTokenPosition(tkn); tok = token.T_NS_C; fbreak;}; + '__METHOD__'i => {lex.setTokenPosition(tkn); tok = token.T_METHOD_C; fbreak;}; + '__TRAIT__'i => {lex.setTokenPosition(tkn); tok = token.T_TRAIT_C; fbreak;}; + '__halt_compiler'i => {lex.setTokenPosition(tkn); tok = token.T_HALT_COMPILER; fnext halt_compiller_open_parenthesis; fbreak;}; + 'new'i => {lex.setTokenPosition(tkn); tok = token.T_NEW; fbreak;}; + 'and'i => {lex.setTokenPosition(tkn); tok = token.T_LOGICAL_AND; fbreak;}; + 'or'i => {lex.setTokenPosition(tkn); tok = token.T_LOGICAL_OR; fbreak;}; + 'xor'i => {lex.setTokenPosition(tkn); tok = token.T_LOGICAL_XOR; fbreak;}; + '\\' => {lex.setTokenPosition(tkn); tok = token.T_NS_SEPARATOR; fbreak;}; + '...' 
=> {lex.setTokenPosition(tkn); tok = token.T_ELLIPSIS; fbreak;}; + '::' => {lex.setTokenPosition(tkn); tok = token.T_PAAMAYIM_NEKUDOTAYIM; fbreak;}; + '&&' => {lex.setTokenPosition(tkn); tok = token.T_BOOLEAN_AND; fbreak;}; + '||' => {lex.setTokenPosition(tkn); tok = token.T_BOOLEAN_OR; fbreak;}; + '&=' => {lex.setTokenPosition(tkn); tok = token.T_AND_EQUAL; fbreak;}; + '|=' => {lex.setTokenPosition(tkn); tok = token.T_OR_EQUAL; fbreak;}; + '.=' => {lex.setTokenPosition(tkn); tok = token.T_CONCAT_EQUAL; fbreak;}; + '*=' => {lex.setTokenPosition(tkn); tok = token.T_MUL_EQUAL; fbreak;}; + '**=' => {lex.setTokenPosition(tkn); tok = token.T_POW_EQUAL; fbreak;}; + '/=' => {lex.setTokenPosition(tkn); tok = token.T_DIV_EQUAL; fbreak;}; + '+=' => {lex.setTokenPosition(tkn); tok = token.T_PLUS_EQUAL; fbreak;}; + '-=' => {lex.setTokenPosition(tkn); tok = token.T_MINUS_EQUAL; fbreak;}; + '^=' => {lex.setTokenPosition(tkn); tok = token.T_XOR_EQUAL; fbreak;}; + '%=' => {lex.setTokenPosition(tkn); tok = token.T_MOD_EQUAL; fbreak;}; + '--' => {lex.setTokenPosition(tkn); tok = token.T_DEC; fbreak;}; + '++' => {lex.setTokenPosition(tkn); tok = token.T_INC; fbreak;}; + '=>' => {lex.setTokenPosition(tkn); tok = token.T_DOUBLE_ARROW; fbreak;}; + '<=>' => {lex.setTokenPosition(tkn); tok = token.T_SPACESHIP; fbreak;}; + '!=' | '<>' => {lex.setTokenPosition(tkn); tok = token.T_IS_NOT_EQUAL; fbreak;}; + '!==' => {lex.setTokenPosition(tkn); tok = token.T_IS_NOT_IDENTICAL; fbreak;}; + '==' => {lex.setTokenPosition(tkn); tok = token.T_IS_EQUAL; fbreak;}; + '===' => {lex.setTokenPosition(tkn); tok = token.T_IS_IDENTICAL; fbreak;}; + '<<=' => {lex.setTokenPosition(tkn); tok = token.T_SL_EQUAL; fbreak;}; + '>>=' => {lex.setTokenPosition(tkn); tok = token.T_SR_EQUAL; fbreak;}; + '>=' => {lex.setTokenPosition(tkn); tok = token.T_IS_GREATER_OR_EQUAL; fbreak;}; + '<=' => {lex.setTokenPosition(tkn); tok = token.T_IS_SMALLER_OR_EQUAL; fbreak;}; + '**' => {lex.setTokenPosition(tkn); tok = token.T_POW; fbreak;}; + '<<' => {lex.setTokenPosition(tkn); tok = token.T_SL; fbreak;}; + '>>' => {lex.setTokenPosition(tkn); tok = token.T_SR; fbreak;}; + '??' 
=> {lex.setTokenPosition(tkn); tok = token.T_COALESCE; fbreak;}; + '??=' => {lex.setTokenPosition(tkn); tok = token.T_COALESCE_EQUAL; fbreak;}; - '(' whitespace* 'array'i whitespace* ')' => {lex.setTokenPosition(token); tok = T_ARRAY_CAST; fbreak;}; - '(' whitespace* ('bool'i|'boolean'i) whitespace* ')' => {lex.setTokenPosition(token); tok = T_BOOL_CAST; fbreak;}; - '(' whitespace* ('real'i|'double'i|'float'i) whitespace* ')' => {lex.setTokenPosition(token); tok = T_DOUBLE_CAST; fbreak;}; - '(' whitespace* ('int'i|'integer'i) whitespace* ')' => {lex.setTokenPosition(token); tok = T_INT_CAST; fbreak;}; - '(' whitespace* 'object'i whitespace* ')' => {lex.setTokenPosition(token); tok = T_OBJECT_CAST; fbreak;}; - '(' whitespace* ('string'i|'binary'i) whitespace* ')' => {lex.setTokenPosition(token); tok = T_STRING_CAST; fbreak;}; - '(' whitespace* 'unset'i whitespace* ')' => {lex.setTokenPosition(token); tok = T_UNSET_CAST; fbreak;}; + '(' whitespace* 'array'i whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_ARRAY_CAST; fbreak;}; + '(' whitespace* ('bool'i|'boolean'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_BOOL_CAST; fbreak;}; + '(' whitespace* ('real'i|'double'i|'float'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_DOUBLE_CAST; fbreak;}; + '(' whitespace* ('int'i|'integer'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_INT_CAST; fbreak;}; + '(' whitespace* 'object'i whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_OBJECT_CAST; fbreak;}; + '(' whitespace* ('string'i|'binary'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_STRING_CAST; fbreak;}; + '(' whitespace* 'unset'i whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_UNSET_CAST; fbreak;}; ('#' | '//') any_line* when is_not_comment_end => { lex.ungetStr("?>") - lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te) + lex.addSkippedToken(tkn, token.T_COMMENT, lex.ts, lex.te) }; '/*' any_line* :>> '*/' { isDocComment := false; @@ -326,37 +329,35 @@ func (lex *Lexer) Lex() *Token { } if isDocComment { - lex.addHiddenToken(token, T_DOC_COMMENT, lex.ts, lex.te) + lex.addSkippedToken(tkn, token.T_DOC_COMMENT, lex.ts, lex.te) } else { - lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te) + lex.addSkippedToken(tkn, token.T_COMMENT, lex.ts, lex.te) } }; operators => { - // rune, _ := utf8.DecodeRune(lex.data[lex.ts:lex.te]); - // tok = TokenID(Rune2Class(rune)); - lex.setTokenPosition(token); - tok = TokenID(int(lex.data[lex.ts])); + lex.setTokenPosition(tkn); + tok = token.ID(int(lex.data[lex.ts])); fbreak; }; - "{" => { lex.setTokenPosition(token); tok = TokenID(int('{')); lex.call(ftargs, fentry(php)); goto _out; }; - "}" => { lex.setTokenPosition(token); tok = TokenID(int('}')); lex.ret(1); goto _out;}; - "$" varname => { lex.setTokenPosition(token); tok = T_VARIABLE; fbreak; }; - varname => { lex.setTokenPosition(token); tok = T_STRING; fbreak; }; + "{" => { lex.setTokenPosition(tkn); tok = token.ID(int('{')); lex.call(ftargs, fentry(php)); goto _out; }; + "}" => { lex.setTokenPosition(tkn); tok = token.ID(int('}')); lex.ret(1); goto _out;}; + "$" varname => { lex.setTokenPosition(tkn); tok = token.T_VARIABLE; fbreak; }; + varname => { lex.setTokenPosition(tkn); tok = token.T_STRING; fbreak; }; - "->" => { lex.setTokenPosition(token); tok = T_OBJECT_OPERATOR; fnext property; fbreak; }; + "->" => { lex.setTokenPosition(tkn); tok = token.T_OBJECT_OPERATOR; fnext property; fbreak; }; constant_string => { - lex.setTokenPosition(token); - tok = 
T_CONSTANT_ENCAPSED_STRING; + lex.setTokenPosition(tkn); + tok = token.T_CONSTANT_ENCAPSED_STRING; fbreak; }; "b"i? "<<<" [ \t]* ( heredoc_label | ("'" heredoc_label "'") | ('"' heredoc_label '"') ) newline => { lex.heredocLabel = lex.data[lblStart:lblEnd] - lex.setTokenPosition(token); - tok = T_START_HEREDOC; + lex.setTokenPosition(tkn); + tok = token.T_START_HEREDOC; if lex.isHeredocEnd(lex.p+1) { fnext heredoc_end; @@ -367,8 +368,8 @@ func (lex *Lexer) Lex() *Token { } fbreak; }; - "`" => {lex.setTokenPosition(token); tok = TokenID(int('`')); fnext backqote; fbreak;}; - '"' => {lex.setTokenPosition(token); tok = TokenID(int('"')); fnext template_string; fbreak;}; + "`" => {lex.setTokenPosition(tkn); tok = token.ID(int('`')); fnext backqote; fbreak;}; + '"' => {lex.setTokenPosition(tkn); tok = token.ID(int('"')); fnext template_string; fbreak;}; any_line => { c := lex.data[lex.p] @@ -377,28 +378,28 @@ func (lex *Lexer) Lex() *Token { *|; property := |* - whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)}; - "->" => {lex.setTokenPosition(token); tok = T_OBJECT_OPERATOR; fbreak;}; - varname => {lex.setTokenPosition(token); tok = T_STRING; fnext php; fbreak;}; + whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)}; + "->" => {lex.setTokenPosition(tkn); tok = token.T_OBJECT_OPERATOR; fbreak;}; + varname => {lex.setTokenPosition(tkn); tok = token.T_STRING; fnext php; fbreak;}; any => {lex.ungetCnt(1); fgoto php;}; *|; nowdoc := |* any_line* when is_not_heredoc_end => { - lex.setTokenPosition(token); - tok = T_ENCAPSED_AND_WHITESPACE; + lex.setTokenPosition(tkn); + tok = token.T_ENCAPSED_AND_WHITESPACE; fnext heredoc_end; fbreak; }; *|; heredoc := |* - "{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;}; - "${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;}; + "{$" => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;}; + "${" => {lex.setTokenPosition(tkn); tok = token.T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;}; "$" => {lex.ungetCnt(1); fcall string_var;}; any_line* when is_not_heredoc_end_or_var => { - lex.setTokenPosition(token); - tok = T_ENCAPSED_AND_WHITESPACE; + lex.setTokenPosition(tkn); + tok = token.T_ENCAPSED_AND_WHITESPACE; if len(lex.data) > lex.p+1 && lex.data[lex.p+1] != '$' && lex.data[lex.p+1] != '{' { fnext heredoc_end; @@ -408,59 +409,59 @@ func (lex *Lexer) Lex() *Token { *|; backqote := |* - "{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;}; - "${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;}; + "{$" => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;}; + "${" => {lex.setTokenPosition(tkn); tok = token.T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;}; "$" varname_first => {lex.ungetCnt(2); fcall string_var;}; - '`' => {lex.setTokenPosition(token); tok = TokenID(int('`')); fnext php; fbreak;}; + '`' => {lex.setTokenPosition(tkn); tok = token.ID(int('`')); fnext php; fbreak;}; any_line* when is_not_backqoute_end_or_var => { - lex.setTokenPosition(token); - tok = T_ENCAPSED_AND_WHITESPACE; + lex.setTokenPosition(tkn); + tok = 
token.T_ENCAPSED_AND_WHITESPACE; fbreak; }; *|; template_string := |* - "{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;}; - "${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;}; + "{$" => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;}; + "${" => {lex.setTokenPosition(tkn); tok = token.T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;}; "$" varname_first => {lex.ungetCnt(2); fcall string_var;}; - '"' => {lex.setTokenPosition(token); tok = TokenID(int('"')); fnext php; fbreak;}; + '"' => {lex.setTokenPosition(tkn); tok = token.ID(int('"')); fnext php; fbreak;}; any_line* when is_not_string_end_or_var => { - lex.setTokenPosition(token); - tok = T_ENCAPSED_AND_WHITESPACE; + lex.setTokenPosition(tkn); + tok = token.T_ENCAPSED_AND_WHITESPACE; fbreak; }; *|; heredoc_end := |* varname -- ";" => { - lex.setTokenPosition(token); - tok = T_END_HEREDOC; + lex.setTokenPosition(tkn); + tok = token.T_END_HEREDOC; fnext php; fbreak; }; varname => { - lex.setTokenPosition(token); - tok = T_END_HEREDOC; + lex.setTokenPosition(tkn); + tok = token.T_END_HEREDOC; fnext php; fbreak; }; *|; string_var := |* - '$' varname => {lex.setTokenPosition(token); tok = T_VARIABLE; fbreak;}; - '->' varname_first => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_OBJECT_OPERATOR; fbreak;}; - varname => {lex.setTokenPosition(token); tok = T_STRING; fbreak;}; - '[' => {lex.setTokenPosition(token); tok = TokenID(int('[')); lex.call(ftargs, fentry(string_var_index)); goto _out;}; + '$' varname => {lex.setTokenPosition(tkn); tok = token.T_VARIABLE; fbreak;}; + '->' varname_first => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_OBJECT_OPERATOR; fbreak;}; + varname => {lex.setTokenPosition(tkn); tok = token.T_STRING; fbreak;}; + '[' => {lex.setTokenPosition(tkn); tok = token.ID(int('[')); lex.call(ftargs, fentry(string_var_index)); goto _out;}; any => {lex.ungetCnt(1); fret;}; *|; string_var_index := |* - lnum | hnum | bnum => {lex.setTokenPosition(token); tok = T_NUM_STRING; fbreak;}; - '$' varname => {lex.setTokenPosition(token); tok = T_VARIABLE; fbreak;}; - varname => {lex.setTokenPosition(token); tok = T_STRING; fbreak;}; - whitespace_line | [\\'#] => {lex.setTokenPosition(token); tok = T_ENCAPSED_AND_WHITESPACE; lex.ret(2); goto _out;}; - operators > (svi, 1) => {lex.setTokenPosition(token); tok = TokenID(int(lex.data[lex.ts])); fbreak;}; - ']' > (svi, 2) => {lex.setTokenPosition(token); tok = TokenID(int(']')); lex.ret(2); goto _out;}; + lnum | hnum | bnum => {lex.setTokenPosition(tkn); tok = token.T_NUM_STRING; fbreak;}; + '$' varname => {lex.setTokenPosition(tkn); tok = token.T_VARIABLE; fbreak;}; + varname => {lex.setTokenPosition(tkn); tok = token.T_STRING; fbreak;}; + whitespace_line | [\\'#] => {lex.setTokenPosition(tkn); tok = token.T_ENCAPSED_AND_WHITESPACE; lex.ret(2); goto _out;}; + operators > (svi, 1) => {lex.setTokenPosition(tkn); tok = token.ID(int(lex.data[lex.ts])); fbreak;}; + ']' > (svi, 2) => {lex.setTokenPosition(tkn); tok = token.ID(int(']')); lex.ret(2); goto _out;}; any_line => { c := lex.data[lex.p] lex.error(fmt.Sprintf("WARNING: Unexpected character in input: '%c' (ASCII=%d)", c, c)); @@ -468,38 +469,39 @@ func (lex *Lexer) Lex() *Token { *|; string_var_name := |* - varname ("[" | "}") => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = 
T_STRING_VARNAME; fnext php; fbreak;}; + varname ("[" | "}") => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_STRING_VARNAME; fnext php; fbreak;}; any => {lex.ungetCnt(1); fnext php;}; *|; halt_compiller_open_parenthesis := |* - whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)}; - "(" => {lex.setTokenPosition(token); tok = TokenID(int('(')); fnext halt_compiller_close_parenthesis; fbreak;}; + whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)}; + "(" => {lex.setTokenPosition(tkn); tok = token.ID(int('(')); fnext halt_compiller_close_parenthesis; fbreak;}; any => {lex.ungetCnt(1); fnext php;}; *|; halt_compiller_close_parenthesis := |* - whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)}; - ")" => {lex.setTokenPosition(token); tok = TokenID(int(')')); fnext halt_compiller_close_semicolon; fbreak;}; + whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)}; + ")" => {lex.setTokenPosition(tkn); tok = token.ID(int(')')); fnext halt_compiller_close_semicolon; fbreak;}; any => {lex.ungetCnt(1); fnext php;}; *|; halt_compiller_close_semicolon := |* - whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)}; - ";" => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext halt_compiller_end; fbreak;}; + whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)}; + ";" => {lex.setTokenPosition(tkn); tok = token.ID(int(';')); fnext halt_compiller_end; fbreak;}; any => {lex.ungetCnt(1); fnext php;}; *|; halt_compiller_end := |* - any_line* => { lex.addHiddenToken(token, T_HALT_COMPILER, lex.ts, lex.te); }; + any_line* => { lex.addSkippedToken(tkn, token.T_HALT_COMPILER, lex.ts, lex.te); }; *|; write exec; }%% - token.Value = lex.data[lex.ts:lex.te] - token.ID = tok - lex.addHiddenToken(token, tok, lex.ts, lex.te); + tkn.Value = lex.data[lex.ts:lex.te] + tkn.ID = token.ID(tok) + tkn.SkippedString = lex.data[lex.sts:lex.ste] + lex.addSkippedToken(tkn, tok, lex.ts, lex.te); - return token + return tkn } \ No newline at end of file diff --git a/internal/scanner/scanner_test.go b/internal/scanner/scanner_test.go index 530f972..465c40a 100644 --- a/internal/scanner/scanner_test.go +++ b/internal/scanner/scanner_test.go @@ -1,12 +1,12 @@ package scanner import ( - "github.com/z7zmey/php-parser/pkg/errors" - "github.com/z7zmey/php-parser/pkg/position" + "gotest.tools/assert" "testing" + "github.com/z7zmey/php-parser/pkg/errors" + "github.com/z7zmey/php-parser/pkg/position" "github.com/z7zmey/php-parser/pkg/token" - "gotest.tools/assert" ) func TestTokens(t *testing.T) { @@ -186,175 +186,174 @@ func TestTokens(t *testing.T) { ` expected := []string{ - T_INLINE_HTML.String(), - TokenID(int(';')).String(), - T_INLINE_HTML.String(), - T_ECHO.String(), - TokenID(int(';')).String(), - T_INLINE_HTML.String(), + token.T_INLINE_HTML.String(), + token.ID(int(';')).String(), + token.T_INLINE_HTML.String(), + token.T_ECHO.String(), + token.ID(int(';')).String(), + token.T_INLINE_HTML.String(), - T_ABSTRACT.String(), - T_ARRAY.String(), - T_AS.String(), - T_BREAK.String(), - T_CALLABLE.String(), - T_CASE.String(), - T_CATCH.String(), - T_CLASS.String(), - T_CLONE.String(), - T_CONST.String(), - T_CONTINUE.String(), - T_DECLARE.String(), - T_DEFAULT.String(), - T_DO.String(), - T_ECHO.String(), - T_ELSE.String(), - T_ELSEIF.String(), - T_EMPTY.String(), - T_ENDDECLARE.String(), - T_ENDFOR.String(), - T_ENDFOREACH.String(), - T_ENDIF.String(), - 
T_ENDSWITCH.String(), - T_ENDWHILE.String(), - T_EVAL.String(), - T_EXIT.String(), - T_EXTENDS.String(), - T_FINAL.String(), - T_FINALLY.String(), - T_FOR.String(), - T_FOREACH.String(), - T_FUNCTION.String(), - T_FUNCTION.String(), - T_GLOBAL.String(), - T_GOTO.String(), - T_IF.String(), - T_ISSET.String(), - T_IMPLEMENTS.String(), - T_INSTANCEOF.String(), - T_INSTEADOF.String(), - T_INTERFACE.String(), - T_LIST.String(), - T_NAMESPACE.String(), - T_PRIVATE.String(), - T_PUBLIC.String(), - T_PRINT.String(), - T_PROTECTED.String(), - T_RETURN.String(), - T_STATIC.String(), - T_SWITCH.String(), - T_THROW.String(), - T_TRAIT.String(), - T_TRY.String(), - T_UNSET.String(), - T_USE.String(), - T_VAR.String(), - T_WHILE.String(), - T_YIELD_FROM.String(), - T_YIELD.String(), - T_INCLUDE.String(), - T_INCLUDE_ONCE.String(), - T_REQUIRE.String(), - T_REQUIRE_ONCE.String(), + token.T_ABSTRACT.String(), + token.T_ARRAY.String(), + token.T_AS.String(), + token.T_BREAK.String(), + token.T_CALLABLE.String(), + token.T_CASE.String(), + token.T_CATCH.String(), + token.T_CLASS.String(), + token.T_CLONE.String(), + token.T_CONST.String(), + token.T_CONTINUE.String(), + token.T_DECLARE.String(), + token.T_DEFAULT.String(), + token.T_DO.String(), + token.T_ECHO.String(), + token.T_ELSE.String(), + token.T_ELSEIF.String(), + token.T_EMPTY.String(), + token.T_ENDDECLARE.String(), + token.T_ENDFOR.String(), + token.T_ENDFOREACH.String(), + token.T_ENDIF.String(), + token.T_ENDSWITCH.String(), + token.T_ENDWHILE.String(), + token.T_EVAL.String(), + token.T_EXIT.String(), + token.T_EXTENDS.String(), + token.T_FINAL.String(), + token.T_FINALLY.String(), + token.T_FOR.String(), + token.T_FOREACH.String(), + token.T_FUNCTION.String(), + token.T_FUNCTION.String(), + token.T_GLOBAL.String(), + token.T_GOTO.String(), + token.T_IF.String(), + token.T_ISSET.String(), + token.T_IMPLEMENTS.String(), + token.T_INSTANCEOF.String(), + token.T_INSTEADOF.String(), + token.T_INTERFACE.String(), + token.T_LIST.String(), + token.T_NAMESPACE.String(), + token.T_PRIVATE.String(), + token.T_PUBLIC.String(), + token.T_PRINT.String(), + token.T_PROTECTED.String(), + token.T_RETURN.String(), + token.T_STATIC.String(), + token.T_SWITCH.String(), + token.T_THROW.String(), + token.T_TRAIT.String(), + token.T_TRY.String(), + token.T_UNSET.String(), + token.T_USE.String(), + token.T_VAR.String(), + token.T_WHILE.String(), + token.T_YIELD_FROM.String(), + token.T_YIELD.String(), + token.T_INCLUDE.String(), + token.T_INCLUDE_ONCE.String(), + token.T_REQUIRE.String(), + token.T_REQUIRE_ONCE.String(), - T_CLASS_C.String(), - T_DIR.String(), - T_FILE.String(), - T_FUNC_C.String(), - T_LINE.String(), - T_NS_C.String(), - T_METHOD_C.String(), - T_TRAIT_C.String(), - T_HALT_COMPILER.String(), + token.T_CLASS_C.String(), + token.T_DIR.String(), + token.T_FILE.String(), + token.T_FUNC_C.String(), + token.T_LINE.String(), + token.T_NS_C.String(), + token.T_METHOD_C.String(), + token.T_TRAIT_C.String(), + token.T_HALT_COMPILER.String(), - T_NEW.String(), - T_LOGICAL_AND.String(), - T_LOGICAL_OR.String(), - T_LOGICAL_XOR.String(), + token.T_NEW.String(), + token.T_LOGICAL_AND.String(), + token.T_LOGICAL_OR.String(), + token.T_LOGICAL_XOR.String(), - T_NS_SEPARATOR.String(), - T_ELLIPSIS.String(), - T_PAAMAYIM_NEKUDOTAYIM.String(), - T_BOOLEAN_AND.String(), - T_BOOLEAN_OR.String(), - T_AND_EQUAL.String(), - T_OR_EQUAL.String(), - T_CONCAT_EQUAL.String(), - T_MUL_EQUAL.String(), - T_POW_EQUAL.String(), - T_DIV_EQUAL.String(), - T_PLUS_EQUAL.String(), - 
T_MINUS_EQUAL.String(), - T_XOR_EQUAL.String(), - T_MOD_EQUAL.String(), - T_DEC.String(), - T_INC.String(), - T_DOUBLE_ARROW.String(), - T_SPACESHIP.String(), - T_IS_NOT_EQUAL.String(), - T_IS_NOT_EQUAL.String(), - T_IS_NOT_IDENTICAL.String(), - T_IS_EQUAL.String(), - T_IS_IDENTICAL.String(), - T_SL_EQUAL.String(), - T_SR_EQUAL.String(), - T_IS_GREATER_OR_EQUAL.String(), - T_IS_SMALLER_OR_EQUAL.String(), - T_POW.String(), - T_SL.String(), - T_SR.String(), - T_COALESCE.String(), + token.T_NS_SEPARATOR.String(), + token.T_ELLIPSIS.String(), + token.T_PAAMAYIM_NEKUDOTAYIM.String(), + token.T_BOOLEAN_AND.String(), + token.T_BOOLEAN_OR.String(), + token.T_AND_EQUAL.String(), + token.T_OR_EQUAL.String(), + token.T_CONCAT_EQUAL.String(), + token.T_MUL_EQUAL.String(), + token.T_POW_EQUAL.String(), + token.T_DIV_EQUAL.String(), + token.T_PLUS_EQUAL.String(), + token.T_MINUS_EQUAL.String(), + token.T_XOR_EQUAL.String(), + token.T_MOD_EQUAL.String(), + token.T_DEC.String(), + token.T_INC.String(), + token.T_DOUBLE_ARROW.String(), + token.T_SPACESHIP.String(), + token.T_IS_NOT_EQUAL.String(), + token.T_IS_NOT_EQUAL.String(), + token.T_IS_NOT_IDENTICAL.String(), + token.T_IS_EQUAL.String(), + token.T_IS_IDENTICAL.String(), + token.T_SL_EQUAL.String(), + token.T_SR_EQUAL.String(), + token.T_IS_GREATER_OR_EQUAL.String(), + token.T_IS_SMALLER_OR_EQUAL.String(), + token.T_POW.String(), + token.T_SL.String(), + token.T_SR.String(), + token.T_COALESCE.String(), - TokenID(int(';')).String(), - TokenID(int(':')).String(), - TokenID(int(',')).String(), - TokenID(int('.')).String(), - TokenID(int('[')).String(), - TokenID(int(']')).String(), - TokenID(int('(')).String(), - TokenID(int(')')).String(), - TokenID(int('|')).String(), - TokenID(int('/')).String(), - TokenID(int('^')).String(), - TokenID(int('&')).String(), - TokenID(int('+')).String(), - TokenID(int('-')).String(), - TokenID(int('*')).String(), - TokenID(int('=')).String(), - TokenID(int('%')).String(), - TokenID(int('!')).String(), - TokenID(int('~')).String(), - TokenID(int('$')).String(), - TokenID(int('<')).String(), - TokenID(int('>')).String(), - TokenID(int('?')).String(), - TokenID(int('@')).String(), - TokenID(int('{')).String(), - TokenID(int('}')).String(), + token.ID(int(';')).String(), + token.ID(int(':')).String(), + token.ID(int(',')).String(), + token.ID(int('.')).String(), + token.ID(int('[')).String(), + token.ID(int(']')).String(), + token.ID(int('(')).String(), + token.ID(int(')')).String(), + token.ID(int('|')).String(), + token.ID(int('/')).String(), + token.ID(int('^')).String(), + token.ID(int('&')).String(), + token.ID(int('+')).String(), + token.ID(int('-')).String(), + token.ID(int('*')).String(), + token.ID(int('=')).String(), + token.ID(int('%')).String(), + token.ID(int('!')).String(), + token.ID(int('~')).String(), + token.ID(int('$')).String(), + token.ID(int('<')).String(), + token.ID(int('>')).String(), + token.ID(int('?')).String(), + token.ID(int('@')).String(), + token.ID(int('{')).String(), + token.ID(int('}')).String(), - T_VARIABLE.String(), - T_STRING.String(), + token.T_VARIABLE.String(), + token.T_STRING.String(), - T_OBJECT_OPERATOR.String(), - T_OBJECT_OPERATOR.String(), - T_STRING.String(), + token.T_OBJECT_OPERATOR.String(), + token.T_OBJECT_OPERATOR.String(), + token.T_STRING.String(), - T_ARRAY_CAST.String(), - T_BOOL_CAST.String(), - T_BOOL_CAST.String(), - T_DOUBLE_CAST.String(), - T_DOUBLE_CAST.String(), - T_DOUBLE_CAST.String(), - T_INT_CAST.String(), - T_INT_CAST.String(), - 
T_OBJECT_CAST.String(), - T_STRING_CAST.String(), - T_STRING_CAST.String(), - T_UNSET_CAST.String(), + token.T_ARRAY_CAST.String(), + token.T_BOOL_CAST.String(), + token.T_BOOL_CAST.String(), + token.T_DOUBLE_CAST.String(), + token.T_DOUBLE_CAST.String(), + token.T_DOUBLE_CAST.String(), + token.T_INT_CAST.String(), + token.T_INT_CAST.String(), + token.T_OBJECT_CAST.String(), + token.T_STRING_CAST.String(), + token.T_STRING_CAST.String(), + token.T_UNSET_CAST.String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -381,15 +380,14 @@ func TestShebang(t *testing.T) { "\n", } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} tkn := lexer.Lex() - assert.Equal(t, tkn.ID, T_DNUMBER) + assert.Equal(t, tkn.ID, token.T_DNUMBER) - l := len(tkn.Tokens) - for _, tt := range tkn.Tokens[:l-1] { + l := len(tkn.SkippedTokens) + for _, tt := range tkn.SkippedTokens[:l-1] { actual = append(actual, string(tt.Value)) } @@ -402,15 +400,14 @@ func TestShebangHtml(t *testing.T) { 0.1 ` - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) tkn := lexer.Lex() - assert.Equal(t, tkn.ID, T_INLINE_HTML) - assert.Equal(t, string(tkn.Tokens[0].Value), "#!/usr/bin/env php\n") + assert.Equal(t, tkn.ID, token.T_INLINE_HTML) + assert.Equal(t, string(tkn.SkippedTokens[0].Value), "#!/usr/bin/env php\n") tkn = lexer.Lex() - assert.Equal(t, tkn.ID, T_DNUMBER) + assert.Equal(t, tkn.ID, token.T_DNUMBER) } func TestNumberTokens(t *testing.T) { @@ -434,26 +431,25 @@ func TestNumberTokens(t *testing.T) { ` expected := []string{ - T_DNUMBER.String(), - T_DNUMBER.String(), - T_DNUMBER.String(), - T_DNUMBER.String(), + token.T_DNUMBER.String(), + token.T_DNUMBER.String(), + token.T_DNUMBER.String(), + token.T_DNUMBER.String(), - T_LNUMBER.String(), - T_DNUMBER.String(), + token.T_LNUMBER.String(), + token.T_DNUMBER.String(), - T_LNUMBER.String(), - T_DNUMBER.String(), + token.T_LNUMBER.String(), + token.T_DNUMBER.String(), - T_LNUMBER.String(), - T_LNUMBER.String(), + token.T_LNUMBER.String(), + token.T_LNUMBER.String(), - T_DNUMBER.String(), - T_DNUMBER.String(), + token.T_DNUMBER.String(), + token.T_DNUMBER.String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -490,27 +486,26 @@ func TestConstantStrings(t *testing.T) { ` expected := []string{ - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - 
T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -547,16 +542,16 @@ func TestSingleQuoteStringTokens(t *testing.T) { ` expected := []string{ - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -591,67 +586,66 @@ func TestTeplateStringTokens(t *testing.T) { ` expected := []string{ - TokenID(int('"')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('}')).String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.T_CURLY_OPEN.String(), + token.T_VARIABLE.String(), + token.ID(int('}')).String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('}')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_DOLLAR_OPEN_CURLY_BRACES.String(), - T_STRING_VARNAME.String(), - TokenID(int('}')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_CURLY_OPEN.String(), + token.T_VARIABLE.String(), + token.ID(int('}')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_DOLLAR_OPEN_CURLY_BRACES.String(), + token.T_STRING_VARNAME.String(), + token.ID(int('}')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('}')).String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_CURLY_OPEN.String(), + token.T_VARIABLE.String(), + token.ID(int('}')).String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('"')).String(), + 
token.ID(int('"')).String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('"')).String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -682,67 +676,66 @@ func TestBackquoteStringTokens(t *testing.T) { ` expected := []string{ - TokenID(int('`')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('}')).String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.T_CURLY_OPEN.String(), + token.T_VARIABLE.String(), + token.ID(int('}')).String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('}')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_DOLLAR_OPEN_CURLY_BRACES.String(), - T_STRING_VARNAME.String(), - TokenID(int('}')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_CURLY_OPEN.String(), + token.T_VARIABLE.String(), + token.ID(int('}')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_DOLLAR_OPEN_CURLY_BRACES.String(), + token.T_STRING_VARNAME.String(), + token.ID(int('}')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('}')).String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_CURLY_OPEN.String(), + token.T_VARIABLE.String(), + token.ID(int('}')).String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - 
T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('`')).String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -782,61 +775,60 @@ CAT; ` expected := []string{ - T_START_HEREDOC.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_END_HEREDOC.String(), - TokenID(int(';')).String(), + token.T_START_HEREDOC.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_END_HEREDOC.String(), + token.ID(int(';')).String(), - T_START_HEREDOC.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_END_HEREDOC.String(), - TokenID(int(';')).String(), + token.T_START_HEREDOC.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_END_HEREDOC.String(), + token.ID(int(';')).String(), - T_START_HEREDOC.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - T_OBJECT_OPERATOR.String(), - T_STRING.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('[')).String(), - T_NUM_STRING.String(), - TokenID(int(']')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('[')).String(), - T_NUM_STRING.String(), - TokenID(int(']')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('[')).String(), - T_NUM_STRING.String(), - TokenID(int(']')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('[')).String(), - T_STRING.String(), - TokenID(int(']')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('[')).String(), - T_VARIABLE.String(), - TokenID(int(']')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('}')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_DOLLAR_OPEN_CURLY_BRACES.String(), - T_STRING_VARNAME.String(), - TokenID(int('}')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_END_HEREDOC.String(), - TokenID(int(';')).String(), + token.T_START_HEREDOC.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.T_OBJECT_OPERATOR.String(), + token.T_STRING.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('[')).String(), + token.T_NUM_STRING.String(), + token.ID(int(']')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('[')).String(), + token.T_NUM_STRING.String(), + token.ID(int(']')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('[')).String(), + token.T_NUM_STRING.String(), + token.ID(int(']')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('[')).String(), + token.T_STRING.String(), + token.ID(int(']')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + 
token.T_VARIABLE.String(), + token.ID(int('[')).String(), + token.T_VARIABLE.String(), + token.ID(int(']')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_CURLY_OPEN.String(), + token.T_VARIABLE.String(), + token.ID(int('}')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_DOLLAR_OPEN_CURLY_BRACES.String(), + token.T_STRING_VARNAME.String(), + token.ID(int('}')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_END_HEREDOC.String(), + token.ID(int(';')).String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -875,41 +867,40 @@ CAT ` expected := []string{ - T_START_HEREDOC.String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_END_HEREDOC.String(), - TokenID(int(';')).String(), + token.T_START_HEREDOC.String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_END_HEREDOC.String(), + token.ID(int(';')).String(), - T_START_HEREDOC.String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_END_HEREDOC.String(), - TokenID(int(';')).String(), + token.T_START_HEREDOC.String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_END_HEREDOC.String(), + token.ID(int(';')).String(), - T_START_HEREDOC.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_END_HEREDOC.String(), - TokenID(int(';')).String(), + token.T_START_HEREDOC.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_END_HEREDOC.String(), + token.ID(int(';')).String(), - T_START_HEREDOC.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_END_HEREDOC.String(), - TokenID(int(';')).String(), + token.T_START_HEREDOC.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_END_HEREDOC.String(), + token.ID(int(';')).String(), - T_START_HEREDOC.String(), - T_VARIABLE.String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_END_HEREDOC.String(), + token.T_START_HEREDOC.String(), + token.T_VARIABLE.String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_END_HEREDOC.String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -934,21 +925,20 @@ CAT; expected := []string{ - T_START_HEREDOC.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('[')).String(), - T_CONSTANT_ENCAPSED_STRING.String(), - TokenID(int(']')).String(), - TokenID(int('}')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_END_HEREDOC.String(), - TokenID(int(';')).String(), + token.T_START_HEREDOC.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_CURLY_OPEN.String(), + token.T_VARIABLE.String(), + token.ID(int('[')).String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.ID(int(']')).String(), + token.ID(int('}')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_END_HEREDOC.String(), + token.ID(int(';')).String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -971,15 +961,14 @@ func 
TestHereDocTokens73(t *testing.T) { expected := []string{ - T_START_HEREDOC.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_END_HEREDOC.String(), - TokenID(int(',')).String(), - T_VARIABLE.String(), + token.T_START_HEREDOC.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_END_HEREDOC.String(), + token.ID(int(',')).String(), + token.T_VARIABLE.String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -1002,15 +991,14 @@ CAT;` expected := []string{ - T_START_HEREDOC.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_END_HEREDOC.String(), - TokenID(int(';')).String(), + token.T_START_HEREDOC.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_END_HEREDOC.String(), + token.ID(int(';')).String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) + lexer := NewLexer([]byte(src), "7.4", nil) lexer.phpVersion = "7.2" - lexer.withTokens = true actual := []string{} for { @@ -1032,17 +1020,16 @@ func TestInlineHtmlNopTokens(t *testing.T) { ` expected := []string{ - T_VARIABLE.String(), - TokenID(int(';')).String(), - T_INLINE_HTML.String(), + token.T_VARIABLE.String(), + token.ID(int(';')).String(), + token.T_INLINE_HTML.String(), - T_VARIABLE.String(), - TokenID(int(';')).String(), - T_INLINE_HTML.String(), + token.T_VARIABLE.String(), + token.ID(int(';')).String(), + token.T_INLINE_HTML.String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -1061,11 +1048,11 @@ func TestStringTokensAfterVariable(t *testing.T) { src := ` test` - expected := []token.Token{ + expected := []*token.Token{ { ID: token.T_OPEN_TAG, Value: []byte(" bar ( '' ) ;` - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) - expected := []token.Token{ + expected := []*token.Token{ { ID: token.T_OPEN_TAG, Value: []byte("= TokenID(len(_TokenID_index)-1) { - return "TokenID(" + strconv.FormatInt(int64(i+57346), 10) + ")" - } - return _TokenID_name[_TokenID_index[i]:_TokenID_index[i+1]] -} diff --git a/pkg/ast/node.go b/pkg/ast/node.go index 81f6ef4..85b494a 100644 --- a/pkg/ast/node.go +++ b/pkg/ast/node.go @@ -16,6 +16,10 @@ func (n *Node) GetNode() *Node { return n } +func (n *Node) GetPosition() *position.Position { + return n.Position +} + // Root node type Root struct { Node diff --git a/pkg/ast/visitor/dump.go b/pkg/ast/visitor/dump.go index 312b888..67bbaac 100644 --- a/pkg/ast/visitor/dump.go +++ b/pkg/ast/visitor/dump.go @@ -121,7 +121,7 @@ func (v *Dump) printNode(n *ast.Node) { key := token.Position(k) v.printIndent(v.indent + 2) - v.print("token." + key.String() + ": []token.Token{\n") + v.print("token." 
+ key.String() + ": []*token.Token{\n") for _, tkn := range n.Tokens[key] { v.printIndent(v.indent + 3) diff --git a/pkg/ast/visitor/dump_test.go b/pkg/ast/visitor/dump_test.go index cc9a387..0ccdc27 100644 --- a/pkg/ast/visitor/dump_test.go +++ b/pkg/ast/visitor/dump_test.go @@ -13,7 +13,7 @@ func ExampleDump() { stxTree := &ast.Root{ Node: ast.Node{ Tokens: token.Collection{ - token.Start: []token.Token{ + token.Start: []*token.Token{ { ID: token.T_WHITESPACE, Value: []byte(" "), @@ -44,7 +44,7 @@ func ExampleDump() { //&ast.Root{ // Node: ast.Node{ // Tokens: token.Collection{ - // token.Start: []token.Token{ + // token.Start: []*token.Token{ // { // ID: token.T_WHITESPACE, // Value: []byte(" "), diff --git a/pkg/ast/visitor/filter_tokens.go b/pkg/ast/visitor/filter_tokens.go new file mode 100644 index 0000000..c69a3f6 --- /dev/null +++ b/pkg/ast/visitor/filter_tokens.go @@ -0,0 +1,14 @@ +package visitor + +import ( + "github.com/z7zmey/php-parser/pkg/ast" +) + +type FilterTokens struct { + Null +} + +func (v *FilterTokens) EnterNode(n ast.Vertex) bool { + n.GetNode().Tokens = nil + return true +} diff --git a/pkg/parser/parser.go b/pkg/parser/parser.go index 67974fd..a50d09e 100644 --- a/pkg/parser/parser.go +++ b/pkg/parser/parser.go @@ -29,7 +29,7 @@ func Parse(src []byte, ver string, cfg Config) (ast.Vertex, error) { return nil, err } - lexer := scanner.NewLexer(src, ver, cfg.WithTokens, cfg.ErrorHandlerFunc) + lexer := scanner.NewLexer(src, ver, cfg.ErrorHandlerFunc) if r == -1 { parser = php5.NewParser(lexer, cfg.ErrorHandlerFunc) diff --git a/pkg/position/pool.go b/pkg/position/pool.go new file mode 100644 index 0000000..ad26891 --- /dev/null +++ b/pkg/position/pool.go @@ -0,0 +1,29 @@ +package position + +const DefaultBlockSize = 1024 + +type Pool struct { + block []Position + off int +} + +func NewPool(blockSize int) *Pool { + return &Pool{ + block: make([]Position, blockSize), + } +} + +func (p *Pool) Get() *Position { + if len(p.block) == 0 { + return nil + } + + if len(p.block) == p.off { + p.block = make([]Position, len(p.block)) + p.off = 0 + } + + p.off++ + + return &p.block[p.off-1] +} diff --git a/pkg/printer/printer_parsed_php5_test.go b/pkg/printer/printer_parsed_php5_test.go index 24b5acd..2745542 100644 --- a/pkg/printer/printer_parsed_php5_test.go +++ b/pkg/printer/printer_parsed_php5_test.go @@ -12,7 +12,7 @@ import ( ) func parsePhp5(src string) ast.Vertex { - lexer := scanner.NewLexer([]byte(src), "5.6", true, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() diff --git a/pkg/printer/printer_parsed_php7_test.go b/pkg/printer/printer_parsed_php7_test.go index 317ea02..5978f5d 100644 --- a/pkg/printer/printer_parsed_php7_test.go +++ b/pkg/printer/printer_parsed_php7_test.go @@ -29,7 +29,7 @@ abstract class Bar extends Baz // parse - lexer := scanner.NewLexer([]byte(src), "7.4", true, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() @@ -61,7 +61,7 @@ abstract class Bar extends Baz } func parse(src string) ast.Vertex { - lexer := scanner.NewLexer([]byte(src), "7.4", true, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() diff --git a/pkg/printer/printer_test.go b/pkg/printer/printer_test.go index 8e85ac0..8e6d4e0 100644 --- a/pkg/printer/printer_test.go +++ b/pkg/printer/printer_test.go @@ -75,7 +75,7 @@ func TestPrinterPrintFileInlineHtml(t *testing.T) { Expr: 
&ast.ExprVariable{ Node: ast.Node{ Tokens: token.Collection{ - token.Start: []token.Token{ + token.Start: []*token.Token{ { ID: token.ID('$'), Value: []byte("$"), @@ -93,7 +93,7 @@ func TestPrinterPrintFileInlineHtml(t *testing.T) { Expr: &ast.ExprVariable{ Node: ast.Node{ Tokens: token.Collection{ - token.Start: []token.Token{ + token.Start: []*token.Token{ { ID: token.ID('$'), Value: []byte("$"), diff --git a/pkg/token/pool.go b/pkg/token/pool.go new file mode 100644 index 0000000..02c88ee --- /dev/null +++ b/pkg/token/pool.go @@ -0,0 +1,29 @@ +package token + +const DefaultBlockSize = 1024 + +type Pool struct { + block []Token + off int +} + +func NewPool(blockSize int) *Pool { + return &Pool{ + block: make([]Token, blockSize), + } +} + +func (p *Pool) Get() *Token { + if len(p.block) == 0 { + return nil + } + + if len(p.block) == p.off { + p.block = make([]Token, len(p.block)) + p.off = 0 + } + + p.off++ + + return &p.block[p.off-1] +} diff --git a/pkg/token/pool_bench_test.go b/pkg/token/pool_bench_test.go new file mode 100644 index 0000000..f6b3faf --- /dev/null +++ b/pkg/token/pool_bench_test.go @@ -0,0 +1,173 @@ +package token + +import ( + "testing" +) + +const amount = 100000 + +func BenchmarkPlain(b *testing.B) { + for n := 0; n < b.N; n++ { + buf := make([]*Token, 0, amount) + + for i := 0; i < amount; i++ { + buf = append(buf, &Token{}) + } + } +} + +func BenchmarkSlice128(b *testing.B) { + for n := 0; n < b.N; n++ { + buf := make([]*Token, 0, amount) + slc := make([]Token, 0, 128) + + for i := 0; i < amount; i++ { + slc = append(slc, Token{}) + buf = append(buf, &slc[len(slc)-1]) + } + } +} + +func BenchmarkSlice512(b *testing.B) { + for n := 0; n < b.N; n++ { + buf := make([]*Token, 0, amount) + slc := make([]Token, 0, 512) + + for i := 0; i < amount; i++ { + slc = append(slc, Token{}) + buf = append(buf, &slc[len(slc)-1]) + } + } +} + +func BenchmarkSlice1024(b *testing.B) { + for n := 0; n < b.N; n++ { + buf := make([]*Token, 0, amount) + slc := make([]Token, 0, 1024) + + for i := 0; i < amount; i++ { + slc = append(slc, Token{}) + buf = append(buf, &slc[len(slc)-1]) + } + } +} + +func BenchmarkSlice2048(b *testing.B) { + for n := 0; n < b.N; n++ { + buf := make([]*Token, 0, amount) + slc := make([]Token, 0, 2048) + + for i := 0; i < amount; i++ { + slc = append(slc, Token{}) + buf = append(buf, &slc[len(slc)-1]) + } + } +} + +func BenchmarkBlockAppend128(b *testing.B) { + for n := 0; n < b.N; n++ { + buf := make([]*Token, 0, amount) + slc := make([]Token, 0, 128) + + for i := 0; i < amount; i++ { + if len(slc) == 128 { + slc = make([]Token, 0, 128) + } + + slc = append(slc, Token{}) + buf = append(buf, &slc[len(slc)-1]) + } + } +} + +func BenchmarkBlockAppend512(b *testing.B) { + for n := 0; n < b.N; n++ { + buf := make([]*Token, 0, amount) + slc := make([]Token, 0, 512) + + for i := 0; i < amount; i++ { + if len(slc) == 512 { + slc = make([]Token, 0, 512) + } + + slc = append(slc, Token{}) + buf = append(buf, &slc[len(slc)-1]) + } + } +} + +func BenchmarkBlockAppend1024(b *testing.B) { + for n := 0; n < b.N; n++ { + buf := make([]*Token, 0, amount) + slc := make([]Token, 0, 1024) + + for i := 0; i < amount; i++ { + if len(slc) == 1024 { + slc = make([]Token, 0, 1024) + } + + slc = append(slc, Token{}) + buf = append(buf, &slc[len(slc)-1]) + } + } +} + +func BenchmarkBlockAppend2048(b *testing.B) { + for n := 0; n < b.N; n++ { + buf := make([]*Token, 0, amount) + slc := make([]Token, 0, 2048) + + for i := 0; i < amount; i++ { + if len(slc) == 2048 { + slc = make([]Token, 
0, 2048) + } + + slc = append(slc, Token{}) + buf = append(buf, &slc[len(slc)-1]) + } + } +} + +func BenchmarkPool128(b *testing.B) { + for n := 0; n < b.N; n++ { + pool := NewPool(128) + buf := make([]*Token, 0, amount) + + for i := 0; i < amount; i++ { + buf = append(buf, pool.Get()) + } + } +} + +func BenchmarkPool512(b *testing.B) { + for n := 0; n < b.N; n++ { + pool := NewPool(512) + buf := make([]*Token, 0, amount) + + for i := 0; i < amount; i++ { + buf = append(buf, pool.Get()) + } + } +} + +func BenchmarkPool1024(b *testing.B) { + for n := 0; n < b.N; n++ { + pool := NewPool(1024) + buf := make([]*Token, 0, amount) + + for i := 0; i < amount; i++ { + buf = append(buf, pool.Get()) + } + } +} + +func BenchmarkPool2048(b *testing.B) { + for n := 0; n < b.N; n++ { + pool := NewPool(2048) + buf := make([]*Token, 0, amount) + + for i := 0; i < amount; i++ { + buf = append(buf, pool.Get()) + } + } +} diff --git a/pkg/token/position.go b/pkg/token/position.go index 2ea40d4..cd76285 100644 --- a/pkg/token/position.go +++ b/pkg/token/position.go @@ -62,7 +62,7 @@ const ( CloseParenthesisToken ) -type Collection map[Position][]Token +type Collection map[Position][]*Token func (c Collection) IsEmpty() bool { for _, v := range c { diff --git a/pkg/token/token.go b/pkg/token/token.go index c1fa07e..b36c85f 100644 --- a/pkg/token/token.go +++ b/pkg/token/token.go @@ -1,5 +1,7 @@ package token +import "github.com/z7zmey/php-parser/pkg/position" + //go:generate stringer -type=ID -output ./token_string.go type ID int @@ -145,6 +147,13 @@ const ( ) type Token struct { - ID ID - Value []byte + ID ID + Value []byte + Position *position.Position + SkippedTokens []*Token + Skipped []byte +} + +func (t *Token) GetPosition() *position.Position { + return t.Position }