diff --git a/internal/php5/parser.go b/internal/php5/parser.go
index 1fe5085..5b06343 100644
--- a/internal/php5/parser.go
+++ b/internal/php5/parser.go
@@ -12,7 +12,7 @@ import (
 // Parser structure
 type Parser struct {
 	Lexer          *scanner.Lexer
-	currentToken   *scanner.Token
+	currentToken   *token.Token
 	rootNode       ast.Vertex
 	errHandlerFunc func(*errors.Error)
 }
@@ -40,8 +40,7 @@ func (p *Parser) Error(msg string) {
 		return
 	}
 
-	var pos = p.currentToken.Position
-	p.errHandlerFunc(errors.NewError(msg, &pos))
+	p.errHandlerFunc(errors.NewError(msg, p.currentToken.Position))
 }
 
 // Parse the php7 Parser entrypoint
@@ -82,7 +81,7 @@ func (p *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) {
 	delete(src.GetNode().Tokens, token.Start)
 }
 
-func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []*token.Token) {
 	if len(tokens) == 0 {
 		return
 	}
@@ -98,7 +97,7 @@ func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []to
 	}
 }
 
-func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []*token.Token) {
 	if len(tokens) == 0 {
 		return
 	}
@@ -108,14 +107,14 @@ func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, token
 		*dstCollection = make(token.Collection)
 	}
 
-	(*dstCollection)[pos] = make([]token.Token, 0)
+	(*dstCollection)[pos] = make([]*token.Token, 0)
 
 	for _, v := range tokens {
 		(*dstCollection)[pos] = append((*dstCollection)[pos], v)
 	}
 }
 
-func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []*token.Token) {
 	if len(tokens) == 0 {
 		return
 	}
@@ -141,7 +140,7 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.
 	}
 
 	if semiColon[0].Value[0] == ';' {
-		p.setFreeFloatingTokens(prevNode, token.SemiColon, []token.Token{
+		p.setFreeFloatingTokens(prevNode, token.SemiColon, []*token.Token{
 			{
 				ID:    token.ID(';'),
 				Value: semiColon[0].Value[0:1],
@@ -155,28 +154,18 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.
 		tlen = 3
 	}
 
-	phpCloseTag := []token.Token{}
+	phpCloseTag := []*token.Token{}
 	if vlen-tlen > 1 {
-		phpCloseTag = append(phpCloseTag, token.Token{
+		phpCloseTag = append(phpCloseTag, &token.Token{
 			ID:    token.T_WHITESPACE,
 			Value: semiColon[0].Value[1 : vlen-tlen],
 		})
 	}
 
-	phpCloseTag = append(phpCloseTag, token.Token{
+	phpCloseTag = append(phpCloseTag, &token.Token{
 		ID:    T_CLOSE_TAG,
 		Value: semiColon[0].Value[vlen-tlen:],
 	})
 
 	p.setFreeFloatingTokens(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...))
 }
-
-func (p *Parser) returnTokenToPool(yyDollar []yySymType, yyVAL *yySymType) {
-	for i := 1; i < len(yyDollar); i++ {
-		if yyDollar[i].token != nil {
-			p.Lexer.ReturnTokenToPool(yyDollar[i].token)
-		}
-		yyDollar[i].token = nil
-	}
-	yyVAL.token = nil
-}
diff --git a/internal/php5/parser_test.go b/internal/php5/parser_test.go
index a33aa36..bc6d43c 100644
--- a/internal/php5/parser_test.go
+++ b/internal/php5/parser_test.go
@@ -60,11 +60,12 @@ func TestIdentifier(t *testing.T) {
 		},
 	}
 
-	lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
+	lexer := scanner.NewLexer([]byte(src), "5.6", nil)
 	php5parser := php5.NewParser(lexer, nil)
 	php5parser.Parse()
 	actual := php5parser.GetRootNode()
 	traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+	traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
 
 	assert.DeepEqual(t, expected, actual)
 }
@@ -844,11 +845,12 @@ func TestPhp5ArgumentNode(t *testing.T) {
 		},
 	}
 
-	lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
+	lexer := scanner.NewLexer([]byte(src), "5.6", nil)
 	php5parser := php5.NewParser(lexer, nil)
 	php5parser.Parse()
 	actual := php5parser.GetRootNode()
 	traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+	traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
 
 	assert.DeepEqual(t, expected, actual)
 }
@@ -1658,11 +1660,12 @@ func TestPhp5ParameterNode(t *testing.T) {
 		},
 	}
 
-	lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
+	lexer := scanner.NewLexer([]byte(src), "5.6", nil)
 	php5parser := php5.NewParser(lexer, nil)
 	php5parser.Parse()
 	actual := php5parser.GetRootNode()
 	traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+	traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
 
 	assert.DeepEqual(t, expected, actual)
 }
@@ -1681,11 +1684,12 @@ func TestCommentEndFile(t *testing.T) {
 		Stmts: []ast.Vertex{},
 	}
 
-	lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
+	lexer := scanner.NewLexer([]byte(src), "5.6", nil)
 	php5parser := php5.NewParser(lexer, nil)
 	php5parser.Parse()
 	actual := php5parser.GetRootNode()
 	traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+	traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
 
 	assert.DeepEqual(t, expected, actual)
 }
@@ -1760,11 +1764,12 @@ func TestName(t *testing.T) {
 		},
 	}
 
-	lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
+	lexer := scanner.NewLexer([]byte(src), "5.6", nil)
 	php5parser := php5.NewParser(lexer, nil)
 	php5parser.Parse()
 	actual := php5parser.GetRootNode()
 	traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+	traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
 
 	assert.DeepEqual(t, expected, actual)
 }
@@ -1837,11 +1842,12 @@ func TestFullyQualified(t *testing.T) {
 		},
 	}
 
-	lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
+	lexer := scanner.NewLexer([]byte(src), "5.6", nil)
 	php5parser := php5.NewParser(lexer, nil)
 	php5parser.Parse()
 	actual := php5parser.GetRootNode()
 	traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -1914,11 +1920,12 @@ func TestRelative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -1994,11 +2001,12 @@ func TestScalarEncapsed_SimpleVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2072,11 +2080,12 @@ func TestScalarEncapsed_SimpleVarOneChar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2161,11 +2170,12 @@ func TestScalarEncapsed_SimpleVarEndsEcapsed(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2260,11 +2270,12 @@ func TestScalarEncapsed_StringVarCurveOpen(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2370,11 +2381,12 @@ func TestScalarEncapsed_SimpleVarPropertyFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2448,11 +2460,12 @@ func TestScalarEncapsed_DollarOpenCurlyBraces(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2547,11 +2560,12 @@ func TestScalarEncapsed_DollarOpenCurlyBracesDimNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser 
:= php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2656,11 +2670,12 @@ func TestScalarEncapsed_CurlyOpenMethodCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2749,11 +2764,12 @@ LBL; }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2842,11 +2858,12 @@ LBL; }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2903,11 +2920,12 @@ LBL; }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2950,11 +2968,12 @@ CAD; }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3011,11 +3030,12 @@ CAD; }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3057,11 +3077,12 @@ func TestScalarMagicConstant(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3102,11 +3123,12 @@ func TestScalarNumber_LNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() 
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3147,11 +3169,12 @@ func TestScalarNumber_DNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3192,11 +3215,12 @@ func TestScalarNumber_Float(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3237,11 +3261,12 @@ func TestScalarNumber_BinaryLNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3282,11 +3307,12 @@ func TestScalarNumber_BinaryDNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3327,11 +3353,12 @@ func TestScalarNumber_HLNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3372,11 +3399,12 @@ func TestScalarNumber_HDNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3417,11 +3445,12 @@ func TestScalarString_DoubleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3462,11 +3491,12 @@ func TestScalarString_DoubleQuotedScalarStringWithEscapedVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := 
scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3509,11 +3539,12 @@ func TestScalarString_MultilineDoubleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3554,11 +3585,12 @@ func TestScalarString_SingleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3601,11 +3633,12 @@ func TestScalarString_MultilineSingleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3672,11 +3705,12 @@ func TestStmtAltIf_AltIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3786,11 +3820,12 @@ func TestStmtAltIf_AltElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3877,11 +3912,12 @@ func TestStmtAltIf_AltElse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4056,11 +4092,12 @@ func TestStmtAltIf_AltElseElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4179,11 +4216,12 
@@ func TestStmtClassConstList_WithoutModifiers(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4259,11 +4297,12 @@ func TestStmtClassMethod_SimpleClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4409,11 +4448,12 @@ func TestStmtClassMethod_PrivateProtectedClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4513,11 +4553,12 @@ func TestStmtClassMethod_Php5ClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4629,11 +4670,12 @@ func TestStmtClassMethod_AbstractClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4675,11 +4717,12 @@ func TestStmtClass_SimpleClass(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4734,11 +4777,12 @@ func TestStmtClass_AbstractClass(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4826,11 +4870,12 @@ func TestStmtClass_ClassExtends(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() 
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4920,11 +4965,12 @@ func TestStmtClass_ClassImplement(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5037,11 +5083,12 @@ func TestStmtClass_ClassImplements(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5137,11 +5184,12 @@ func TestStmtConstList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5204,11 +5252,12 @@ func TestStmtContinue_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5282,11 +5331,12 @@ func TestStmtContinue_Light(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5360,11 +5410,12 @@ func TestStmtContinue(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5438,11 +5489,12 @@ func TestStmtDeclare(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5549,11 +5601,12 @@ func TestStmtDeclare_Stmts(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) 
php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5629,11 +5682,12 @@ func TestStmtDeclare_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5685,11 +5739,12 @@ func TestStmtDo(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5753,11 +5808,12 @@ func TestStmtEcho(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5810,11 +5866,12 @@ func TestStmtEcho_Parenthesis(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5855,11 +5912,12 @@ func TestStmtExpression(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6052,11 +6110,12 @@ func TestStmtFor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6174,11 +6233,12 @@ func TestStmtFor_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6261,11 +6321,12 @@ func TestStmtForeach(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) 
php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6338,11 +6399,12 @@ func TestStmtForeach_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6425,11 +6487,12 @@ func TestStmtForeach_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6533,11 +6596,12 @@ func TestStmtForeach_WithKey(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6631,11 +6695,12 @@ func TestStmtForeach_ExprWithKey(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6749,11 +6814,12 @@ func TestStmtForeach_WithRef(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6879,11 +6945,12 @@ func TestStmtForeach_WithList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6926,11 +6993,12 @@ func TestStmtFunction(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6984,11 +7052,12 @@ func TestStmtFunction_Return(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", 
nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7149,11 +7218,12 @@ func TestStmtFunction_ReturnVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7218,11 +7288,12 @@ func TestStmtFunction_Ref(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7275,11 +7346,12 @@ func TestStmtGlobal(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7437,11 +7509,12 @@ func TestStmtGlobal_Vars(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7503,11 +7576,12 @@ func TestStmtGotoLabel(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7537,11 +7611,12 @@ func TestStmtHaltCompiler(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7603,11 +7678,12 @@ func TestStmtIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7713,11 +7789,12 @@ func TestStmtIf_ElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := 
scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7800,11 +7877,12 @@ func TestStmtIf_Else(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7973,11 +8051,12 @@ func TestStmtIf_ElseElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8156,11 +8235,12 @@ func TestStmtIf_ElseIfElseIfElse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8201,11 +8281,12 @@ func TestStmtInlineHtml(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8247,11 +8328,12 @@ func TestStmtInterface(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8328,11 +8410,12 @@ func TestStmtInterface_Extend(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8432,11 +8515,12 @@ func TestStmtInterface_Extends(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8489,11 +8573,12 @@ func TestStmtNamespace(t *testing.T) { }, } - lexer := 
scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8547,11 +8632,12 @@ func TestStmtNamespace_Stmts(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8582,11 +8668,12 @@ func TestStmtNamespace_Anonymous(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8685,11 +8772,12 @@ func TestStmtProperty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8841,11 +8929,12 @@ func TestStmtProperty_Properties(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8997,11 +9086,12 @@ func TestStmtProperty_Properties2(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9064,11 +9154,12 @@ func TestStmtStaticVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9173,11 +9264,12 @@ func TestStmtStaticVar_Vars(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9282,11 +9374,12 @@ func 
TestStmtStaticVar_Vars2(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9410,11 +9503,12 @@ func TestStmtSwitch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9538,11 +9632,12 @@ func TestStmtSwitch_Semicolon(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9656,11 +9751,12 @@ func TestStmtSwitch_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9762,11 +9858,12 @@ func TestStmtSwitch_AltSemicolon(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9817,11 +9914,12 @@ func TestStmtThrow(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9863,11 +9961,12 @@ func TestStmtTrait(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9955,11 +10054,12 @@ func TestStmtTraitUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ 
-10070,11 +10170,12 @@ func TestStmtTraitUse_Uses(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10185,11 +10286,12 @@ func TestStmtTraitUse_EmptyAdaptations(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10344,11 +10446,12 @@ func TestStmtTraitUse_Modifier(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10514,11 +10617,12 @@ func TestStmtTraitUse_AliasModifier(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10798,11 +10902,12 @@ func TestStmtTraitUse_Adaptions(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10836,11 +10941,12 @@ func TestStmtTry_Try(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10932,11 +11038,12 @@ func TestStmtTry_TryCatch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11085,11 +11192,12 @@ func TestStmtTry_TryCatchCatch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11192,11 +11300,12 @@ func TestStmtTry_TryCatchFinally(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11400,11 +11509,12 @@ func TestStmtTry_TryCatchCatchCatch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11457,11 +11567,12 @@ func TestStmtUnset(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11535,11 +11646,12 @@ func TestStmtUnset_Vars(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11614,11 +11726,12 @@ func TestStmtUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11693,11 +11806,12 @@ func TestStmtUse_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11783,11 +11897,12 @@ func TestStmtUse_FullyQualifiedAlias(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11895,11 +12010,12 @@ func TestStmtUse_List(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() 
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12018,11 +12134,12 @@ func TestStmtUse_ListAlias(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12151,11 +12268,12 @@ func TestStmtUse_ListFunctionType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12306,11 +12424,12 @@ func TestStmtUse_ListFunctionTypeAliases(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12440,11 +12559,12 @@ func TestStmtUse_ListConstType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12595,11 +12715,12 @@ func TestStmtUse_ListConstTypeAliases(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12662,11 +12783,12 @@ func TestStmtBreak_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12740,11 +12862,12 @@ func TestStmtBreak_Light(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12818,11 +12941,12 @@ func TestStmtBreak(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := 
php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12896,11 +13020,12 @@ func TestExprArrayDimFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12993,11 +13118,12 @@ func TestExprArrayDimFetch_Nested(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13038,11 +13164,12 @@ func TestExprArray(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13105,11 +13232,12 @@ func TestExprArray_Item(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13225,11 +13353,12 @@ func TestExprArray_Items(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13290,11 +13419,12 @@ func TestExprBitwiseNot(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13355,11 +13485,12 @@ func TestExprBooleanNot(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13433,11 +13564,12 @@ func TestExprClassConstFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := 
scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13499,11 +13631,12 @@ func TestExprClassConstFetch_Static(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13564,11 +13697,12 @@ func TestExprClone_Brackets(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13629,11 +13763,12 @@ func TestExprClone(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13676,11 +13811,12 @@ func TestExprClosure(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13851,11 +13987,12 @@ func TestExprClosure_Use(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14026,11 +14163,12 @@ func TestExprClosure_Use2(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14093,11 +14231,12 @@ func TestExprConstFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14160,11 +14299,12 @@ func TestExprConstFetch_Relative(t *testing.T) { }, } - lexer := 
scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14227,11 +14367,12 @@ func TestExprConstFetch_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14292,11 +14433,12 @@ func TestExprEmpty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14357,11 +14499,12 @@ func TestExprErrorSuppress(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14422,11 +14565,12 @@ func TestExprEval(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14467,11 +14611,12 @@ func TestExprExit(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14512,11 +14657,12 @@ func TestExprExit_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14578,11 +14724,12 @@ func TestExprExit_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14623,11 +14770,12 @@ func TestExprDie(t 
*testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14668,11 +14816,12 @@ func TestExprDie_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14734,11 +14883,12 @@ func TestExprDie_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14811,11 +14961,12 @@ func TestExprFunctionCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14888,11 +15039,12 @@ func TestExprFunctionCall_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14990,11 +15142,12 @@ func TestExprFunctionCall_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15110,11 +15263,12 @@ func TestExprFunctionCall_Var(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15243,11 +15397,12 @@ func TestExprFunctionCall_ExprArg(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, 
expected, actual) } @@ -15308,11 +15463,12 @@ func TestExprPostDec(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15373,11 +15529,12 @@ func TestExprPostInc(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15438,11 +15595,12 @@ func TestExprPreDec(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15503,11 +15661,12 @@ func TestExprPreInc(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15568,11 +15727,12 @@ func TestExprInclude(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15633,11 +15793,12 @@ func TestExprInclude_Once(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15698,11 +15859,12 @@ func TestExprRequire(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15763,11 +15925,12 @@ func TestExprRequire_Once(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) 
assert.DeepEqual(t, expected, actual) } @@ -15851,11 +16014,12 @@ func TestExprInstanceOf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15939,11 +16103,12 @@ func TestExprInstanceOf_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16027,11 +16192,12 @@ func TestExprInstanceOf_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16094,11 +16260,12 @@ func TestExprIsset(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16182,11 +16349,12 @@ func TestExprIsset_Variables(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16258,11 +16426,12 @@ func TestExprList_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16366,11 +16535,12 @@ func TestExprList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16484,11 +16654,12 @@ func TestExprList_ArrayIndex(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16614,11 +16785,12 @@ func TestExprList_List(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16723,11 +16895,12 @@ func TestExprList_EmptyItem(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16834,11 +17007,12 @@ func TestExprList_EmptyItems(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16920,11 +17094,12 @@ func TestExprMethodCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16987,11 +17162,12 @@ func TestExprNew(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17064,11 +17240,12 @@ func TestExprNew_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17141,11 +17318,12 @@ func TestExprNew_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17206,11 +17384,12 @@ func TestExprPrint(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() 
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17282,11 +17461,12 @@ func TestExprPropertyFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17401,11 +17581,12 @@ func TestExprReference_ForeachWithRef(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17479,11 +17660,12 @@ func TestExprShellExec(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17524,11 +17706,12 @@ func TestExprShortArray(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17591,11 +17774,12 @@ func TestExprShortArray_Item(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17711,11 +17895,12 @@ func TestExprShortArray_Items(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17799,11 +17984,12 @@ func TestExprStaticCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17887,11 +18073,12 @@ func TestExprStaticCall_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := 
php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17975,11 +18162,12 @@ func TestExprStaticCall_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18073,11 +18261,12 @@ func TestExprStaticCall_Var(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18169,11 +18358,12 @@ func TestExprStaticCall_VarVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18257,11 +18447,12 @@ func TestExprStaticPropertyFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18345,11 +18536,12 @@ func TestExprStaticPropertyFetch_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18433,11 +18625,12 @@ func TestExprStaticPropertyFetch_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18540,11 +18733,12 @@ func TestExprTernary(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18626,11 +18820,12 @@ func TestExprTernary_Simple(t *testing.T) { }, } - lexer := 
scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18785,11 +18980,12 @@ func TestExprTernary_NestedTrue(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18944,11 +19140,12 @@ func TestExprTernary_NestedCond(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19009,11 +19206,12 @@ func TestExprUnaryMinus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19074,11 +19272,12 @@ func TestExprUnaryPlus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19129,11 +19328,12 @@ func TestExprVariable(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19194,11 +19394,12 @@ func TestExprVariable_Variable(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19238,11 +19439,12 @@ func TestExprYield(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19303,11 +19505,12 @@ func 
TestExprYield_Val(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19389,11 +19592,12 @@ func TestExprYield_KeyVal(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19444,11 +19648,12 @@ func TestExprYield_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19520,11 +19725,12 @@ func TestExprYield_KeyExpr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19608,11 +19814,12 @@ func TestExprAssign_Assign(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19694,11 +19901,12 @@ func TestExprAssign_Reference(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19792,11 +20000,12 @@ func TestExprAssign_ReferenceNew(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19935,11 +20144,12 @@ func TestExprAssign_ReferenceArgs(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) 
assert.DeepEqual(t, expected, actual) } @@ -20021,11 +20231,12 @@ func TestExprAssign_BitwiseAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20107,11 +20318,12 @@ func TestExprAssign_BitwiseOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20193,11 +20405,12 @@ func TestExprAssign_BitwiseXor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20279,11 +20492,12 @@ func TestExprAssign_Concat(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20365,11 +20579,12 @@ func TestExprAssign_Div(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20451,11 +20666,12 @@ func TestExprAssign_Minus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20537,11 +20753,12 @@ func TestExprAssign_Mod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20623,11 +20840,12 @@ func TestExprAssign_Mul(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20709,11 +20927,12 @@ func TestExprAssign_Plus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20795,11 +21014,12 @@ func TestExprAssign_Pow(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20881,11 +21101,12 @@ func TestExprAssign_ShiftLeft(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20967,11 +21188,12 @@ func TestExprAssign_ShiftRight(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21055,11 +21277,12 @@ func TestExprBinary_BitwiseAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21141,11 +21364,12 @@ func TestExprBinary_BitwiseOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21227,11 +21451,12 @@ func TestExprBinary_BitwiseXor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21313,11 +21538,12 @@ func TestExprBinary_BooleanAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := 
php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21399,11 +21625,12 @@ func TestExprBinary_BooleanOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21485,11 +21712,12 @@ func TestExprBinary_Concat(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21571,11 +21799,12 @@ func TestExprBinary_Div(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21657,11 +21886,12 @@ func TestExprBinary_Equal(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21743,11 +21973,12 @@ func TestExprBinary_GreaterOrEqual(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21829,11 +22060,12 @@ func TestExprBinary_Greater(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21915,11 +22147,12 @@ func TestExprBinary_Identical(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22001,11 +22234,12 @@ func TestExprBinary_LogicalAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) 
php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22087,11 +22321,12 @@ func TestExprBinary_LogicalOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22173,11 +22408,12 @@ func TestExprBinary_LogicalXor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22259,11 +22495,12 @@ func TestExprBinary_Minus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22345,11 +22582,12 @@ func TestExprBinary_Mod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22431,11 +22669,12 @@ func TestExprBinary_Mul(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22517,11 +22756,12 @@ func TestExprBinary_NotEqual(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22603,11 +22843,12 @@ func TestExprBinary_NotIdentical(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22689,11 +22930,12 @@ func TestExprBinary_Plus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", 
false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22775,11 +23017,12 @@ func TestExprBinary_Pow(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22861,11 +23104,12 @@ func TestExprBinary_ShiftLeft(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22947,11 +23191,12 @@ func TestExprBinary_ShiftRight(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23033,11 +23278,12 @@ func TestExprBinary_SmallerOrEqual(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23119,11 +23365,12 @@ func TestExprBinary_Smaller(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23186,11 +23433,12 @@ func TestExprCast_Array(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23251,11 +23499,12 @@ func TestExprCast_Bool(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23316,11 +23565,12 @@ func TestExprCast_BoolShort(t 
*testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23381,11 +23631,12 @@ func TestExprCast_Double(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23446,11 +23697,12 @@ func TestExprCast_CastFloat(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23511,11 +23763,12 @@ func TestExprCast_Int(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23576,11 +23829,12 @@ func TestExprCast_IntShort(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23641,11 +23895,12 @@ func TestExprCast_Object(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23706,11 +23961,12 @@ func TestExprCast_String(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23771,11 +24027,12 @@ func TestExprCast_BinaryString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ 
-23836,10 +24093,11 @@ func TestExprCast_Unset(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } diff --git a/internal/php5/php5.go b/internal/php5/php5.go index 1134714..f05d452 100644 Binary files a/internal/php5/php5.go and b/internal/php5/php5.go differ diff --git a/internal/php5/php5.y b/internal/php5/php5.y index 63b1661..d328d44 100644 --- a/internal/php5/php5.y +++ b/internal/php5/php5.y @@ -5,17 +5,16 @@ import ( "bytes" "strconv" - "github.com/z7zmey/php-parser/internal/position" - "github.com/z7zmey/php-parser/internal/scanner" - "github.com/z7zmey/php-parser/pkg/ast" - "github.com/z7zmey/php-parser/pkg/token" + "github.com/z7zmey/php-parser/internal/position" + "github.com/z7zmey/php-parser/pkg/ast" + "github.com/z7zmey/php-parser/pkg/token" ) %} %union{ node ast.Vertex - token *scanner.Token + token *token.Token list []ast.Vertex simpleIndirectReference simpleIndirectReference @@ -25,7 +24,6 @@ import ( ClosureUse *ast.ExprClosureUse } -%type $unk %token T_INCLUDE %token T_INCLUDE_ONCE %token T_EXIT @@ -278,9 +276,7 @@ start: yylex.(*Parser).rootNode = &ast.Root{ast.Node{}, $1} yylex.(*Parser).rootNode.GetNode().Position = position.NewNodeListPosition($1) - yylex.(*Parser).setFreeFloating(yylex.(*Parser).rootNode, token.End, yylex.(*Parser).currentToken.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(yylex.(*Parser).rootNode, token.End, yylex.(*Parser).currentToken.SkippedTokens) } ; @@ -295,14 +291,10 @@ top_statement_list: if $2 != nil { $$ = append($1, $2) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -316,9 +308,7 @@ namespace_name: namePart.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(namePart, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(namePart, token.Start, $1.SkippedTokens) } | namespace_name T_NS_SEPARATOR T_STRING { @@ -329,10 +319,8 @@ namespace_name: namePart.GetNode().Position = position.NewTokenPosition($3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(namePart, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(namePart, token.Start, $3.SkippedTokens) } ; @@ -341,26 +329,18 @@ top_statement: { // error $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | function_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_HALT_COMPILER '(' ')' ';' { @@ -370,10 +350,8 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.Tokens, 
append($3.Tokens, $4.Tokens...)...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.SkippedTokens, append($3.SkippedTokens, $4.SkippedTokens...)...)) } | T_NAMESPACE namespace_name ';' { @@ -385,11 +363,9 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(name, token.End, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.End, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_NAMESPACE namespace_name '{' top_statement_list '}' { @@ -401,11 +377,9 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(name, token.End, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.End, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $5.SkippedTokens) } | T_NAMESPACE '{' top_statement_list '}' { @@ -415,11 +389,9 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.SkippedTokens) } | T_USE use_declarations ';' { @@ -431,10 +403,8 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | T_USE T_FUNCTION use_function_declarations ';' { @@ -450,11 +420,9 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating(identifier, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(identifier, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.SkippedTokens) } | T_USE T_CONST use_const_declarations ';' { @@ -470,11 +438,9 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating(identifier, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - 
yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(identifier, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.SkippedTokens) } | constant_declaration ';' { @@ -484,10 +450,8 @@ top_statement: $$.GetNode().Position = position.NewNodeTokenPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Stmts, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } ; @@ -497,15 +461,11 @@ use_declarations: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | use_declaration { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -518,8 +478,6 @@ use_declaration: // save position name.GetNode().Position = position.NewNodeListPosition($1) $$.GetNode().Position = position.NewNodeListPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | namespace_name T_AS T_STRING { @@ -535,10 +493,8 @@ use_declaration: $$.GetNode().Position = position.NewNodeListTokenPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $3.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -552,9 +508,7 @@ use_declaration: $$.GetNode().Position = position.NewTokenNodePosition($1, name) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NS_SEPARATOR namespace_name T_AS T_STRING { @@ -572,11 +526,9 @@ use_declaration: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(asAlias, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(asAlias, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $4.SkippedTokens) } ; @@ -586,15 +538,11 @@ use_function_declarations: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | use_function_declaration { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -607,8 +555,6 @@ use_function_declaration: // save position name.GetNode().Position = position.NewNodeListPosition($1) $$.GetNode().Position = position.NewNodeListPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | namespace_name T_AS T_STRING { @@ 
-624,10 +570,8 @@ use_function_declaration: $$.GetNode().Position = position.NewNodeListTokenPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $3.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -641,9 +585,7 @@ use_function_declaration: $$.GetNode().Position = position.NewTokenNodePosition($1, name) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NS_SEPARATOR namespace_name T_AS T_STRING { @@ -661,11 +603,9 @@ use_function_declaration: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(asAlias, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(asAlias, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $4.SkippedTokens) } ; @@ -675,15 +615,11 @@ use_const_declarations: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | use_const_declaration { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -696,8 +632,6 @@ use_const_declaration: // save position name.GetNode().Position = position.NewNodeListPosition($1) $$.GetNode().Position = position.NewNodeListPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | namespace_name T_AS T_STRING { @@ -713,10 +647,8 @@ use_const_declaration: $$.GetNode().Position = position.NewNodeListTokenPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $3.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -730,9 +662,7 @@ use_const_declaration: $$.GetNode().Position = position.NewTokenNodePosition($1, name) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NS_SEPARATOR namespace_name T_AS T_STRING { @@ -750,11 +680,9 @@ use_const_declaration: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(asAlias, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(asAlias, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, 
$4.SkippedTokens) } ; @@ -774,11 +702,9 @@ constant_declaration: $$.GetNode().Position = position.NewNodeNodeListPosition($1, constList.Consts) // save comments - yylex.(*Parser).setFreeFloating(lastConst, token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Name, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastConst, token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Name, $4.SkippedTokens) } | T_CONST T_STRING '=' static_scalar { @@ -793,11 +719,9 @@ constant_declaration: $$.GetNode().Position = position.NewTokenNodeListPosition($1, constList) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Name, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Name, $3.SkippedTokens) } ; @@ -812,14 +736,10 @@ inner_statement_list: if $2 != nil { $$ = append($1, $2) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -829,26 +749,18 @@ inner_statement: { // error $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | function_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_HALT_COMPILER '(' ')' ';' { @@ -858,10 +770,8 @@ inner_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.Tokens, append($3.Tokens, $4.Tokens...)...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.SkippedTokens, append($3.SkippedTokens, $4.SkippedTokens...)...)) } ; @@ -870,8 +780,6 @@ statement: unticked_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_STRING ':' { @@ -883,10 +791,8 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Label, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Label, $2.SkippedTokens) } ; @@ -899,10 +805,8 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) } | T_IF parenthesis_expr statement elseif_list 
else_single { @@ -918,9 +822,7 @@ unticked_statement: } // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_IF parenthesis_expr ':' inner_statement_list new_elseif_list new_else_single T_ENDIF ';' { @@ -934,18 +836,16 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $8) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $3.SkippedTokens) if $6 != nil { - yylex.(*Parser).setFreeFloating($6.(*ast.StmtAltElse).Stmt, token.End, append($7.Tokens, $8.Tokens...)) + yylex.(*Parser).setFreeFloating($6.(*ast.StmtAltElse).Stmt, token.End, append($7.SkippedTokens, $8.SkippedTokens...)) } else if len($5) > 0 { - yylex.(*Parser).setFreeFloating($5[len($5)-1].(*ast.StmtAltElseIf).Stmt, token.End, append($7.Tokens, $8.Tokens...)) + yylex.(*Parser).setFreeFloating($5[len($5)-1].(*ast.StmtAltElseIf).Stmt, token.End, append($7.SkippedTokens, $8.SkippedTokens...)) } else { - yylex.(*Parser).setFreeFloating(stmtsBrackets, token.End, append($7.Tokens, $8.Tokens...)) + yylex.(*Parser).setFreeFloating(stmtsBrackets, token.End, append($7.SkippedTokens, $8.SkippedTokens...)) } - yylex.(*Parser).setToken($$, token.SemiColon, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.SemiColon, $8.SkippedTokens) } | T_WHILE parenthesis_expr while_statement { @@ -962,9 +862,7 @@ unticked_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_DO statement T_WHILE parenthesis_expr ';' { @@ -974,12 +872,10 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($4, token.End, $5.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($4, token.End, $5.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $5.SkippedTokens) } | T_FOR '(' for_expr ';' for_expr ';' for_expr ')' for_statement { @@ -1000,13 +896,11 @@ unticked_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $9) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.For, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.InitExpr, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.CondExpr, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.IncExpr, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.For, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.InitExpr, $4.SkippedTokens) + 
yylex.(*Parser).setFreeFloating($$, token.CondExpr, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.IncExpr, $8.SkippedTokens) } | T_SWITCH parenthesis_expr switch_case_list { @@ -1025,9 +919,7 @@ unticked_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_BREAK ';' { @@ -1037,11 +929,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | T_BREAK expr ';' { @@ -1051,11 +941,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_CONTINUE ';' { @@ -1065,11 +953,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | T_CONTINUE expr ';' { @@ -1079,11 +965,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_RETURN ';' { @@ -1093,11 +977,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | T_RETURN expr_without_variable ';' { @@ -1107,11 +989,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save 
comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_RETURN variable ';' { @@ -1121,11 +1001,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | yield_expr ';' { @@ -1136,10 +1014,8 @@ unticked_statement: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | T_GLOBAL global_var_list ';' { @@ -1149,11 +1025,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.VarList, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_STATIC static_var_list ';' { @@ -1163,11 +1037,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.VarList, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_ECHO echo_expr_list ';' { @@ -1177,12 +1049,10 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Echo, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Echo, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_INLINE_HTML { @@ -1192,9 +1062,7 @@ unticked_statement: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, 
token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | expr ';' { @@ -1205,10 +1073,8 @@ unticked_statement: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | T_UNSET '(' unset_variables ')' ';' { @@ -1218,13 +1084,11 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Unset, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.VarList, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.CloseParenthesisToken, $5.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Unset, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.CloseParenthesisToken, $5.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $5.SkippedTokens) } | T_FOREACH '(' variable T_AS foreach_variable foreach_optional_arg ')' foreach_statement { @@ -1256,15 +1120,13 @@ unticked_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $8) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $4.SkippedTokens) if $6 != nil { yylex.(*Parser).setFreeFloatingTokens($$, token.Key, $6.GetNode().Tokens[token.Key]); delete($6.GetNode().Tokens, token.Key) } - yylex.(*Parser).setFreeFloating($$, token.Var, $7.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $7.SkippedTokens) } | T_FOREACH '(' expr_without_variable T_AS foreach_variable foreach_optional_arg ')' foreach_statement { @@ -1296,15 +1158,13 @@ unticked_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $8) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $4.SkippedTokens) if $6 != nil { yylex.(*Parser).setFreeFloatingTokens($$, token.Key, $6.GetNode().Tokens[token.Key]); delete($6.GetNode().Tokens, token.Key) } - yylex.(*Parser).setFreeFloating($$, token.Var, $7.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $7.SkippedTokens) } | T_DECLARE '(' declare_list ')' declare_statement { @@ -1315,11 +1175,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $5) // save comments - 
yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Declare, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ConstList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Declare, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ConstList, $4.SkippedTokens) } | ';' { @@ -1329,10 +1187,8 @@ unticked_statement: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $1.SkippedTokens) } | T_TRY '{' inner_statement_list '}' catch_statement finally_statement { @@ -1346,11 +1202,9 @@ unticked_statement: } // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Try, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Try, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.SkippedTokens) } | T_THROW expr ';' { @@ -1360,11 +1214,9 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_GOTO T_STRING ';' { @@ -1376,12 +1228,10 @@ unticked_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(label, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Label, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(label, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Label, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } ; @@ -1389,8 +1239,6 @@ catch_statement: /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_CATCH '(' fully_qualified_class_name T_VARIABLE ')' '{' inner_statement_list '}' additional_catches { @@ -1405,14 +1253,12 @@ catch_statement: catchNode.GetNode().Position = position.NewTokensPosition($1, $8) // save comments - yylex.(*Parser).setFreeFloating(catchNode, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(catchNode, token.Catch, $2.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.Start, $4.Tokens) - yylex.(*Parser).setFreeFloating(catchNode, token.Var, $5.Tokens) - yylex.(*Parser).setFreeFloating(catchNode, token.Cond, $6.Tokens) - yylex.(*Parser).setFreeFloating(catchNode, token.Stmts, 
$8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(catchNode, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(catchNode, token.Catch, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating(catchNode, token.Var, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating(catchNode, token.Cond, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating(catchNode, token.Stmts, $8.SkippedTokens) } ; @@ -1420,8 +1266,6 @@ finally_statement: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_FINALLY '{' inner_statement_list '}' { @@ -1431,11 +1275,9 @@ finally_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Finally, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Finally, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.SkippedTokens) } ; @@ -1443,14 +1285,10 @@ additional_catches: non_empty_additional_catches { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1458,14 +1296,10 @@ non_empty_additional_catches: additional_catch { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_additional_catches additional_catch { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1482,14 +1316,12 @@ additional_catch: $$.GetNode().Position = position.NewTokensPosition($1, $8) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Catch, $2.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.Start, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $5.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Cond, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Catch, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Cond, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $8.SkippedTokens) } ; @@ -1497,17 +1329,13 @@ unset_variables: unset_variable { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | unset_variables ',' unset_variable { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -1515,8 +1343,6 @@ unset_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1524,8 +1350,6 @@ function_declaration_statement: unticked_function_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1533,8 +1357,6 @@ class_declaration_statement: unticked_class_declaration_statement { $$ = $1 - 
- yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1571,19 +1393,17 @@ unticked_function_declaration_statement: $$.GetNode().Position = position.NewTokensPosition($1, $9) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) if $2 != nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $2.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $3.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating(name, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $3.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.Name, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ParamList, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Params, $7.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $9.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ParamList, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Params, $7.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $9.SkippedTokens) } ; @@ -1610,11 +1430,9 @@ unticked_class_declaration_statement: $$.GetNode().Position = position.NewNodeTokenPosition($1, $7) // save comments - yylex.(*Parser).setFreeFloating(name, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $5.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $7.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(name, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $7.SkippedTokens) } | interface_entry T_STRING interface_extends_list '{' class_statement_list '}' { @@ -1626,12 +1444,10 @@ unticked_class_declaration_statement: $$.GetNode().Position = position.NewTokensPosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $6.SkippedTokens) } ; @@ -1645,9 +1461,7 @@ class_entry_type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_ABSTRACT T_CLASS { @@ -1659,10 +1473,8 @@ class_entry_type: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.SkippedTokens) } | T_TRAIT { @@ -1672,9 
+1484,7 @@ class_entry_type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FINAL T_CLASS { @@ -1686,10 +1496,8 @@ class_entry_type: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.SkippedTokens) } ; @@ -1697,8 +1505,6 @@ extends_from: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_EXTENDS fully_qualified_class_name { @@ -1708,9 +1514,7 @@ extends_from: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -1725,8 +1529,6 @@ interface_extends_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_EXTENDS interface_list { @@ -1736,9 +1538,7 @@ interface_extends_list: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -1746,8 +1546,6 @@ implements_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_IMPLEMENTS interface_list { @@ -1757,9 +1555,7 @@ implements_list: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -1767,17 +1563,13 @@ interface_list: fully_qualified_class_name { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | interface_list ',' fully_qualified_class_name { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -1785,17 +1577,13 @@ foreach_optional_arg: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_DOUBLE_ARROW foreach_variable { $$ = $2 // save comments - yylex.(*Parser).setFreeFloating($$, token.Key, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Key, $1.SkippedTokens) } ; @@ -1803,8 +1591,6 @@ foreach_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '&' variable { @@ -1814,9 +1600,7 @@ foreach_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_LIST '(' assignment_list ')' { @@ -1826,11 +1610,9 @@ foreach_variable: $$.GetNode().Position = 
position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.List, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.List, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.SkippedTokens) } ; @@ -1841,8 +1623,6 @@ for_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDFOR ';' { @@ -1854,12 +1634,10 @@ for_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -1870,8 +1648,6 @@ foreach_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDFOREACH ';' { @@ -1883,12 +1659,10 @@ foreach_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -1900,8 +1674,6 @@ declare_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDDECLARE ';' { @@ -1913,12 +1685,10 @@ declare_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -1935,10 +1705,8 @@ declare_list: constant.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(constant, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, 
&yyVAL) + yylex.(*Parser).setFreeFloating(constant, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Name, $2.SkippedTokens) } | declare_list ',' T_STRING '=' static_scalar { @@ -1951,11 +1719,9 @@ declare_list: constant.GetNode().Position = position.NewTokenNodePosition($3, $5) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Name, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Name, $4.SkippedTokens) } ; @@ -1971,10 +1737,8 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.SkippedTokens) } | '{' ';' case_list '}' { @@ -1986,11 +1750,9 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.SkippedTokens) } | ':' case_list T_ENDSWITCH ';' { @@ -2002,12 +1764,10 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } | ':' ';' case_list T_ENDSWITCH ';' { @@ -2020,13 +1780,11 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $5.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, 
token.AltEnd, $5.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $5.SkippedTokens) } ; @@ -2035,8 +1793,6 @@ case_list: /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | case_list T_CASE expr case_separator inner_statement_list { @@ -2047,11 +1803,9 @@ case_list: _case.GetNode().Position = position.NewTokenNodeListPosition($2, $5) // save comments - yylex.(*Parser).setFreeFloating(_case, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(_case, token.Expr, $4.Tokens) - yylex.(*Parser).setToken(_case, token.CaseSeparator, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_case, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(_case, token.Expr, $4.SkippedTokens) + yylex.(*Parser).setToken(_case, token.CaseSeparator, $4.SkippedTokens) } | case_list T_DEFAULT case_separator inner_statement_list { @@ -2062,11 +1816,9 @@ case_list: _default.GetNode().Position = position.NewTokenNodeListPosition($2, $4) // save comments - yylex.(*Parser).setFreeFloating(_default, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(_default, token.Default, $3.Tokens) - yylex.(*Parser).setToken(_default, token.CaseSeparator, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_default, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(_default, token.Default, $3.SkippedTokens) + yylex.(*Parser).setToken(_default, token.CaseSeparator, $3.SkippedTokens) } ; @@ -2090,8 +1842,6 @@ while_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDWHILE ';' { @@ -2103,12 +1853,10 @@ while_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -2118,8 +1866,6 @@ elseif_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | elseif_list T_ELSEIF parenthesis_expr statement { @@ -2130,9 +1876,7 @@ elseif_list: _elseIf.GetNode().Position = position.NewTokenNodePosition($2, $4) // save comments - yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.SkippedTokens) } ; @@ -2141,8 +1885,6 @@ new_elseif_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | new_elseif_list T_ELSEIF parenthesis_expr ':' inner_statement_list { @@ -2157,10 +1899,8 @@ new_elseif_list: _elseIf.GetNode().Position = position.NewTokenNodeListPosition($2, $5) // save comments - yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.SkippedTokens) + 
yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $4.SkippedTokens) } ; @@ -2169,8 +1909,6 @@ else_single: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ELSE statement { @@ -2180,9 +1918,7 @@ else_single: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -2191,8 +1927,6 @@ new_else_single: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ELSE ':' inner_statement_list { @@ -2206,10 +1940,8 @@ new_else_single: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $2.SkippedTokens) } ; @@ -2218,14 +1950,10 @@ parameter_list: non_empty_parameter_list { $$ = $1; - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2233,17 +1961,13 @@ non_empty_parameter_list: parameter { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_parameter_list ',' parameter { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -2256,18 +1980,18 @@ parameter: var variable ast.Vertex variable = &ast.ExprVariable{ast.Node{}, identifier} variable.GetNode().Position = position.NewTokenPosition($4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $4.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens) if $3 != nil { variable = &ast.Variadic{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($3, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $3.SkippedTokens) } if $2 != nil { variable = &ast.Reference{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($2, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $2.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $2.SkippedTokens) } $$ = &ast.Parameter{ast.Node{}, $1, variable, nil} @@ -2281,8 +2005,6 @@ parameter: } else { $$.GetNode().Position = position.NewTokenPosition($4) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | optional_class_type is_reference is_variadic T_VARIABLE '=' static_scalar { @@ -2292,19 +2014,19 @@ parameter: var variable ast.Vertex variable = &ast.ExprVariable{ast.Node{}, identifier} variable.GetNode().Position = position.NewTokenPosition($4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $4.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.End, $5.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.End, $5.SkippedTokens) if $3 != nil { variable = &ast.Variadic{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($3, $4) - 
yylex.(*Parser).setFreeFloating(variable, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $3.SkippedTokens) } if $2 != nil { variable = &ast.Reference{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($2, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $2.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $2.SkippedTokens) } $$ = &ast.Parameter{ast.Node{}, $1, variable, $6} @@ -2318,8 +2040,6 @@ parameter: } else { $$.GetNode().Position = position.NewTokenNodePosition($4, $6) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2328,8 +2048,6 @@ optional_class_type: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ARRAY { @@ -2339,9 +2057,7 @@ optional_class_type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_CALLABLE { @@ -2351,15 +2067,11 @@ optional_class_type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | fully_qualified_class_name { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2373,10 +2085,8 @@ function_call_parameter_list: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $2.SkippedTokens) } | '(' non_empty_function_call_parameter_list ')' { @@ -2386,10 +2096,8 @@ function_call_parameter_list: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | '(' yield_expr ')' { @@ -2401,10 +2109,8 @@ function_call_parameter_list: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } ; @@ -2413,17 +2119,13 @@ non_empty_function_call_parameter_list: function_call_parameter { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_function_call_parameter_list ',' function_call_parameter { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -2437,8 +2139,6 @@ function_call_parameter: // save comments 
yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | variable { @@ -2449,8 +2149,6 @@ function_call_parameter: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '&' w_variable { @@ -2460,9 +2158,7 @@ function_call_parameter: $$.GetNode().Position = position.NewNodePosition($2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_ELLIPSIS expr { @@ -2472,9 +2168,7 @@ function_call_parameter: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -2484,15 +2178,11 @@ global_var_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | global_var { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2508,9 +2198,7 @@ global_var: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '$' r_variable { @@ -2520,9 +2208,7 @@ global_var: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '$' '{' expr '}' { @@ -2532,11 +2218,9 @@ global_var: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($3, token.Start, append($2.Tokens, $3.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($3, token.End, append($3.GetNode().Tokens[token.End], $4.Tokens...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($3, token.Start, append($2.SkippedTokens, $3.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($3, token.End, append($3.GetNode().Tokens[token.End], $4.SkippedTokens...)) } ; @@ -2555,10 +2239,8 @@ static_var_list: staticVar.GetNode().Position = position.NewTokenPosition($3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(staticVar, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(staticVar, token.Start, $3.SkippedTokens) } | static_var_list ',' T_VARIABLE '=' static_scalar { @@ -2573,11 +2255,9 @@ static_var_list: staticVar.GetNode().Position = position.NewTokenNodePosition($3, $5) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(staticVar, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(staticVar, token.Var, 
$4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(staticVar, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(staticVar, token.Var, $4.SkippedTokens) } | T_VARIABLE { @@ -2592,9 +2272,7 @@ static_var_list: staticVar.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(staticVar, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(staticVar, token.Start, $1.SkippedTokens) } | T_VARIABLE '=' static_scalar { @@ -2609,10 +2287,8 @@ static_var_list: staticVar.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(staticVar, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(staticVar, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(staticVar, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(staticVar, token.Var, $2.SkippedTokens) } ; @@ -2621,14 +2297,10 @@ class_statement_list: class_statement_list class_statement { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2643,10 +2315,8 @@ class_statement: // save comments yylex.(*Parser).MoveFreeFloating($1[0], $$) - yylex.(*Parser).setFreeFloating($$, token.PropertyList, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.PropertyList, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | class_constant_declaration ';' { @@ -2656,16 +2326,12 @@ class_statement: $$.GetNode().Position = position.NewNodeTokenPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.ConstList, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.ConstList, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | trait_use_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | method_modifiers function is_reference T_STRING '(' parameter_list ')' method_body { @@ -2683,20 +2349,18 @@ class_statement: // save comments if len($1) > 0 { yylex.(*Parser).MoveFreeFloating($1[0], $$) - yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Start, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $2.SkippedTokens) } if $3 == nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $4.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Function, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.Name, $5.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ParameterList, $7.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, 
token.Name, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ParameterList, $7.SkippedTokens) } ; @@ -2709,9 +2373,7 @@ trait_use_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -2719,17 +2381,13 @@ trait_list: fully_qualified_class_name { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | trait_list ',' fully_qualified_class_name { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -2741,10 +2399,8 @@ trait_adaptations: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $1.SkippedTokens) } | '{' trait_adaptation_list '}' { @@ -2753,10 +2409,8 @@ trait_adaptations: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AdaptationList, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AdaptationList, $3.SkippedTokens) } ; @@ -2764,14 +2418,10 @@ trait_adaptation_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_trait_adaptation_list { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2779,14 +2429,10 @@ non_empty_trait_adaptation_list: trait_adaptation_statement { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_trait_adaptation_list trait_adaptation_statement { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2796,20 +2442,16 @@ trait_adaptation_statement: $$ = $1; // save comments - yylex.(*Parser).setFreeFloating($$, token.NameList, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.NameList, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | trait_alias ';' { $$ = $1; // save comments - yylex.(*Parser).setFreeFloating($$, token.Alias, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Alias, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } ; @@ -2823,9 +2465,7 @@ trait_precedence: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) } ; @@ -2833,17 +2473,13 @@ trait_reference_list: fully_qualified_class_name { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | trait_reference_list ',' 
fully_qualified_class_name { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -2858,15 +2494,11 @@ trait_method_reference: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | trait_method_reference_fully_qualified { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2882,10 +2514,8 @@ trait_method_reference_fully_qualified: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $2.SkippedTokens) } ; @@ -2901,10 +2531,8 @@ trait_alias: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $4.SkippedTokens) } | trait_method_reference T_AS member_modifier { @@ -2915,9 +2543,7 @@ trait_alias: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) } ; @@ -2925,14 +2551,10 @@ trait_modifiers: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | member_modifier { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2945,10 +2567,8 @@ method_body: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $1.SkippedTokens) } | '{' inner_statement_list '}' { @@ -2958,10 +2578,8 @@ method_body: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) } ; @@ -2969,8 +2587,6 @@ variable_modifiers: non_empty_member_modifiers { $$ = $1; - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_VAR { @@ -2981,9 +2597,7 @@ variable_modifiers: modifier.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(modifier, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(modifier, token.Start, $1.SkippedTokens) } ; @@ -2991,14 +2605,10 @@ method_modifiers: /* empty */ { $$ = nil - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_member_modifiers { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3006,14 +2616,10 @@ non_empty_member_modifiers: member_modifier { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_member_modifiers member_modifier { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3026,9 +2632,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_PROTECTED { @@ -3038,9 +2642,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_PRIVATE { @@ -3050,9 +2652,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_STATIC { @@ -3062,9 +2662,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_ABSTRACT { @@ -3074,9 +2672,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FINAL { @@ -3086,9 +2682,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -3106,10 +2700,8 @@ class_variable_declaration: property.GetNode().Position = position.NewTokenPosition($3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(property, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(property, token.Start, $3.SkippedTokens) } | class_variable_declaration ',' T_VARIABLE '=' static_scalar { @@ -3124,11 +2716,9 @@ class_variable_declaration: property.GetNode().Position = position.NewTokenNodePosition($3, $5) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(property, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(property, token.Var, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(property, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(property, token.Var, $4.SkippedTokens) } | T_VARIABLE { @@ -3143,9 +2733,7 @@ class_variable_declaration: property.GetNode().Position = 
position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(property, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(property, token.Start, $1.SkippedTokens) } | T_VARIABLE '=' static_scalar { @@ -3160,10 +2748,8 @@ class_variable_declaration: property.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(property, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(property, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(property, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(property, token.Var, $2.SkippedTokens) } ; @@ -3183,11 +2769,9 @@ class_constant_declaration: $1.GetNode().Position = position.NewNodesPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating(lastConst, token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Name, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastConst, token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Name, $4.SkippedTokens) } | T_CONST T_STRING '=' static_scalar { @@ -3201,11 +2785,9 @@ class_constant_declaration: $$.GetNode().Position = position.NewTokenNodePosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(constant, token.Name, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(constant, token.Name, $3.SkippedTokens) } ; @@ -3215,15 +2797,11 @@ echo_expr_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | expr { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3232,14 +2810,10 @@ for_expr: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_for_expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3249,15 +2823,11 @@ non_empty_for_expr: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | expr { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3265,14 +2835,10 @@ chaining_method_or_property: chaining_method_or_property variable_property { $$ = append($1, $2...) 
- - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | variable_property { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3286,10 +2852,8 @@ chaining_dereference: fetch.GetNode().Position = position.NewNodePosition($3) // save comments - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.SkippedTokens) } | '[' dim_offset ']' { @@ -3300,10 +2864,8 @@ chaining_dereference: fetch.GetNode().Position = position.NewNodePosition($2) // save comments - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $3.SkippedTokens) } ; @@ -3311,20 +2873,14 @@ chaining_instance_call: chaining_dereference chaining_method_or_property { $$ = append($1, $2...) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | chaining_dereference { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | chaining_method_or_property { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3332,14 +2888,10 @@ instance_call: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | chaining_instance_call { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3356,9 +2908,7 @@ new_expr: } // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -3373,12 +2923,10 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.List, $2.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.List, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $5.SkippedTokens) } | variable '=' expr { @@ -3389,9 +2937,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable '=' '&' variable { @@ -3402,10 +2948,8 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Equal, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Equal, $3.SkippedTokens) } | variable '=' '&' T_NEW class_name_reference ctor_arguments { @@ -3428,11 +2972,9 @@ expr_without_variable: // 
save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Equal, $3.Tokens) - yylex.(*Parser).setFreeFloating(_new, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Equal, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(_new, token.Start, $4.SkippedTokens) } | T_CLONE expr { @@ -3442,9 +2984,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | variable T_PLUS_EQUAL expr { @@ -3454,9 +2994,7 @@ expr_without_variable: $$.GetNode().Position = position.NewNodesPosition($1, $3) yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_MINUS_EQUAL expr { @@ -3467,9 +3005,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_MUL_EQUAL expr { @@ -3480,9 +3016,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_POW_EQUAL expr { @@ -3493,9 +3027,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_DIV_EQUAL expr { @@ -3506,9 +3038,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_CONCAT_EQUAL expr { @@ -3519,9 +3049,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_MOD_EQUAL expr { @@ -3532,9 +3060,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_AND_EQUAL expr { @@ -3545,9 +3071,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_OR_EQUAL expr { @@ -3558,9 +3082,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, 
$$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_XOR_EQUAL expr { @@ -3571,9 +3093,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_SL_EQUAL expr { @@ -3584,9 +3104,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_SR_EQUAL expr { @@ -3597,9 +3115,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | rw_variable T_INC { @@ -3610,9 +3126,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | T_INC rw_variable { @@ -3622,9 +3136,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | rw_variable T_DEC { @@ -3635,9 +3147,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | T_DEC rw_variable { @@ -3647,9 +3157,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | expr T_BOOLEAN_OR expr { @@ -3660,9 +3168,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_BOOLEAN_AND expr { @@ -3673,9 +3179,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_LOGICAL_OR expr { @@ -3686,9 +3190,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_LOGICAL_AND expr { @@ -3699,9 +3201,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, 
token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_LOGICAL_XOR expr { @@ -3712,9 +3212,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '|' expr { @@ -3725,9 +3223,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '&' expr { @@ -3738,9 +3234,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '^' expr { @@ -3751,9 +3245,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '.' expr { @@ -3764,9 +3256,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '+' expr { @@ -3777,9 +3267,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '-' expr { @@ -3790,9 +3278,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '*' expr { @@ -3803,9 +3289,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_POW expr { @@ -3816,9 +3300,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '/' expr { @@ -3829,9 +3311,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '%' expr { @@ -3842,9 +3322,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | 
expr T_SL expr { @@ -3855,9 +3333,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_SR expr { @@ -3868,9 +3344,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | '+' expr %prec T_INC { @@ -3880,9 +3354,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '-' expr %prec T_INC { @@ -3892,9 +3364,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '!' expr { @@ -3904,9 +3374,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '~' expr { @@ -3916,9 +3384,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | expr T_IS_IDENTICAL expr { @@ -3929,9 +3395,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_NOT_IDENTICAL expr { @@ -3942,9 +3406,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_EQUAL expr { @@ -3955,9 +3417,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_NOT_EQUAL expr { @@ -3968,10 +3428,8 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.Equal, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.Equal, $2.SkippedTokens) } | expr '<' expr { @@ -3982,9 +3440,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_SMALLER_OR_EQUAL expr { @@ -3995,9 +3451,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '>' expr { @@ -4008,9 +3462,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_GREATER_OR_EQUAL expr { @@ -4021,9 +3473,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_INSTANCEOF class_name_reference { @@ -4034,29 +3484,23 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | parenthesis_expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | new_expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '(' new_expr ')' instance_call { $$ = $2 // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.Tokens, $$.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.Tokens...)) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.SkippedTokens, $$.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.SkippedTokens...)) for _, n := range($4) { switch nn := n.(type) { @@ -4079,8 +3523,6 @@ expr_without_variable: // save position $$.GetNode().Position = position.NewNodesPosition($$, n) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr '?' expr ':' expr { @@ -4091,10 +3533,8 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Cond, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.True, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.True, $4.SkippedTokens) } | expr '?' 
':' expr { @@ -4105,16 +3545,12 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Cond, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.True, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.True, $3.SkippedTokens) } | internal_functions_in_yacc { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_INT_CAST expr { @@ -4124,10 +3560,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_DOUBLE_CAST expr { @@ -4137,10 +3571,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_STRING_CAST expr { @@ -4150,10 +3582,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_ARRAY_CAST expr { @@ -4163,10 +3593,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_OBJECT_CAST expr { @@ -4176,10 +3604,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_BOOL_CAST expr { @@ -4189,10 +3615,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_UNSET_CAST expr { @@ -4202,10 +3626,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) 
+ yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_EXIT exit_expr { @@ -4223,9 +3645,7 @@ expr_without_variable: } // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '@' expr { @@ -4235,27 +3655,19 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | combined_scalar_offset { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | combined_scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '`' backticks_expr '`' { @@ -4265,9 +3677,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_PRINT expr { @@ -4277,9 +3687,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_YIELD { @@ -4289,9 +3697,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | function is_reference '(' parameter_list ')' lexical_vars '{' inner_statement_list '}' { @@ -4301,23 +3707,21 @@ expr_without_variable: $$.GetNode().Position = position.NewTokensPosition($1, $9) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) if $2 == nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $3.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $3.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Function, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Ampersand, $3.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Ampersand, $3.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.ParameterList, $5.Tokens) - yylex.(*Parser).setFreeFloating($$, token.LexicalVars, $7.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $9.Tokens) + yylex.(*Parser).setFreeFloating($$, token.ParameterList, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.LexicalVars, $7.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $9.SkippedTokens) // normalize if $6 == nil { yylex.(*Parser).setFreeFloatingTokens($$, token.Params, $$.GetNode().Tokens[token.LexicalVars]); delete($$.GetNode().Tokens, token.LexicalVars) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_STATIC function is_reference '(' parameter_list ')' lexical_vars '{' inner_statement_list '}' { @@ -4327,24 +3731,22 @@ 
expr_without_variable: $$.GetNode().Position = position.NewTokensPosition($1, $10) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Static, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Static, $2.SkippedTokens) if $3 == nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $4.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Function, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.ParameterList, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.LexicalVars, $8.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $10.Tokens) + yylex.(*Parser).setFreeFloating($$, token.ParameterList, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.LexicalVars, $8.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $10.SkippedTokens) // normalize if $7 == nil { yylex.(*Parser).setFreeFloatingTokens($$, token.Params, $$.GetNode().Tokens[token.LexicalVars]); delete($$.GetNode().Tokens, token.LexicalVars) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4357,9 +3759,7 @@ yield_expr: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_YIELD variable { @@ -4369,9 +3769,7 @@ yield_expr: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_YIELD expr T_DOUBLE_ARROW expr_without_variable { @@ -4381,10 +3779,8 @@ yield_expr: $$.GetNode().Position = position.NewTokenNodePosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) } | T_YIELD expr T_DOUBLE_ARROW variable { @@ -4394,10 +3790,8 @@ yield_expr: $$.GetNode().Position = position.NewTokenNodePosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) } ; @@ -4410,10 +3804,8 @@ combined_scalar_offset: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | combined_scalar_offset '[' dim_offset ']' 
{ @@ -4423,10 +3815,8 @@ combined_scalar_offset: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | T_CONSTANT_ENCAPSED_STRING '[' dim_offset ']' { @@ -4438,11 +3828,9 @@ combined_scalar_offset: $$.GetNode().Position = position.NewNodeTokenPosition(str, $4) // save comments - yylex.(*Parser).setFreeFloating(str, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(str, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | general_constant '[' dim_offset ']' { @@ -4452,10 +3840,8 @@ combined_scalar_offset: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } ; @@ -4468,11 +3854,9 @@ combined_scalar: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Array, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Array, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.SkippedTokens) } | '[' array_pair_list ']' { @@ -4482,10 +3866,8 @@ combined_scalar: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.SkippedTokens) } ; @@ -4500,8 +3882,6 @@ lexical_vars: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_USE '(' lexical_var_list ')' { @@ -4511,11 +3891,9 @@ lexical_vars: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Use, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.LexicalVarList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Use, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.LexicalVarList, $4.SkippedTokens) } ; @@ -4531,10 +3909,8 @@ lexical_var_list: variable.GetNode().Position = position.NewTokenPosition($3) // save comments - 
yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $3.SkippedTokens) } | lexical_var_list ',' '&' T_VARIABLE { @@ -4549,11 +3925,9 @@ lexical_var_list: reference.GetNode().Position = position.NewTokensPosition($3, $4) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(reference, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(reference, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens) } | T_VARIABLE { @@ -4566,9 +3940,7 @@ lexical_var_list: variable.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(variable, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(variable, token.Start, $1.SkippedTokens) } | '&' T_VARIABLE { @@ -4583,10 +3955,8 @@ lexical_var_list: reference.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating(reference, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(reference, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $2.SkippedTokens) } ; @@ -4599,8 +3969,6 @@ function_call: // save position name.GetNode().Position = position.NewNodeListPosition($1) $$.GetNode().Position = position.NewNodesPosition(name, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_NAMESPACE T_NS_SEPARATOR namespace_name function_call_parameter_list { @@ -4612,10 +3980,8 @@ function_call: $$.GetNode().Position = position.NewNodesPosition(funcName, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(funcName, token.Namespace, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(funcName, token.Namespace, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name function_call_parameter_list { @@ -4627,9 +3993,7 @@ function_call: $$.GetNode().Position = position.NewNodesPosition(funcName, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | class_name T_PAAMAYIM_NEKUDOTAYIM variable_name function_call_parameter_list { @@ -4640,9 +4004,7 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | class_name T_PAAMAYIM_NEKUDOTAYIM variable_without_objects function_call_parameter_list { @@ -4653,9 +4015,7 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, 
token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | variable_class_name T_PAAMAYIM_NEKUDOTAYIM variable_name function_call_parameter_list { @@ -4666,9 +4026,7 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | variable_class_name T_PAAMAYIM_NEKUDOTAYIM variable_without_objects function_call_parameter_list { @@ -4679,9 +4037,7 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | variable_without_objects function_call_parameter_list { @@ -4692,8 +4048,6 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4706,9 +4060,7 @@ class_name: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | namespace_name { @@ -4716,8 +4068,6 @@ class_name: // save position $$.GetNode().Position = position.NewNodeListPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_NAMESPACE T_NS_SEPARATOR namespace_name { @@ -4727,10 +4077,8 @@ class_name: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -4740,9 +4088,7 @@ class_name: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -4753,8 +4099,6 @@ fully_qualified_class_name: // save position $$.GetNode().Position = position.NewNodeListPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_NAMESPACE T_NS_SEPARATOR namespace_name { @@ -4764,10 +4108,8 @@ fully_qualified_class_name: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -4777,9 +4119,7 @@ fully_qualified_class_name: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -4787,14 +4127,10 @@ class_name_reference: class_name { $$ = $1 - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | dynamic_class_name_reference { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4804,7 +4140,7 @@ dynamic_class_name_reference: $$ = $1 // save comments - yylex.(*Parser).setFreeFloating($3[0], token.Var, $2.Tokens) + yylex.(*Parser).setFreeFloating($3[0], token.Var, $2.SkippedTokens) for _, n := range($3) { switch nn := n.(type) { @@ -4837,14 +4173,10 @@ dynamic_class_name_reference: yylex.(*Parser).MoveFreeFloating(nn.Var, $$) } } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | base_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4853,14 +4185,10 @@ dynamic_class_name_variable_properties: dynamic_class_name_variable_properties dynamic_class_name_variable_property { $$ = append($1, $2...) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4871,9 +4199,7 @@ dynamic_class_name_variable_property: $$ = $2 // save comments - yylex.(*Parser).setFreeFloating($2[0], token.Var, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($2[0], token.Var, $1.SkippedTokens) } ; @@ -4881,8 +4207,6 @@ exit_expr: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '(' ')' { @@ -4892,16 +4216,12 @@ exit_expr: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $2.SkippedTokens) } | parenthesis_expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4909,8 +4229,6 @@ backticks_expr: /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ENCAPSED_AND_WHITESPACE { @@ -4919,14 +4237,10 @@ backticks_expr: // save position part.GetNode().Position = position.NewTokenPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | encaps_list { $$ = $1; - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4934,14 +4248,10 @@ ctor_arguments: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | function_call_parameter_list { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4954,9 +4264,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_DNUMBER { @@ -4966,9 +4274,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_CONSTANT_ENCAPSED_STRING { @@ -4978,9 +4284,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_LINE { @@ -4990,9 +4294,7 @@ common_scalar: 
$$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FILE { @@ -5002,9 +4304,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_DIR { @@ -5014,9 +4314,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_TRAIT_C { @@ -5026,9 +4324,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_METHOD_C { @@ -5038,9 +4334,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FUNC_C { @@ -5050,9 +4344,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NS_C { @@ -5062,9 +4354,7 @@ common_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_START_HEREDOC T_ENCAPSED_AND_WHITESPACE T_END_HEREDOC { @@ -5076,9 +4366,7 @@ common_scalar: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_START_HEREDOC T_END_HEREDOC { @@ -5088,9 +4376,7 @@ common_scalar: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -5106,10 +4392,8 @@ static_class_constant: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $3.SkippedTokens) } ; @@ -5117,8 +4401,6 @@ static_scalar: static_scalar_value { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5126,14 +4408,10 @@ static_scalar_value: common_scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | static_class_name_scalar { $$ = $1 - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | namespace_name { @@ -5143,8 +4421,6 @@ static_scalar_value: // save position name.GetNode().Position = position.NewNodeListPosition($1) $$.GetNode().Position = position.NewNodePosition(name) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_NAMESPACE T_NS_SEPARATOR namespace_name { @@ -5156,10 +4432,8 @@ static_scalar_value: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -5171,9 +4445,7 @@ static_scalar_value: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_ARRAY '(' static_array_pair_list ')' { @@ -5183,11 +4455,9 @@ static_scalar_value: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Array, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Array, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.SkippedTokens) } | '[' static_array_pair_list ']' { @@ -5197,16 +4467,12 @@ static_scalar_value: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.SkippedTokens) } | static_class_constant { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_CLASS_C { @@ -5216,15 +4482,11 @@ static_scalar_value: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | static_operation { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5237,10 +4499,8 @@ static_operation: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | static_scalar_value '+' static_scalar_value { @@ -5251,9 +4511,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, 
$2.SkippedTokens) } | static_scalar_value '-' static_scalar_value { @@ -5264,9 +4522,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '*' static_scalar_value { @@ -5277,9 +4533,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_POW static_scalar_value { @@ -5290,9 +4544,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '/' static_scalar_value { @@ -5303,9 +4555,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '%' static_scalar_value { @@ -5316,9 +4566,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | '!' static_scalar_value { @@ -5328,9 +4576,7 @@ static_operation: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '~' static_scalar_value { @@ -5340,9 +4586,7 @@ static_operation: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | static_scalar_value '|' static_scalar_value { @@ -5353,9 +4597,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '&' static_scalar_value { @@ -5366,9 +4608,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '^' static_scalar_value { @@ -5379,9 +4619,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_SL static_scalar_value { @@ -5392,9 +4630,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, 
$2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_SR static_scalar_value { @@ -5405,9 +4641,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '.' static_scalar_value { @@ -5418,9 +4652,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_LOGICAL_XOR static_scalar_value { @@ -5431,9 +4663,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_LOGICAL_AND static_scalar_value { @@ -5444,9 +4674,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_LOGICAL_OR static_scalar_value { @@ -5457,9 +4685,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_BOOLEAN_AND static_scalar_value { @@ -5470,9 +4696,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_BOOLEAN_OR static_scalar_value { @@ -5483,9 +4707,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_IS_IDENTICAL static_scalar_value { @@ -5496,9 +4718,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_IS_NOT_IDENTICAL static_scalar_value { @@ -5509,9 +4729,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_IS_EQUAL static_scalar_value { @@ -5522,9 +4740,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | 
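// ---------------------------------------------------------------------------
// Editor's note (illustration, not part of the diff): every semantic action in
// this grammar is rewritten the same way — hidden tokens are read from
// $n.SkippedTokens instead of $n.Tokens, and the trailing
// returnTokenToPool(yyDollar, &yyVAL) call disappears, because grammar symbols
// now carry *token.Token values that the parser no longer recycles through a
// pool. A minimal sketch of the token shape these actions rely on; only the
// SkippedTokens rename is taken from the hunks here, the other fields are
// assumptions about the library's Token and may differ from the real struct.

package token

// ID is the token kind (T_VARIABLE, T_WHITESPACE, ...).
type ID int

// Token is what each grammar symbol ($1, $2, ...) now holds by pointer.
type Token struct {
	ID            ID
	Value         []byte   // raw source text of the token (assumed field)
	SkippedTokens []*Token // was `Tokens []Token`: comments/whitespace skipped before this token
}

// Because SkippedTokens is a slice of pointers that ends up referenced from the
// AST (via setFreeFloating(dst, pos, $n.SkippedTokens)), tokens can no longer
// be handed back after a reduction — which is presumably why the
// returnTokenToPool bookkeeping is dropped throughout this file.
// ---------------------------------------------------------------------------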
static_scalar_value T_IS_NOT_EQUAL static_scalar_value { @@ -5535,10 +4751,8 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.Equal, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.Equal, $2.SkippedTokens) } | static_scalar_value '<' static_scalar_value { @@ -5549,9 +4763,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '>' static_scalar_value { @@ -5562,9 +4774,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_IS_SMALLER_OR_EQUAL static_scalar_value { @@ -5575,9 +4785,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value T_IS_GREATER_OR_EQUAL static_scalar_value { @@ -5588,9 +4796,7 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | static_scalar_value '?' ':' static_scalar_value { @@ -5601,10 +4807,8 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Cond, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.True, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.True, $3.SkippedTokens) } | static_scalar_value '?' 
static_scalar_value ':' static_scalar_value { @@ -5615,10 +4819,8 @@ static_operation: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Cond, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.True, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.True, $4.SkippedTokens) } | '+' static_scalar_value { @@ -5628,9 +4830,7 @@ static_operation: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '-' static_scalar_value { @@ -5640,19 +4840,15 @@ static_operation: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '(' static_scalar_value ')' { $$ = $2 // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.Tokens, $$.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.Tokens...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.SkippedTokens, $$.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.SkippedTokens...)) } ; @@ -5660,8 +4856,6 @@ general_constant: class_constant { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | namespace_name { @@ -5671,8 +4865,6 @@ general_constant: // save position name.GetNode().Position = position.NewNodeListPosition($1) $$.GetNode().Position = position.NewNodePosition(name) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_NAMESPACE T_NS_SEPARATOR namespace_name { @@ -5684,10 +4876,8 @@ general_constant: $$.GetNode().Position = position.NewNodePosition(name) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Namespace, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.Namespace, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -5699,9 +4889,7 @@ general_constant: $$.GetNode().Position = position.NewNodePosition(name) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -5716,27 +4904,19 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | general_constant { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_name_scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | common_scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '"' encaps_list '"' { @@ -5746,9 +4926,7 @@ scalar: $$.GetNode().Position = 
position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_START_HEREDOC encaps_list T_END_HEREDOC { @@ -5758,9 +4936,7 @@ scalar: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_CLASS_C { @@ -5770,9 +4946,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -5780,8 +4954,6 @@ static_array_pair_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_static_array_pair_list possible_comma { @@ -5789,10 +4961,8 @@ static_array_pair_list: // save comments if $2 != nil { - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5817,11 +4987,9 @@ non_empty_static_array_pair_list: arrayItem.GetNode().Position = position.NewNodesPosition($3, $5) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) yylex.(*Parser).MoveFreeFloating($3, arrayItem) - yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $4.SkippedTokens) } | non_empty_static_array_pair_list ',' static_scalar_value { @@ -5832,10 +5000,8 @@ non_empty_static_array_pair_list: arrayItem.GetNode().Position = position.NewNodePosition($3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) yylex.(*Parser).MoveFreeFloating($3, arrayItem) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | static_scalar_value T_DOUBLE_ARROW static_scalar_value { @@ -5847,9 +5013,7 @@ non_empty_static_array_pair_list: // save comments yylex.(*Parser).MoveFreeFloating($1, arrayItem) - yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $2.SkippedTokens) } | static_scalar_value { @@ -5861,8 +5025,6 @@ non_empty_static_array_pair_list: // save comments yylex.(*Parser).MoveFreeFloating($1, arrayItem) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5870,14 +5032,10 @@ expr: r_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr_without_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5890,10 +5048,8 @@ parenthesis_expr: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, 
token.End, $3.SkippedTokens) } | '(' yield_expr ')' { @@ -5903,10 +5059,8 @@ parenthesis_expr: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } ; @@ -5915,8 +5069,6 @@ r_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5925,8 +5077,6 @@ w_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5934,8 +5084,6 @@ rw_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5950,7 +5098,7 @@ variable: } // save comments - yylex.(*Parser).setFreeFloating($3[0], token.Var, $2.Tokens) + yylex.(*Parser).setFreeFloating($3[0], token.Var, $2.SkippedTokens) for _, n := range($3) { switch nn := n.(type) { @@ -5995,14 +5143,10 @@ variable: yylex.(*Parser).MoveFreeFloating(nn.Var, $$) } } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | base_variable_with_function_calls { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6010,14 +5154,10 @@ variable_properties: variable_properties variable_property { $$ = append($1, $2...) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6033,9 +5173,7 @@ variable_property: $$ = $2 // save comments - yylex.(*Parser).setFreeFloating($2[0], token.Var, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($2[0], token.Var, $1.SkippedTokens) } ; @@ -6049,10 +5187,8 @@ array_method_dereference: fetch.GetNode().Position = position.NewNodePosition($3) // save comments - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.SkippedTokens) } | method '[' dim_offset ']' { @@ -6063,10 +5199,8 @@ array_method_dereference: fetch.GetNode().Position = position.NewNodePosition($3) // save comments - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.SkippedTokens) } ; @@ -6077,8 +5211,6 @@ method: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6086,20 +5218,14 @@ method_or_not: method { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | array_method_dereference { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6107,8 +5233,6 @@ variable_without_objects: reference_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | simple_indirect_reference reference_variable { @@ -6119,8 +5243,6 @@ variable_without_objects: } $$ = $1.all[0] - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6134,9 +5256,7 @@ static_member: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | variable_class_name T_PAAMAYIM_NEKUDOTAYIM variable_without_objects { @@ -6147,9 +5267,7 @@ static_member: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } ; @@ -6157,8 +5275,6 @@ variable_class_name: reference_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6171,10 +5287,8 @@ array_function_dereference: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | function_call '[' dim_offset ']' { @@ -6184,10 +5298,8 @@ array_function_dereference: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } ; @@ -6195,20 +5307,14 @@ base_variable_with_function_calls: base_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | array_function_dereference { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | function_call { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6217,8 +5323,6 @@ base_variable: reference_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | simple_indirect_reference reference_variable { @@ -6229,14 +5333,10 @@ base_variable: } $$ = $1.all[0] - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | static_member { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6249,10 +5349,8 @@ reference_variable: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | reference_variable '{' expr '}' { @@ -6262,16 +5360,12 @@ reference_variable: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | compound_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ 
-6287,9 +5381,7 @@ compound_variable: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '$' '{' expr '}' { @@ -6299,11 +5391,9 @@ compound_variable: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($3, token.Start, append($2.Tokens, $3.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($3, token.End, append($3.GetNode().Tokens[token.End], $4.Tokens...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($3, token.Start, append($2.SkippedTokens, $3.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($3, token.End, append($3.GetNode().Tokens[token.End], $4.SkippedTokens...)) } ; @@ -6311,14 +5401,10 @@ dim_offset: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6327,8 +5413,6 @@ object_property: object_dim_list { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | variable_without_objects { @@ -6337,8 +5421,6 @@ object_property: // save position fetch.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6352,10 +5434,8 @@ object_dim_list: fetch.GetNode().Position = position.NewNodePosition($3) // save comments - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.SkippedTokens) } | object_dim_list '{' expr '}' { @@ -6366,10 +5446,8 @@ object_dim_list: fetch.GetNode().Position = position.NewNodePosition($3) // save comments - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(fetch, token.Expr, $4.SkippedTokens) } | variable_name { @@ -6378,8 +5456,6 @@ object_dim_list: // save position fetch.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6392,9 +5468,7 @@ variable_name: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '{' expr '}' { @@ -6404,10 +5478,8 @@ variable_name: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.Tokens, $$.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.Tokens...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.SkippedTokens, 
$$.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.SkippedTokens...)) } ; @@ -6421,9 +5493,7 @@ simple_indirect_reference: n.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(n, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(n, token.Start, $1.SkippedTokens) } | simple_indirect_reference '$' { @@ -6438,9 +5508,7 @@ simple_indirect_reference: n.GetNode().Position = position.NewTokenPosition($2) // save comments - yylex.(*Parser).setFreeFloating(n, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(n, token.Start, $2.SkippedTokens) } ; @@ -6454,9 +5522,7 @@ assignment_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | assignment_list_element { @@ -6465,8 +5531,6 @@ assignment_list: } else { $$ = []ast.Vertex{$1} } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6481,8 +5545,6 @@ assignment_list_element: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_LIST '(' assignment_list ')' { @@ -6494,17 +5556,13 @@ assignment_list_element: $$.GetNode().Position = position.NewNodePosition(listNode) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.List, $2.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.List, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.SkippedTokens) } | /* empty */ { $$ = &ast.ExprArrayItem{ast.Node{}, false, nil, nil} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6513,8 +5571,6 @@ array_pair_list: /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_array_pair_list possible_comma { @@ -6526,10 +5582,8 @@ array_pair_list: // save comments if $2 != nil { - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -6543,11 +5597,9 @@ non_empty_array_pair_list: arrayItem.GetNode().Position = position.NewNodesPosition($3, $5) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) yylex.(*Parser).MoveFreeFloating($3, arrayItem) - yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $4.SkippedTokens) } | non_empty_array_pair_list ',' expr { @@ -6558,10 +5610,8 @@ non_empty_array_pair_list: arrayItem.GetNode().Position = position.NewNodePosition($3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) yylex.(*Parser).MoveFreeFloating($3, arrayItem) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr T_DOUBLE_ARROW expr { @@ -6573,9 +5623,7 @@ non_empty_array_pair_list: // save comments yylex.(*Parser).MoveFreeFloating($1, arrayItem) - yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $2.SkippedTokens) } | expr { @@ -6587,8 +5635,6 @@ non_empty_array_pair_list: // save comments yylex.(*Parser).MoveFreeFloating($1, arrayItem) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_array_pair_list ',' expr T_DOUBLE_ARROW '&' w_variable { @@ -6601,12 +5647,10 @@ non_empty_array_pair_list: arrayItem.GetNode().Position = position.NewNodesPosition($3, $6) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) yylex.(*Parser).MoveFreeFloating($3, arrayItem) - yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $4.Tokens) - yylex.(*Parser).setFreeFloating(reference, token.Start, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating(reference, token.Start, $5.SkippedTokens) } | non_empty_array_pair_list ',' '&' w_variable { @@ -6619,10 +5663,8 @@ non_empty_array_pair_list: arrayItem.GetNode().Position = position.NewTokenNodePosition($3, $4) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(arrayItem, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(arrayItem, token.Start, $3.SkippedTokens) } | expr T_DOUBLE_ARROW '&' w_variable { @@ -6636,10 +5678,8 @@ non_empty_array_pair_list: // save comments yylex.(*Parser).MoveFreeFloating($1, arrayItem) - yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $2.Tokens) - yylex.(*Parser).setFreeFloating(reference, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(arrayItem, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(reference, token.Start, $3.SkippedTokens) } | '&' w_variable { @@ -6652,9 +5692,7 @@ non_empty_array_pair_list: arrayItem.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating(arrayItem, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(arrayItem, token.Start, $1.SkippedTokens) } ; @@ -6662,8 +5700,6 @@ encaps_list: encaps_list encaps_var { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | encaps_list T_ENCAPSED_AND_WHITESPACE { @@ -6674,15 +5710,11 @@ encaps_list: encapsed.GetNode().Position = position.NewTokenPosition($2) // save comments - yylex.(*Parser).setFreeFloating(encapsed, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(encapsed, token.Start, $2.SkippedTokens) } | encaps_var { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ENCAPSED_AND_WHITESPACE encaps_var { @@ -6693,9 +5725,7 @@ encaps_list: encapsed.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(encapsed, token.Start, $1.Tokens) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(encapsed, token.Start, $1.SkippedTokens) } ; @@ -6710,9 +5740,7 @@ encaps_var: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_VARIABLE '[' encaps_var_offset ']' { @@ -6726,10 +5754,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | T_VARIABLE T_OBJECT_OPERATOR T_STRING { @@ -6745,10 +5771,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloating(fetch, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(fetch, token.Start, $3.SkippedTokens) } | T_DOLLAR_OPEN_CURLY_BRACES expr '}' { @@ -6760,10 +5784,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setToken($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | T_DOLLAR_OPEN_CURLY_BRACES T_STRING_VARNAME '}' { @@ -6777,10 +5799,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setToken($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | T_DOLLAR_OPEN_CURLY_BRACES T_STRING_VARNAME '[' expr ']' '}' { @@ -6794,22 +5814,18 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $6) // save comments - yylex.(*Parser).setToken(variable, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $3.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $5.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken(variable, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $3.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $5.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $6.SkippedTokens) } | T_CURLY_OPEN variable '}' { $$ = $2; // save comments - yylex.(*Parser).setToken($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } ; @@ -6822,9 +5838,7 @@ encaps_var_offset: $$.GetNode().Position = 
position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NUM_STRING { @@ -6839,9 +5853,7 @@ encaps_var_offset: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_VARIABLE { @@ -6853,9 +5865,7 @@ encaps_var_offset: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -6868,11 +5878,9 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Isset, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.VarList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Isset, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $4.SkippedTokens) } | T_EMPTY '(' variable ')' { @@ -6884,11 +5892,9 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | T_EMPTY '(' expr ')' { @@ -6900,11 +5906,9 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | T_INCLUDE expr { @@ -6914,9 +5918,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_INCLUDE_ONCE expr { @@ -6926,9 +5928,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_EVAL '(' expr ')' { @@ -6940,11 +5940,9 @@ internal_functions_in_yacc: 
$$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | T_REQUIRE expr { @@ -6954,9 +5952,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_REQUIRE_ONCE expr { @@ -6966,9 +5962,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -6976,17 +5970,13 @@ isset_variables: isset_variable { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | isset_variables ',' isset_variable { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -6994,14 +5984,10 @@ isset_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr_without_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -7017,10 +6003,8 @@ class_constant: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $3.SkippedTokens) } | variable_class_name T_PAAMAYIM_NEKUDOTAYIM T_STRING { @@ -7033,10 +6017,8 @@ class_constant: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $3.SkippedTokens) } ; @@ -7052,10 +6034,8 @@ static_class_name_scalar: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $3.SkippedTokens) } ; @@ -7071,10 +6051,8 @@ class_name_scalar: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + 
yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $3.SkippedTokens) } ; diff --git a/internal/php5/php5_bench_test.go b/internal/php5/php5_bench_test.go index 49efe90..3cbd675 100644 --- a/internal/php5/php5_bench_test.go +++ b/internal/php5/php5_bench_test.go @@ -414,7 +414,7 @@ CAD; ` for n := 0; n < b.N; n++ { - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() } diff --git a/internal/php5/php5_test.go b/internal/php5/php5_test.go index b1a1ada..051917b 100644 --- a/internal/php5/php5_test.go +++ b/internal/php5/php5_test.go @@ -22212,11 +22212,12 @@ func TestPhp5(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22351,11 +22352,12 @@ func TestPhp5Strings(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22579,11 +22581,12 @@ CAD; }, } - lexer := scanner.NewLexer([]byte(src), "5.6", false, nil) + lexer := scanner.NewLexer([]byte(src), "5.6", nil) php5parser := php5.NewParser(lexer, nil) php5parser.Parse() actual := php5parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22606,7 +22609,7 @@ func TestPhp5ControlCharsErrors(t *testing.T) { parserErrors = append(parserErrors, e) } - lexer := scanner.NewLexer([]byte(src), "5.6", false, errorHandlerFunc) + lexer := scanner.NewLexer([]byte(src), "5.6", errorHandlerFunc) php5parser := php5.NewParser(lexer, errorHandlerFunc) php5parser.Parse() assert.DeepEqual(t, expected, parserErrors) diff --git a/internal/php7/parser.go b/internal/php7/parser.go index a910f9c..32f3fa5 100644 --- a/internal/php7/parser.go +++ b/internal/php7/parser.go @@ -12,7 +12,7 @@ import ( // Parser structure type Parser struct { Lexer *scanner.Lexer - currentToken *scanner.Token + currentToken *token.Token rootNode ast.Vertex errHandlerFunc func(*errors.Error) } @@ -39,8 +39,7 @@ func (p *Parser) Error(msg string) { return } - var pos = p.currentToken.Position - p.errHandlerFunc(errors.NewError(msg, &pos)) + p.errHandlerFunc(errors.NewError(msg, p.currentToken.Position)) } // Parse the php7 Parser entrypoint @@ -82,7 +81,7 @@ func (p *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) { delete(src.GetNode().Tokens, token.Start) } -func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []token.Token) { +func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []*token.Token) { if len(tokens) == 0 { return } @@ -98,7 +97,7 @@ func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []to } } -func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []token.Token) { 
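// Illustrative aside (not part of the patch): scanner.NewLexer now takes
// (source, phpVersion, errorHandler) -- the old boolean argument is gone --
// and, as TestPhp5ControlCharsErrors above shows, the same handler can be
// shared by the lexer and the parser. A condensed sketch of that wiring; the
// import paths are assumptions, and these packages are internal to the
// module, so the snippet only builds inside the repository:
package php5_test

import (
	"github.com/z7zmey/php-parser/internal/php5"
	"github.com/z7zmey/php-parser/internal/scanner"
	"github.com/z7zmey/php-parser/pkg/errors"
)

// collectParseErrors is a hypothetical helper, not part of the test suite.
func collectParseErrors(src []byte) []*errors.Error {
	var parserErrors []*errors.Error
	errorHandlerFunc := func(e *errors.Error) {
		parserErrors = append(parserErrors, e)
	}

	// Both the lexer and the parser report through the same callback.
	lexer := scanner.NewLexer(src, "5.6", errorHandlerFunc)
	php5parser := php5.NewParser(lexer, errorHandlerFunc)
	php5parser.Parse()

	return parserErrors
}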
+func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []*token.Token) { if len(tokens) == 0 { return } @@ -108,14 +107,14 @@ func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, token *dstCollection = make(token.Collection) } - (*dstCollection)[pos] = make([]token.Token, 0) + (*dstCollection)[pos] = make([]*token.Token, 0) for _, v := range tokens { (*dstCollection)[pos] = append((*dstCollection)[pos], v) } } -func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []token.Token) { +func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []*token.Token) { if len(tokens) == 0 { return } @@ -141,7 +140,7 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast. } if semiColon[0].Value[0] == ';' { - p.setFreeFloatingTokens(prevNode, token.SemiColon, []token.Token{ + p.setFreeFloatingTokens(prevNode, token.SemiColon, []*token.Token{ { ID: token.ID(';'), Value: semiColon[0].Value[0:1], @@ -155,28 +154,18 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast. tlen = 3 } - phpCloseTag := []token.Token{} + phpCloseTag := []*token.Token{} if vlen-tlen > 1 { - phpCloseTag = append(phpCloseTag, token.Token{ + phpCloseTag = append(phpCloseTag, &token.Token{ ID: token.T_WHITESPACE, Value: semiColon[0].Value[1 : vlen-tlen], }) } - phpCloseTag = append(phpCloseTag, token.Token{ + phpCloseTag = append(phpCloseTag, &token.Token{ ID: T_CLOSE_TAG, Value: semiColon[0].Value[vlen-tlen:], }) p.setFreeFloatingTokens(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...)) } - -func (p *Parser) returnTokenToPool(yyDollar []yySymType, yyVAL *yySymType) { - for i := 1; i < len(yyDollar); i++ { - if yyDollar[i].token != nil { - p.Lexer.ReturnTokenToPool(yyDollar[i].token) - } - yyDollar[i].token = nil - } - yyVAL.token = nil -} diff --git a/internal/php7/parser_test.go b/internal/php7/parser_test.go index 9ef81b7..d909c98 100644 --- a/internal/php7/parser_test.go +++ b/internal/php7/parser_test.go @@ -60,11 +60,12 @@ func TestIdentifier(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -955,11 +956,12 @@ func TestPhp7ArgumentNode(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -1809,11 +1811,12 @@ func TestPhp7ParameterNode(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -1832,11 +1835,12 @@ func TestCommentEndFile(t *testing.T) { Stmts: []ast.Vertex{}, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + 
lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -1911,11 +1915,12 @@ func TestName(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -1988,11 +1993,12 @@ func TestFullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2065,11 +2071,12 @@ func TestRelative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2145,11 +2152,12 @@ func TestScalarEncapsed_SimpleVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2223,11 +2231,12 @@ func TestScalarEncapsed_SimpleVarOneChar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2312,11 +2321,12 @@ func TestScalarEncapsed_SimpleVarEndsEcapsed(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2411,11 +2421,12 @@ func TestScalarEncapsed_StringVarCurveOpen(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2521,11 +2532,12 @@ func 
TestScalarEncapsed_SimpleVarPropertyFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2599,11 +2611,12 @@ func TestScalarEncapsed_DollarOpenCurlyBraces(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2698,11 +2711,12 @@ func TestScalarEncapsed_DollarOpenCurlyBracesDimNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2807,11 +2821,12 @@ func TestScalarEncapsed_CurlyOpenMethodCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2900,11 +2915,12 @@ LBL; }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -2993,11 +3009,12 @@ LBL; }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3054,11 +3071,12 @@ LBL; }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3101,11 +3119,12 @@ CAD; }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3162,11 +3181,12 @@ CAD; }, } - lexer := 
scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3208,11 +3228,12 @@ func TestScalarMagicConstant(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3253,11 +3274,12 @@ func TestScalarNumber_LNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3298,11 +3320,12 @@ func TestScalarNumber_DNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3343,11 +3366,12 @@ func TestScalarNumber_Float(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3388,11 +3412,12 @@ func TestScalarNumber_BinaryLNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3433,11 +3458,12 @@ func TestScalarNumber_BinaryDNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3478,11 +3504,12 @@ func TestScalarNumber_HLNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3523,11 
+3550,12 @@ func TestScalarNumber_HDNumber(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3568,11 +3596,12 @@ func TestScalarString_DoubleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3613,11 +3642,12 @@ func TestScalarString_DoubleQuotedScalarStringWithEscapedVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3660,11 +3690,12 @@ func TestScalarString_MultilineDoubleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3705,11 +3736,12 @@ func TestScalarString_SingleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3752,11 +3784,12 @@ func TestScalarString_MultilineSingleQuotedScalarString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3823,11 +3856,12 @@ func TestStmtAltIf_AltIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -3937,11 +3971,12 @@ func TestStmtAltIf_AltElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() 
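// Illustrative aside (not part of the patch): every php7 test above now runs
// the same five steps, with the new FilterTokens pass stripping the attached
// token data before the DeepEqual comparison. Condensed into one hypothetical
// helper; the import paths and the assert package are assumptions, not taken
// from this diff:
package php7_test

import (
	"testing"

	"github.com/z7zmey/php-parser/internal/php7"
	"github.com/z7zmey/php-parser/internal/scanner"
	"github.com/z7zmey/php-parser/pkg/ast"
	"github.com/z7zmey/php-parser/pkg/ast/traverser"
	"github.com/z7zmey/php-parser/pkg/ast/visitor"

	"gotest.tools/assert"
)

func parseAndCompare(t *testing.T, src string, expected ast.Vertex) {
	lexer := scanner.NewLexer([]byte(src), "7.4", nil)
	php7parser := php7.NewParser(lexer, nil)
	php7parser.Parse()

	actual := php7parser.GetRootNode()

	// Drop parser-internal helper nodes, then the token metadata, so only the
	// AST structure described by the test remains.
	traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
	traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)

	assert.DeepEqual(t, expected, actual)
}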
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4028,11 +4063,12 @@ func TestStmtAltIf_AltElse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4207,11 +4243,12 @@ func TestStmtAltIf_AltElseElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4343,11 +4380,12 @@ func TestStmtClassConstList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4466,11 +4504,12 @@ func TestStmtClassConstList_WithoutModifiers(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4546,11 +4585,12 @@ func TestStmtClassMethod_SimpleClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4696,11 +4736,12 @@ func TestStmtClassMethod_PrivateProtectedClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4823,11 +4864,12 @@ func TestStmtClassMethod_Php7ClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -4939,11 +4981,12 @@ func TestStmtClassMethod_AbstractClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := 
scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5067,11 +5110,12 @@ func TestStmtClassMethod_Php7AbstractClassMethod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5113,11 +5157,12 @@ func TestStmtClass_SimpleClass(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5172,11 +5217,12 @@ func TestStmtClass_AbstractClass(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5264,11 +5310,12 @@ func TestStmtClass_ClassExtends(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5358,11 +5405,12 @@ func TestStmtClass_ClassImplement(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5475,11 +5523,12 @@ func TestStmtClass_ClassImplements(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5631,11 +5680,12 @@ func TestStmtClass_AnonimousClass(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5731,11 +5781,12 @@ func 
TestStmtConstList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5798,11 +5849,12 @@ func TestStmtContinue_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5876,11 +5928,12 @@ func TestStmtContinue_Light(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -5954,11 +6007,12 @@ func TestStmtContinue(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6032,11 +6086,12 @@ func TestStmtDeclare(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6143,11 +6198,12 @@ func TestStmtDeclare_Stmts(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6223,11 +6279,12 @@ func TestStmtDeclare_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6279,11 +6336,12 @@ func TestStmtDo(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6347,11 
+6405,12 @@ func TestStmtEcho(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6404,11 +6463,12 @@ func TestStmtEcho_Parenthesis(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6449,11 +6509,12 @@ func TestStmtExpression(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6646,11 +6707,12 @@ func TestStmtFor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6768,11 +6830,12 @@ func TestStmtFor_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6855,11 +6918,12 @@ func TestStmtForeach(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -6932,11 +6996,12 @@ func TestStmtForeach_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7019,11 +7084,12 @@ func TestStmtForeach_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ 
-7127,11 +7193,12 @@ func TestStmtForeach_WithKey(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7225,11 +7292,12 @@ func TestStmtForeach_ExprWithKey(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7343,11 +7411,12 @@ func TestStmtForeach_WithRef(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7473,11 +7542,12 @@ func TestStmtForeach_WithList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7520,11 +7590,12 @@ func TestStmtFunction(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7578,11 +7649,12 @@ func TestStmtFunction_Return(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7743,11 +7815,12 @@ func TestStmtFunction_ReturnVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7812,11 +7885,12 @@ func TestStmtFunction_Ref(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7882,11 +7956,12 @@ func TestStmtFunction_ReturnType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -7939,11 +8014,12 @@ func TestStmtGlobal(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8101,11 +8177,12 @@ func TestStmtGlobal_Vars(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8167,11 +8244,12 @@ func TestStmtGotoLabel(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8201,11 +8279,12 @@ func TestStmtHaltCompiler(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8267,11 +8346,12 @@ func TestStmtIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8377,11 +8457,12 @@ func TestStmtIf_ElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8464,11 +8545,12 @@ func TestStmtIf_Else(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() 
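// Illustrative aside (not part of the patch): the data FilterTokens removes in
// these tests is exactly what the SkippedTokens plumbing attaches to each
// node, so outside the tests it can be read back from GetNode().Tokens. A
// rough sketch for a single node; it needs "fmt" plus the ast package, and
// Token.Value is assumed to be a byte slice, as the parser code in this diff
// suggests:
func dumpHiddenTokens(n ast.Vertex) {
	for pos, toks := range n.GetNode().Tokens {
		for _, tkn := range toks {
			// pos is a position constant such as token.Start or token.End;
			// tkn holds the whitespace, comment or "?>" text around the node.
			fmt.Printf("%v %v %q\n", pos, tkn.ID, tkn.Value)
		}
	}
}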
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8637,11 +8719,12 @@ func TestStmtIf_ElseElseIf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8820,11 +8903,12 @@ func TestStmtIf_ElseIfElseIfElse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8865,11 +8949,12 @@ func TestStmtInlineHtml(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8911,11 +8996,12 @@ func TestStmtInterface(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -8992,11 +9078,12 @@ func TestStmtInterface_Extend(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9096,11 +9183,12 @@ func TestStmtInterface_Extends(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9153,11 +9241,12 @@ func TestStmtNamespace(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9211,11 +9300,12 @@ func TestStmtNamespace_Stmts(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) 
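// Illustrative aside (not part of the patch): the php7 tests above pin the
// language level to "7.4", while the php5 tests earlier in this diff pin
// "5.6". A hypothetical dispatcher a caller might write on top of the two
// parsers; the function name and structure are inventions for illustration,
// with import paths as assumed in the earlier sketches:
func parseByVersion(src []byte, version string) ast.Vertex {
	if version < "7.0" { // naive string comparison, adequate for "5.6" vs "7.4"
		lexer := scanner.NewLexer(src, version, nil)
		p := php5.NewParser(lexer, nil)
		p.Parse()
		return p.GetRootNode()
	}
	lexer := scanner.NewLexer(src, version, nil)
	p := php7.NewParser(lexer, nil)
	p.Parse()
	return p.GetRootNode()
}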
php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9246,11 +9336,12 @@ func TestStmtNamespace_Anonymous(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9349,11 +9440,12 @@ func TestStmtProperty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9505,11 +9597,12 @@ func TestStmtProperty_Properties(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9661,11 +9754,12 @@ func TestStmtProperty_Properties2(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9787,11 +9881,12 @@ func TestStmtProperty_PropertyType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9854,11 +9949,12 @@ func TestStmtStaticVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -9963,11 +10059,12 @@ func TestStmtStaticVar_Vars(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10072,11 +10169,12 @@ func TestStmtStaticVar_Vars2(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := 
scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10200,11 +10298,12 @@ func TestStmtSwitch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10328,11 +10427,12 @@ func TestStmtSwitch_Semicolon(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10446,11 +10546,12 @@ func TestStmtSwitch_Alt(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10552,11 +10653,12 @@ func TestStmtSwitch_AltSemicolon(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10607,11 +10709,12 @@ func TestStmtThrow(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10653,11 +10756,12 @@ func TestStmtTrait(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10745,11 +10849,12 @@ func TestStmtTraitUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10860,11 +10965,12 @@ func TestStmtTraitUse_Uses(t *testing.T) { }, } - lexer := 
scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -10975,11 +11081,12 @@ func TestStmtTraitUse_EmptyAdaptations(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11134,11 +11241,12 @@ func TestStmtTraitUse_Modifier(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11304,11 +11412,12 @@ func TestStmtTraitUse_AliasModifier(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11588,11 +11697,12 @@ func TestStmtTraitUse_Adaptions(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11626,11 +11736,12 @@ func TestStmtTry_Try(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11722,11 +11833,12 @@ func TestStmtTry_TryCatch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -11841,11 +11953,12 @@ func TestStmtTry_Php7TryCatch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ 
-11994,11 +12107,12 @@ func TestStmtTry_TryCatchCatch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12101,11 +12215,12 @@ func TestStmtTry_TryCatchFinally(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12309,11 +12424,12 @@ func TestStmtTry_TryCatchCatchCatch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12366,11 +12482,12 @@ func TestStmtUnset(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12444,11 +12561,12 @@ func TestStmtUnset_Vars(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12522,11 +12640,12 @@ func TestStmtUnset_TrailingComma(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12601,11 +12720,12 @@ func TestStmtUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12680,11 +12800,12 @@ func TestStmtUse_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12770,11 +12891,12 @@ func TestStmtUse_FullyQualifiedAlias(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -12882,11 +13004,12 @@ func TestStmtUse_List(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13005,11 +13128,12 @@ func TestStmtUse_ListAlias(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13138,11 +13262,12 @@ func TestStmtUse_ListFunctionType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13293,11 +13418,12 @@ func TestStmtUse_ListFunctionTypeAliases(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13427,11 +13553,12 @@ func TestStmtUse_ListConstType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13582,11 +13709,12 @@ func TestStmtUse_ListConstTypeAliases(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13727,11 +13855,12 @@ func TestStmtUse_GroupUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := 
php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -13883,11 +14012,12 @@ func TestStmtUse_GroupUseAlias(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14049,11 +14179,12 @@ func TestStmtUse_FunctionGroupUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14215,11 +14346,12 @@ func TestStmtUse_ConstGroupUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14402,11 +14534,12 @@ func TestStmtUse_MixedGroupUse(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14469,11 +14602,12 @@ func TestStmtBreak_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14547,11 +14681,12 @@ func TestStmtBreak_Light(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14625,11 +14760,12 @@ func TestStmtBreak(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14703,11 +14839,12 @@ func TestExprArrayDimFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) 
php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14800,11 +14937,12 @@ func TestExprArrayDimFetch_Nested(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14845,11 +14983,12 @@ func TestExprArray(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -14912,11 +15051,12 @@ func TestExprArray_Item(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15032,11 +15172,12 @@ func TestExprArray_Items(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15110,11 +15251,12 @@ func TestExprArray_ItemUnpack(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15177,11 +15319,12 @@ func TestExprArrowFunction(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15267,11 +15410,12 @@ func TestExprArrowFunction_ReturnType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15332,11 +15476,12 @@ func TestExprBitwiseNot(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, 
nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15397,11 +15542,12 @@ func TestExprBooleanNot(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15475,11 +15621,12 @@ func TestExprClassConstFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15541,11 +15688,12 @@ func TestExprClassConstFetch_Static(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15606,11 +15754,12 @@ func TestExprClone_Brackets(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15671,11 +15820,12 @@ func TestExprClone(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15718,11 +15868,12 @@ func TestExprClosure(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -15893,11 +16044,12 @@ func TestExprClosure_Use(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16068,11 +16220,12 @@ func TestExprClosure_Use2(t *testing.T) { }, } - lexer 
:= scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16138,11 +16291,12 @@ func TestExprClosure_ReturnType(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16205,11 +16359,12 @@ func TestExprConstFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16272,11 +16427,12 @@ func TestExprConstFetch_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16339,11 +16495,12 @@ func TestExprConstFetch_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16404,11 +16561,12 @@ func TestExprEmpty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16469,11 +16627,12 @@ func TestExprErrorSuppress(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16534,11 +16693,12 @@ func TestExprEval(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16579,11 +16739,12 @@ 
func TestExprExit(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16624,11 +16785,12 @@ func TestExprExit_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16690,11 +16852,12 @@ func TestExprExit_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16735,11 +16898,12 @@ func TestExprDie(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16780,11 +16944,12 @@ func TestExprDie_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16846,11 +17011,12 @@ func TestExprDie_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -16923,11 +17089,12 @@ func TestExprFunctionCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17000,11 +17167,12 @@ func TestExprFunctionCall_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } 
@@ -17102,11 +17270,12 @@ func TestExprFunctionCall_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17222,11 +17391,12 @@ func TestExprFunctionCall_Var(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17355,11 +17525,12 @@ func TestExprFunctionCall_ExprArg(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17420,11 +17591,12 @@ func TestExprPostDec(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17485,11 +17657,12 @@ func TestExprPostInc(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17550,11 +17723,12 @@ func TestExprPreDec(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17615,11 +17789,12 @@ func TestExprPreInc(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17680,11 +17855,12 @@ func TestExprInclude(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17745,11 +17921,12 @@ func TestExprInclude_Once(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17810,11 +17987,12 @@ func TestExprRequire(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17875,11 +18053,12 @@ func TestExprRequire_Once(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -17963,11 +18142,12 @@ func TestExprInstanceOf(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18051,11 +18231,12 @@ func TestExprInstanceOf_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18139,11 +18320,12 @@ func TestExprInstanceOf_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18206,11 +18388,12 @@ func TestExprIsset(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18294,11 +18477,12 @@ func TestExprIsset_Variables(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() 
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18370,11 +18554,12 @@ func TestExprList_Empty(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18478,11 +18663,12 @@ func TestExprList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18596,11 +18782,12 @@ func TestExprList_ArrayIndex(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18726,11 +18913,12 @@ func TestExprList_List(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18835,11 +19023,12 @@ func TestExprList_EmptyItem(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -18946,11 +19135,12 @@ func TestExprList_EmptyItems(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19032,11 +19222,12 @@ func TestExprMethodCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19099,11 +19290,12 @@ func TestExprNew(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() 
actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19176,11 +19368,12 @@ func TestExprNew_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19253,11 +19446,12 @@ func TestExprNew_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19386,11 +19580,12 @@ func TestExprNew_Anonymous(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19451,11 +19646,12 @@ func TestExprPrint(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19527,11 +19723,12 @@ func TestExprPropertyFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19646,11 +19843,12 @@ func TestExprReference_ForeachWithRef(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19724,11 +19922,12 @@ func TestExprShellExec(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19769,11 +19968,12 @@ func TestExprShortArray(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) 
php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19836,11 +20036,12 @@ func TestExprShortArray_Item(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19956,11 +20157,12 @@ func TestExprShortArray_Items(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20064,11 +20266,12 @@ func TestExprShortList(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20182,11 +20385,12 @@ func TestExprShortList_ArrayIndex(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20312,11 +20516,12 @@ func TestExprShortList_List(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20400,11 +20605,12 @@ func TestExprStaticCall(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20488,11 +20694,12 @@ func TestExprStaticCall_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20576,11 +20783,12 @@ func TestExprStaticCall_FullyQualified(t *testing.T) { }, } - lexer := 
scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20674,11 +20882,12 @@ func TestExprStaticCall_Var(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20770,11 +20979,12 @@ func TestExprStaticCall_VarVar(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20858,11 +21068,12 @@ func TestExprStaticPropertyFetch(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -20946,11 +21157,12 @@ func TestExprStaticPropertyFetch_Relative(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21034,11 +21246,12 @@ func TestExprStaticPropertyFetch_FullyQualified(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21141,11 +21354,12 @@ func TestExprTernary(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21227,11 +21441,12 @@ func TestExprTernary_Simple(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, 
actual) } @@ -21386,11 +21601,12 @@ func TestExprTernary_NestedTrue(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21545,11 +21761,12 @@ func TestExprTernary_NestedCond(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21610,11 +21827,12 @@ func TestExprUnaryMinus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21675,11 +21893,12 @@ func TestExprUnaryPlus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21730,11 +21949,12 @@ func TestExprVariable(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21795,11 +22015,12 @@ func TestExprVariable_Variable(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21839,11 +22060,12 @@ func TestExprYield(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21904,11 +22126,12 @@ func TestExprYield_Val(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -21990,11 +22213,12 @@ func TestExprYield_KeyVal(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22045,11 +22269,12 @@ func TestExprYield_Expr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22121,11 +22346,12 @@ func TestExprYield_KeyExpr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22186,11 +22412,12 @@ func TestExprYieldFrom(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22274,11 +22501,12 @@ func TestExprAssign_Assign(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22360,11 +22588,12 @@ func TestExprAssign_Reference(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22458,11 +22687,12 @@ func TestExprAssign_ReferenceNew(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22601,11 +22831,12 @@ func TestExprAssign_ReferenceArgs(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() 
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22687,11 +22918,12 @@ func TestExprAssign_BitwiseAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22773,11 +23005,12 @@ func TestExprAssign_BitwiseOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22859,11 +23092,12 @@ func TestExprAssign_BitwiseXor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -22945,11 +23179,12 @@ func TestExprAssign_Concat(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23031,11 +23266,12 @@ func TestExprAssign_Div(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23117,11 +23353,12 @@ func TestExprAssign_Minus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23203,11 +23440,12 @@ func TestExprAssign_Mod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23289,11 +23527,12 @@ func TestExprAssign_Mul(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) 
php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23375,11 +23614,12 @@ func TestExprAssign_Plus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23461,11 +23701,12 @@ func TestExprAssign_Pow(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23547,11 +23788,12 @@ func TestExprAssign_ShiftLeft(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23633,11 +23875,12 @@ func TestExprAssign_ShiftRight(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23719,11 +23962,12 @@ func TestExprAssign_Coalesce(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23807,11 +24051,12 @@ func TestExprBinary_BitwiseAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23893,11 +24138,12 @@ func TestExprBinary_BitwiseOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -23979,11 +24225,12 @@ func TestExprBinary_BitwiseXor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := 
scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24065,11 +24312,12 @@ func TestExprBinary_BooleanAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24151,11 +24399,12 @@ func TestExprBinary_BooleanOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24237,11 +24486,12 @@ func TestExprBinary_Coalesce(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24323,11 +24573,12 @@ func TestExprBinary_Concat(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24409,11 +24660,12 @@ func TestExprBinary_Div(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24495,11 +24747,12 @@ func TestExprBinary_Equal(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24581,11 +24834,12 @@ func TestExprBinary_GreaterOrEqual(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24667,11 +24921,12 @@ func TestExprBinary_Greater(t *testing.T) { }, } - 
lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24753,11 +25008,12 @@ func TestExprBinary_Identical(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24839,11 +25095,12 @@ func TestExprBinary_LogicalAnd(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -24925,11 +25182,12 @@ func TestExprBinary_LogicalOr(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25011,11 +25269,12 @@ func TestExprBinary_LogicalXor(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25097,11 +25356,12 @@ func TestExprBinary_Minus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25183,11 +25443,12 @@ func TestExprBinary_Mod(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25269,11 +25530,12 @@ func TestExprBinary_Mul(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25355,11 
+25617,12 @@ func TestExprBinary_NotEqual(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25441,11 +25704,12 @@ func TestExprBinary_NotIdentical(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25527,11 +25791,12 @@ func TestExprBinary_Plus(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25613,11 +25878,12 @@ func TestExprBinary_Pow(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25699,11 +25965,12 @@ func TestExprBinary_ShiftLeft(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25785,11 +26052,12 @@ func TestExprBinary_ShiftRight(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25871,11 +26139,12 @@ func TestExprBinary_SmallerOrEqual(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -25957,11 +26226,12 @@ func TestExprBinary_Smaller(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + 
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26043,11 +26313,12 @@ func TestExprBinary_Spaceship(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26110,11 +26381,12 @@ func TestExprCast_Array(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26175,11 +26447,12 @@ func TestExprCast_Bool(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26240,11 +26513,12 @@ func TestExprCast_BoolShort(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26305,11 +26579,12 @@ func TestExprCast_Double(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26370,11 +26645,12 @@ func TestExprCast_CastFloat(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26435,11 +26711,12 @@ func TestExprCast_Int(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26500,11 +26777,12 @@ func TestExprCast_IntShort(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() 
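// ---------------------------------------------------------------------------
// Editor's note (not part of the diff): the php7.y changes further down swap
// the %union token field from *scanner.Token to *token.Token, read hidden
// tokens from ".SkippedTokens" instead of ".Tokens", and pass []*token.Token
// to the freefloating setters. The declaration below is only an inferred
// sketch of that token type, pieced together from the usages visible in this
// diff (ID and Value from the token literals, Position from the error
// handler, SkippedTokens from the setFreeFloating calls); the field types and
// the position import path are assumptions, not copies of pkg/token.
// ---------------------------------------------------------------------------
package token

import "github.com/z7zmey/php-parser/pkg/position" // assumed location of Position

type ID int // token kind, e.g. T_WHITESPACE or ';' (converted via token.ID(';'))

type Token struct {
	ID            ID                 // kind of the token
	Value         []byte             // raw source bytes; the parser slices this directly
	Position      *position.Position // source span, handed straight to errors.NewError
	SkippedTokens []*Token           // comments/whitespace collected in front of this token
}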
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26565,11 +26843,12 @@ func TestExprCast_Object(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26630,11 +26909,12 @@ func TestExprCast_String(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26695,11 +26975,12 @@ func TestExprCast_BinaryString(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -26760,10 +27041,11 @@ func TestExprCast_Unset(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } diff --git a/internal/php7/php7.go b/internal/php7/php7.go index 89954b6..7ed02ea 100644 Binary files a/internal/php7/php7.go and b/internal/php7/php7.go differ diff --git a/internal/php7/php7.y b/internal/php7/php7.y index d43148c..23503c6 100644 --- a/internal/php7/php7.y +++ b/internal/php7/php7.y @@ -5,17 +5,17 @@ import ( "bytes" "strconv" - "github.com/z7zmey/php-parser/internal/position" - "github.com/z7zmey/php-parser/internal/scanner" - "github.com/z7zmey/php-parser/pkg/ast" - "github.com/z7zmey/php-parser/pkg/token" + "github.com/z7zmey/php-parser/internal/position" + "github.com/z7zmey/php-parser/pkg/ast" + "github.com/z7zmey/php-parser/pkg/token" ) %} %union{ node ast.Vertex - token *scanner.Token + token *token.Token + tkn *token.Token list []ast.Vertex ClassExtends *ast.StmtClassExtends @@ -24,7 +24,6 @@ import ( ClosureUse *ast.ExprClosureUse } -%type $unk %token T_INCLUDE %token T_INCLUDE_ONCE %token T_EXIT @@ -297,9 +296,7 @@ start: // save position yylex.(*Parser).rootNode.GetNode().Position = position.NewNodeListPosition($1) - yylex.(*Parser).setFreeFloating(yylex.(*Parser).rootNode, token.End, yylex.(*Parser).currentToken.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(yylex.(*Parser).rootNode, token.End, yylex.(*Parser).currentToken.SkippedTokens) } ; @@ -343,14 +340,10 @@ top_statement_list: if $2 != nil { $$ = append($1, $2) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ 
-364,9 +357,7 @@ namespace_name: namePart.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(namePart, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(namePart, token.Start, $1.SkippedTokens) } | namespace_name T_NS_SEPARATOR T_STRING { @@ -377,10 +368,8 @@ namespace_name: namePart.GetNode().Position = position.NewTokenPosition($3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - yylex.(*Parser).setFreeFloating(namePart, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(namePart, token.Start, $3.SkippedTokens) } ; @@ -391,8 +380,6 @@ name: // save position $$.GetNode().Position = position.NewNodeListPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_NAMESPACE T_NS_SEPARATOR namespace_name { @@ -402,10 +389,8 @@ name: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name { @@ -415,9 +400,7 @@ name: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -426,38 +409,26 @@ top_statement: { // error $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | function_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | trait_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | interface_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_HALT_COMPILER '(' ')' ';' { @@ -467,10 +438,8 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.Tokens, append($3.Tokens, $4.Tokens...)...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.SkippedTokens, append($3.SkippedTokens, $4.SkippedTokens...)...)) } | T_NAMESPACE namespace_name ';' { @@ -482,12 +451,10 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) yylex.(*Parser).MoveFreeFloating($2[0], name) - yylex.(*Parser).setFreeFloating(name, token.End, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(name, token.End, $3.SkippedTokens) + 
yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_NAMESPACE namespace_name '{' top_statement_list '}' { @@ -499,12 +466,10 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) yylex.(*Parser).MoveFreeFloating($2[0], name) - yylex.(*Parser).setFreeFloating(name, token.End, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(name, token.End, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $5.SkippedTokens) } | T_NAMESPACE '{' top_statement_list '}' { @@ -514,11 +479,9 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Namespace, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.SkippedTokens) } | T_USE mixed_group_use_declaration ';' { @@ -528,10 +491,8 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | T_USE use_type group_use_declaration ';' { @@ -543,10 +504,8 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.SkippedTokens) } | T_USE use_declarations ';' { @@ -558,10 +517,8 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | T_USE use_type use_declarations ';' { @@ -575,10 +532,8 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.SkippedTokens) } | T_CONST const_list ';' { @@ -588,11 +543,9 @@ top_statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - 
yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } ; @@ -605,9 +558,7 @@ use_type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_CONST { @@ -617,9 +568,7 @@ use_type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -641,13 +590,11 @@ group_use_declaration: // save comments if $5 != nil { - yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $5.Tokens) + yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $5.SkippedTokens) } - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $6.Tokens) - yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name T_NS_SEPARATOR '{' unprefixed_use_declarations possible_comma '}' { @@ -667,15 +614,13 @@ group_use_declaration: $$.GetNode().Position = position.NewTokensPosition($1, $7) // save comments - yylex.(*Parser).setFreeFloating(prefixNsSeparator, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating(prefixNsSeparator, token.Start, $1.SkippedTokens) if $6 != nil { - yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $6.Tokens) + yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $6.SkippedTokens) } - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $4.Tokens) - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $7.Tokens) - yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $4.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $7.SkippedTokens) + yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $3.SkippedTokens) } ; @@ -697,13 +642,11 @@ mixed_group_use_declaration: // save comments if $5 != nil { - yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $5.Tokens) + yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $5.SkippedTokens) } - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $6.Tokens) - yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $6.SkippedTokens) + 
yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $2.SkippedTokens) } | T_NS_SEPARATOR namespace_name T_NS_SEPARATOR '{' inline_use_declarations possible_comma '}' { @@ -723,15 +666,13 @@ mixed_group_use_declaration: $$.GetNode().Position = position.NewTokensPosition($1, $7) // save comments - yylex.(*Parser).setFreeFloating(prefixNsSeparator, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating(prefixNsSeparator, token.Start, $1.SkippedTokens) if $6 != nil { - yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $6.Tokens) + yylex.(*Parser).setFreeFloatingTokens(useList, token.End, $6.SkippedTokens) } - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $4.Tokens) - yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $7.Tokens) - yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.Start, $4.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(useListBrackets, token.End, $7.SkippedTokens) + yylex.(*Parser).setFreeFloating(useListNsSeparator, token.Start, $3.SkippedTokens) } ; @@ -752,15 +693,11 @@ inline_use_declarations: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | inline_use_declaration { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -770,15 +707,11 @@ unprefixed_use_declarations: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | unprefixed_use_declaration { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -788,15 +721,11 @@ use_declarations: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | use_declaration { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -804,8 +733,6 @@ inline_use_declaration: unprefixed_use_declaration { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | use_type unprefixed_use_declaration { @@ -813,8 +740,6 @@ inline_use_declaration: // save position $$.GetNode().Position = position.NewNodesPosition($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -827,8 +752,6 @@ unprefixed_use_declaration: // save position name.GetNode().Position = position.NewNodeListPosition($1) $$.GetNode().Position = position.NewNodePosition(name) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | namespace_name T_AS T_STRING { @@ -844,10 +767,8 @@ unprefixed_use_declaration: $$.GetNode().Position = position.NewNodeListTokenPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(asAlias, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $3.SkippedTokens) } ; @@ -855,8 +776,6 @@ use_declaration: unprefixed_use_declaration { $$ = $1 - 
- yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_NS_SEPARATOR unprefixed_use_declaration { @@ -867,9 +786,7 @@ use_declaration: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -879,15 +796,11 @@ const_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | const_decl { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -902,14 +815,10 @@ inner_statement_list: if $2 != nil { $$ = append($1, $2) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -918,38 +827,26 @@ inner_statement: { // error $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | function_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | trait_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | interface_declaration_statement { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_HALT_COMPILER '(' ')' ';' { @@ -959,10 +856,8 @@ inner_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.Tokens, append($3.Tokens, $4.Tokens...)...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($2.SkippedTokens, append($3.SkippedTokens, $4.SkippedTokens...)...)) } statement: @@ -974,22 +869,16 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) } | if_stmt { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | alt_if_stmt { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_WHILE '(' expr ')' while_statement { @@ -1009,11 +898,9 @@ statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | T_DO statement T_WHILE '(' expr ')' ';' { @@ -1025,13 +912,11 @@ statement: $$.GetNode().Position = 
position.NewTokensPosition($1, $7) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $4.Tokens) - yylex.(*Parser).setFreeFloating(exprBrackets, token.End, append($6.Tokens, $7.Tokens...)) - yylex.(*Parser).setToken($$, token.SemiColon, $7.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating(exprBrackets, token.End, append($6.SkippedTokens, $7.SkippedTokens...)) + yylex.(*Parser).setToken($$, token.SemiColon, $7.SkippedTokens) } | T_FOR '(' for_exprs ';' for_exprs ';' for_exprs ')' for_statement { @@ -1052,13 +937,11 @@ statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $9) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.For, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.InitExpr, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.CondExpr, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.IncExpr, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.For, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.InitExpr, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.CondExpr, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.IncExpr, $8.SkippedTokens) } | T_SWITCH '(' expr ')' switch_case_list { @@ -1080,11 +963,9 @@ statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | T_BREAK optional_expr ';' { @@ -1094,11 +975,9 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_CONTINUE optional_expr ';' { @@ -1108,11 +987,9 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + 
yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_RETURN optional_expr ';' { @@ -1122,11 +999,9 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_GLOBAL global_var_list ';' { @@ -1136,11 +1011,9 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.VarList, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_STATIC static_var_list ';' { @@ -1150,11 +1023,9 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.VarList, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_ECHO echo_expr_list ';' { @@ -1164,12 +1035,10 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Echo, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Echo, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_INLINE_HTML { @@ -1179,9 +1048,7 @@ statement: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | expr ';' { @@ -1192,10 +1059,8 @@ statement: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | T_UNSET '(' unset_variables possible_comma ')' ';' { @@ -1205,17 +1070,15 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Unset, $2.Tokens) + 
yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Unset, $2.SkippedTokens) if $4 != nil { - yylex.(*Parser).setFreeFloating($$, token.VarList, append($4.Tokens, $5.Tokens...)) + yylex.(*Parser).setFreeFloating($$, token.VarList, append($4.SkippedTokens, $5.SkippedTokens...)) } else { - yylex.(*Parser).setFreeFloating($$, token.VarList, $5.Tokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $5.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.CloseParenthesisToken, $6.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.CloseParenthesisToken, $6.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $6.SkippedTokens) } | T_FOREACH '(' expr T_AS foreach_variable ')' foreach_statement { @@ -1234,13 +1097,10 @@ statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $7) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $6.Tokens) - - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $6.SkippedTokens) } | T_FOREACH '(' expr T_AS variable T_DOUBLE_ARROW foreach_variable ')' foreach_statement { @@ -1261,13 +1121,11 @@ statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $9) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Key, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Foreach, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Key, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $8.SkippedTokens) } | T_DECLARE '(' const_list ')' declare_statement { @@ -1278,11 +1136,9 @@ statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Declare, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ConstList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Declare, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ConstList, $4.SkippedTokens) } | ';' { @@ -1292,10 +1148,8 @@ statement: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, 
$1.SkippedTokens) } | T_TRY '{' inner_statement_list '}' catch_list finally_statement { @@ -1308,11 +1162,9 @@ statement: } // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Try, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Try, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.SkippedTokens) } | T_THROW expr ';' { @@ -1322,11 +1174,9 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_GOTO T_STRING ';' { @@ -1338,12 +1188,10 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(label, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Label, $3.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(label, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Label, $3.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | T_STRING ':' { @@ -1355,18 +1203,14 @@ statement: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Label, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Label, $2.SkippedTokens) } catch_list: /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | catch_list T_CATCH '(' catch_name_list T_VARIABLE ')' '{' inner_statement_list '}' { @@ -1381,31 +1225,25 @@ catch_list: catch.GetNode().Position = position.NewTokensPosition($2, $9) // save comments - yylex.(*Parser).setFreeFloating(catch, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(catch, token.Catch, $3.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.Start, $5.Tokens) - yylex.(*Parser).setFreeFloating(catch, token.Var, $6.Tokens) - yylex.(*Parser).setFreeFloating(catch, token.Cond, $7.Tokens) - yylex.(*Parser).setFreeFloating(catch, token.Stmts, $9.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(catch, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(catch, token.Catch, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating(catch, token.Var, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating(catch, token.Cond, $7.SkippedTokens) + yylex.(*Parser).setFreeFloating(catch, token.Stmts, $9.SkippedTokens) } ; catch_name_list: name { $$ = 
[]ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | catch_name_list '|' name { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -1413,8 +1251,6 @@ finally_statement: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_FINALLY '{' inner_statement_list '}' { @@ -1424,11 +1260,9 @@ finally_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Finally, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Finally, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $4.SkippedTokens) } ; @@ -1436,17 +1270,13 @@ unset_variables: unset_variable { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | unset_variables ',' unset_variable { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -1454,8 +1284,6 @@ unset_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1471,24 +1299,22 @@ function_declaration_statement: // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) if $2 != nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $2.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $3.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating(name, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $3.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.Name, $5.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ParamList, $7.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ReturnType, $9.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $11.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ParamList, $7.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ReturnType, $9.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $11.SkippedTokens) // normalize if $8 == nil { yylex.(*Parser).setFreeFloatingTokens($$, token.Params, $$.GetNode().Tokens[token.ReturnType]); delete($$.GetNode().Tokens, token.ReturnType) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1526,12 +1352,10 @@ class_declaration_statement: // save comments yylex.(*Parser).MoveFreeFloating($1[0], $$) - yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $7.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $9.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.SkippedTokens) + 
yylex.(*Parser).setFreeFloating(name, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $7.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $9.SkippedTokens) } | T_CLASS T_STRING extends_from implements_list backup_doc_comment '{' class_statement_list '}' { @@ -1543,12 +1367,10 @@ class_declaration_statement: $$.GetNode().Position = position.NewTokensPosition($1, $8) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $8.SkippedTokens) } ; @@ -1556,14 +1378,10 @@ class_modifiers: class_modifier { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_modifiers class_modifier { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -1576,9 +1394,7 @@ class_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FINAL { @@ -1588,9 +1404,7 @@ class_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -1605,12 +1419,10 @@ trait_declaration_statement: $$.GetNode().Position = position.NewTokensPosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $6.SkippedTokens) } ; @@ -1625,12 +1437,10 @@ interface_declaration_statement: $$.GetNode().Position = position.NewTokensPosition($1, $7) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(name, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $5.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $7.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(name, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $7.SkippedTokens) } ; @@ -1638,8 +1448,6 @@ extends_from: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_EXTENDS name { @@ -1649,9 +1457,7 @@ extends_from: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // 
save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -1659,8 +1465,6 @@ interface_extends_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_EXTENDS name_list { @@ -1670,9 +1474,7 @@ interface_extends_list: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -1680,8 +1482,6 @@ implements_list: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_IMPLEMENTS name_list { @@ -1691,9 +1491,7 @@ implements_list: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -1701,8 +1499,6 @@ foreach_variable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '&' variable { @@ -1712,9 +1508,7 @@ foreach_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_LIST '(' array_pair_list ')' { @@ -1724,11 +1518,9 @@ foreach_variable: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.List, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.List, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.SkippedTokens) } | '[' array_pair_list ']' { @@ -1738,10 +1530,8 @@ foreach_variable: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save commentsc - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.SkippedTokens) } ; @@ -1752,8 +1542,6 @@ for_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDFOR ';' { @@ -1765,12 +1553,10 @@ for_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, 
token.SemiColon, $4.SkippedTokens) } ; @@ -1781,8 +1567,6 @@ foreach_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDFOREACH ';' { @@ -1794,12 +1578,10 @@ foreach_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -1810,8 +1592,6 @@ declare_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDDECLARE ';' { @@ -1823,12 +1603,10 @@ declare_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -1843,10 +1621,8 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.SkippedTokens) } | '{' ';' case_list '}' { @@ -1858,11 +1634,9 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(caseList, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.SkippedTokens) } | ':' case_list T_ENDSWITCH ';' { @@ -1874,12 +1648,10 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + 
yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } | ':' ';' case_list T_ENDSWITCH ';' { @@ -1892,13 +1664,11 @@ switch_case_list: $$.GetNode().Position = position.NewTokensPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.Tokens) - yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $5.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(caseList, token.CaseListStart, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(caseList, token.CaseListEnd, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $5.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $5.SkippedTokens) } ; @@ -1906,8 +1676,6 @@ case_list: /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | case_list T_CASE expr case_separator inner_statement_list { @@ -1918,11 +1686,9 @@ case_list: _case.GetNode().Position = position.NewTokenNodeListPosition($2, $5) // save comments - yylex.(*Parser).setFreeFloating(_case, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(_case, token.Expr, append($4.Tokens)) - yylex.(*Parser).setToken(_case, token.CaseSeparator, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_case, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(_case, token.Expr, append($4.SkippedTokens)) + yylex.(*Parser).setToken(_case, token.CaseSeparator, $4.SkippedTokens) } | case_list T_DEFAULT case_separator inner_statement_list { @@ -1933,11 +1699,9 @@ case_list: _default.GetNode().Position = position.NewTokenNodeListPosition($2, $4) // save comments - yylex.(*Parser).setFreeFloating(_default, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloating(_default, token.Default, $3.Tokens) - yylex.(*Parser).setToken(_default, token.CaseSeparator, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_default, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(_default, token.Default, $3.SkippedTokens) + yylex.(*Parser).setToken(_default, token.CaseSeparator, $3.SkippedTokens) } ; @@ -1959,8 +1723,6 @@ while_statement: // save position $$.GetNode().Position = position.NewNodePosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' inner_statement_list T_ENDWHILE ';' { @@ -1972,12 +1734,10 @@ while_statement: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Cond, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AltEnd, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } ; @@ -1992,11 +1752,9 @@ 
if_stmt_without_else: $$.GetNode().Position = position.NewTokenNodePosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | if_stmt_without_else T_ELSEIF '(' expr ')' statement { @@ -2012,11 +1770,9 @@ if_stmt_without_else: $$.GetNode().Position = position.NewNodesPosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $5.SkippedTokens) } ; @@ -2024,8 +1780,6 @@ if_stmt: if_stmt_without_else %prec T_NOELSE { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | if_stmt_without_else T_ELSE statement { @@ -2039,9 +1793,7 @@ if_stmt: $$.GetNode().Position = position.NewNodesPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating(_else, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_else, token.Start, $2.SkippedTokens) } ; @@ -2060,12 +1812,10 @@ alt_if_stmt_without_else: $$.GetNode().Position = position.NewTokenNodeListPosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $5.SkippedTokens) } | alt_if_stmt_without_else T_ELSEIF '(' expr ')' ':' inner_statement_list { @@ -2084,12 +1834,10 @@ alt_if_stmt_without_else: _elseIf.GetNode().Position = position.NewTokenNodeListPosition($2, $7) // save comments - yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $5.Tokens) - yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_elseIf, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $5.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $6.SkippedTokens) } ; @@ -2104,13 +1852,11 @@ alt_if_stmt: // save 
comments altif := $$.(*ast.StmtAltIf) if len(altif.ElseIf) > 0 { - yylex.(*Parser).setFreeFloating(altif.ElseIf[len(altif.ElseIf)-1], token.End, append($2.Tokens, $3.Tokens...)) + yylex.(*Parser).setFreeFloating(altif.ElseIf[len(altif.ElseIf)-1], token.End, append($2.SkippedTokens, $3.SkippedTokens...)) } else { - yylex.(*Parser).setFreeFloating(altif.Stmt, token.End, append($2.Tokens, $3.Tokens...)) + yylex.(*Parser).setFreeFloating(altif.Stmt, token.End, append($2.SkippedTokens, $3.SkippedTokens...)) } - yylex.(*Parser).setToken($$, token.SemiColon, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.SemiColon, $3.SkippedTokens) } | alt_if_stmt_without_else T_ELSE ':' inner_statement_list T_ENDIF ';' { @@ -2128,12 +1874,10 @@ alt_if_stmt: $$.GetNode().Position = position.NewNodeTokenPosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating(_else, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(stmtsBrackets, token.End, append($5.Tokens, $6.Tokens...)) - yylex.(*Parser).setToken($$, token.SemiColon, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(_else, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(stmtsBrackets, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(stmtsBrackets, token.End, append($5.SkippedTokens, $6.SkippedTokens...)) + yylex.(*Parser).setToken($$, token.SemiColon, $6.SkippedTokens) } ; @@ -2141,14 +1885,10 @@ parameter_list: non_empty_parameter_list { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2156,17 +1896,13 @@ non_empty_parameter_list: parameter { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_parameter_list ',' parameter { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -2179,18 +1915,18 @@ parameter: var variable ast.Vertex variable = &ast.ExprVariable{ast.Node{}, identifier} variable.GetNode().Position = position.NewTokenPosition($4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $4.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens) if $3 != nil { variable = &ast.Variadic{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($3, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $3.SkippedTokens) } if $2 != nil { variable = &ast.Reference{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($2, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $2.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $2.SkippedTokens) } $$ = &ast.Parameter{ast.Node{}, $1, variable, nil} @@ -2204,8 +1940,6 @@ parameter: } else { $$.GetNode().Position = position.NewTokenPosition($4) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | optional_type is_reference is_variadic T_VARIABLE '=' expr { @@ -2215,19 +1949,19 @@ parameter: var variable ast.Vertex variable = &ast.ExprVariable{ast.Node{}, identifier} variable.GetNode().Position = position.NewTokenPosition($4) - 
yylex.(*Parser).setFreeFloating(variable, token.Start, $4.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.End, $5.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.End, $5.SkippedTokens) if $3 != nil { variable = &ast.Variadic{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($3, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $3.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $3.SkippedTokens) } if $2 != nil { variable = &ast.Reference{ast.Node{}, variable} variable.GetNode().Position = position.NewTokensPosition($2, $4) - yylex.(*Parser).setFreeFloating(variable, token.Start, $2.Tokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $2.SkippedTokens) } $$ = &ast.Parameter{ast.Node{}, $1, variable, $6} @@ -2241,8 +1975,6 @@ parameter: } else { $$.GetNode().Position = position.NewTokenNodePosition($4, $6) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2250,14 +1982,10 @@ optional_type: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | type_expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2265,8 +1993,6 @@ type_expr: type { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '?' type { @@ -2276,9 +2002,7 @@ type_expr: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -2291,9 +2015,7 @@ type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_CALLABLE { @@ -2303,15 +2025,11 @@ type: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | name { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2319,17 +2037,13 @@ return_type: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | ':' type_expr { $$ = $2; // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.Tokens, $$.GetNode().Tokens[token.Start]...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.SkippedTokens, $$.GetNode().Tokens[token.Start]...)) } ; @@ -2342,10 +2056,8 @@ argument_list: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $2.SkippedTokens) } | '(' non_empty_argument_list possible_comma ')' { @@ -2355,14 +2067,12 @@ argument_list: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) 
if $3 != nil { - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($3.Tokens, $4.Tokens...)) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($3.SkippedTokens, $4.SkippedTokens...)) } else { - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.Tokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $4.SkippedTokens) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2370,17 +2080,13 @@ non_empty_argument_list: argument { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_argument_list ',' argument { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -2394,8 +2100,6 @@ argument: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ELLIPSIS expr { @@ -2405,9 +2109,7 @@ argument: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -2417,15 +2119,11 @@ global_var_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | global_var { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2433,8 +2131,6 @@ global_var: simple_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2444,15 +2140,11 @@ static_var_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | static_var { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2469,9 +2161,7 @@ static_var: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_VARIABLE '=' expr { @@ -2485,10 +2175,8 @@ static_var: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } ; @@ -2496,14 +2184,10 @@ class_statement_list: class_statement_list class_statement { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2517,10 +2201,8 @@ class_statement: // save comments yylex.(*Parser).MoveFreeFloating($1[0], $$) - yylex.(*Parser).setFreeFloating($$, token.PropertyList, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.PropertyList, 
$4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } | method_modifiers T_CONST class_const_list ';' { @@ -2532,14 +2214,12 @@ class_statement: // save comments if len($1) > 0 { yylex.(*Parser).MoveFreeFloating($1[0], $$) - yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Start, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $2.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.ConstList, $4.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.ConstList, $4.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $4.SkippedTokens) } | T_USE name_list trait_adaptations { @@ -2549,9 +2229,7 @@ class_statement: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | method_modifiers T_FUNCTION returns_ref identifier backup_doc_comment '(' parameter_list ')' return_type method_body { @@ -2569,20 +2247,18 @@ class_statement: // save comments if len($1) > 0 { yylex.(*Parser).MoveFreeFloating($1[0], $$) - yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.ModifierList, $2.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Start, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $2.SkippedTokens) } if $3 == nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $4.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Function, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.Name, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ParameterList, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ParameterList, $8.SkippedTokens) } ; @@ -2590,17 +2266,13 @@ name_list: name { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | name_list ',' name { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -2612,11 +2284,8 @@ trait_adaptations: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $1.Tokens) - - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $1.SkippedTokens) } | '{' '}' { @@ -2625,10 +2294,8 @@ trait_adaptations: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - 
yylex.(*Parser).setFreeFloating($$, token.AdaptationList, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AdaptationList, $2.SkippedTokens) } | '{' trait_adaptation_list '}' { @@ -2637,10 +2304,8 @@ trait_adaptations: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.AdaptationList, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.AdaptationList, $3.SkippedTokens) } ; @@ -2648,14 +2313,10 @@ trait_adaptation_list: trait_adaptation { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | trait_adaptation_list trait_adaptation { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2665,20 +2326,16 @@ trait_adaptation: $$ = $1; // save comments - yylex.(*Parser).setFreeFloating($$, token.NameList, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.NameList, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } | trait_alias ';' { $$ = $1; // save comments - yylex.(*Parser).setFreeFloating($$, token.Alias, $2.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Alias, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $2.SkippedTokens) } ; @@ -2692,9 +2349,7 @@ trait_precedence: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) } ; @@ -2710,10 +2365,8 @@ trait_alias: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $3.SkippedTokens) } | trait_method_reference T_AS reserved_non_modifiers { @@ -2726,10 +2379,8 @@ trait_alias: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $3.SkippedTokens) } | trait_method_reference T_AS member_modifier identifier { @@ -2742,10 +2393,8 @@ trait_alias: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - yylex.(*Parser).setFreeFloating(alias, token.Start, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(alias, token.Start, $4.SkippedTokens) } | trait_method_reference T_AS member_modifier { @@ -2756,9 +2405,7 @@ trait_alias: // save comments 
yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Ref, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Ref, $2.SkippedTokens) } ; @@ -2773,15 +2420,11 @@ trait_method_reference: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | absolute_trait_method_reference { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2797,10 +2440,8 @@ absolute_trait_method_reference: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $2.SkippedTokens) } ; @@ -2813,10 +2454,8 @@ method_body: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.SemiColon, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.SemiColon, $1.SkippedTokens) } | '{' inner_statement_list '}' { @@ -2826,10 +2465,8 @@ method_body: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $3.SkippedTokens) } ; @@ -2837,8 +2474,6 @@ variable_modifiers: non_empty_member_modifiers { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_VAR { @@ -2849,9 +2484,7 @@ variable_modifiers: modifier.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(modifier, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(modifier, token.Start, $1.SkippedTokens) } ; @@ -2859,14 +2492,10 @@ method_modifiers: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_member_modifiers { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2874,14 +2503,10 @@ non_empty_member_modifiers: member_modifier { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_member_modifiers member_modifier { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2894,9 +2519,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_PROTECTED { @@ -2906,9 +2529,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | 
T_PRIVATE { @@ -2918,9 +2539,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_STATIC { @@ -2930,9 +2549,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_ABSTRACT { @@ -2942,9 +2559,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FINAL { @@ -2954,9 +2569,7 @@ member_modifier: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -2966,15 +2579,11 @@ property_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | property { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -2991,9 +2600,7 @@ property: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_VARIABLE '=' expr backup_doc_comment { @@ -3007,10 +2614,8 @@ property: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } ; @@ -3020,15 +2625,11 @@ class_const_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | class_const_decl { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3043,10 +2644,8 @@ class_const_decl: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } ; @@ -3061,10 +2660,8 @@ const_decl: $$.GetNode().Position = position.NewTokenNodePosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, 
token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } ; @@ -3074,15 +2671,11 @@ echo_expr_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | echo_expr { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3090,8 +2683,6 @@ echo_expr: expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3099,14 +2690,10 @@ for_exprs: /* empty */ { $$ = nil; - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | non_empty_for_exprs { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3116,15 +2703,11 @@ non_empty_for_exprs: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | expr { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -3141,11 +2724,9 @@ anonymous_class: $$.GetNode().Position = position.NewTokensPosition($1, $8) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Name, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $8.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Name, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $8.SkippedTokens) } ; @@ -3161,9 +2742,7 @@ new_expr: } // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NEW anonymous_class { @@ -3173,9 +2752,7 @@ new_expr: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -3190,12 +2767,10 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $6) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.List, $2.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $5.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.List, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $5.SkippedTokens) } | '[' array_pair_list ']' '=' expr { @@ -3207,11 +2782,9 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(shortList, token.ArrayPairList, $3.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Var, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + 
yylex.(*Parser).setFreeFloating(shortList, token.ArrayPairList, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Var, $4.SkippedTokens) } | variable '=' expr { @@ -3222,9 +2795,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable '=' '&' expr { @@ -3235,10 +2806,8 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Equal, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Equal, $3.SkippedTokens) } | T_CLONE expr { @@ -3248,9 +2817,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | variable T_PLUS_EQUAL expr { @@ -3261,9 +2828,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_MINUS_EQUAL expr { @@ -3274,9 +2839,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_MUL_EQUAL expr { @@ -3287,9 +2850,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_POW_EQUAL expr { @@ -3300,9 +2861,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_DIV_EQUAL expr { @@ -3313,9 +2872,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_CONCAT_EQUAL expr { @@ -3326,9 +2883,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_MOD_EQUAL expr { @@ -3339,9 +2894,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_AND_EQUAL expr { @@ -3352,9 +2905,7 @@ expr_without_variable: // save 
comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_OR_EQUAL expr { @@ -3365,9 +2916,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_XOR_EQUAL expr { @@ -3378,9 +2927,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_SL_EQUAL expr { @@ -3391,9 +2938,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_SR_EQUAL expr { @@ -3404,9 +2949,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_COALESCE_EQUAL expr { @@ -3417,9 +2960,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | variable T_INC { @@ -3430,9 +2971,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | T_INC variable { @@ -3442,9 +2981,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | variable T_DEC { @@ -3455,9 +2992,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | T_DEC variable { @@ -3467,9 +3002,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | expr T_BOOLEAN_OR expr { @@ -3480,9 +3013,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_BOOLEAN_AND expr { @@ -3493,9 +3024,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - 
yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_LOGICAL_OR expr { @@ -3506,9 +3035,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_LOGICAL_AND expr { @@ -3519,9 +3046,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_LOGICAL_XOR expr { @@ -3532,9 +3057,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '|' expr { @@ -3545,9 +3068,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '&' expr { @@ -3558,9 +3079,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '^' expr { @@ -3571,9 +3090,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '.' 
expr { @@ -3584,9 +3101,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '+' expr { @@ -3597,9 +3112,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '-' expr { @@ -3610,9 +3123,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '*' expr { @@ -3623,9 +3134,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_POW expr { @@ -3636,9 +3145,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '/' expr { @@ -3649,9 +3156,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '%' expr { @@ -3662,9 +3167,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_SL expr { @@ -3675,9 +3178,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_SR expr { @@ -3688,9 +3189,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | '+' expr %prec T_INC { @@ -3700,9 +3199,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '-' expr %prec T_INC { @@ -3712,9 +3209,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '!' 
expr { @@ -3724,9 +3219,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '~' expr { @@ -3736,9 +3229,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | expr T_IS_IDENTICAL expr { @@ -3749,9 +3240,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_NOT_IDENTICAL expr { @@ -3762,9 +3251,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_EQUAL expr { @@ -3775,9 +3262,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_NOT_EQUAL expr { @@ -3788,10 +3273,8 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setToken($$, token.Equal, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setToken($$, token.Equal, $2.SkippedTokens) } | expr '<' expr { @@ -3802,9 +3285,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_SMALLER_OR_EQUAL expr { @@ -3815,9 +3296,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr '>' expr { @@ -3828,9 +3307,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_IS_GREATER_OR_EQUAL expr { @@ -3841,9 +3318,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_SPACESHIP expr { @@ -3854,9 +3329,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) 
+ yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr T_INSTANCEOF class_name_reference { @@ -3867,9 +3340,7 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | '(' expr ')' { @@ -3879,16 +3350,12 @@ expr_without_variable: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | new_expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr '?' expr ':' expr { @@ -3899,10 +3366,8 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Cond, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.True, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.True, $4.SkippedTokens) } | expr '?' ':' expr { @@ -3913,10 +3378,8 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Cond, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.True, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Cond, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.True, $3.SkippedTokens) } | expr T_COALESCE expr { @@ -3927,15 +3390,11 @@ expr_without_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | internal_functions_in_yacc { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_INT_CAST expr { @@ -3945,10 +3404,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_DOUBLE_CAST expr { @@ -3958,10 +3415,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_STRING_CAST expr { @@ -3971,10 +3426,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) 
+ yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_ARRAY_CAST expr { @@ -3984,10 +3437,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_OBJECT_CAST expr { @@ -3997,10 +3448,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_BOOL_CAST expr { @@ -4010,10 +3459,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_UNSET_CAST expr { @@ -4023,10 +3470,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setToken($$, token.Cast, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setToken($$, token.Cast, $1.SkippedTokens) } | T_EXIT exit_expr { @@ -4044,9 +3489,7 @@ expr_without_variable: } // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '@' expr { @@ -4056,15 +3499,11 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '`' backticks_expr '`' { @@ -4074,9 +3513,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_PRINT expr { @@ -4086,9 +3523,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_YIELD { @@ -4098,9 +3533,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_YIELD expr { @@ 
-4110,9 +3543,7 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_YIELD expr T_DOUBLE_ARROW expr { @@ -4122,10 +3553,8 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Expr, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Expr, $3.SkippedTokens) } | T_YIELD_FROM expr { @@ -4135,15 +3564,11 @@ expr_without_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | inline_function { $$ = $1; - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_STATIC inline_function { @@ -4161,9 +3586,7 @@ expr_without_variable: // save comments yylex.(*Parser).setFreeFloatingTokens($$, token.Static, $$.GetNode().Tokens[token.Start]); delete($$.GetNode().Tokens, token.Start) - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens); - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens); } ; @@ -4176,16 +3599,16 @@ inline_function: $$.GetNode().Position = position.NewTokensPosition($1, $11) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) if $2 == nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $4.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.Function, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Ampersand, $4.SkippedTokens) } - yylex.(*Parser).setFreeFloating($$, token.ParameterList, $6.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ReturnType, $9.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Stmts, $11.Tokens) + yylex.(*Parser).setFreeFloating($$, token.ParameterList, $6.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ReturnType, $9.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Stmts, $11.SkippedTokens) // normalize if $8 == nil { @@ -4194,8 +3617,6 @@ inline_function: if $7 == nil { yylex.(*Parser).setFreeFloatingTokens($$, token.Params, $$.GetNode().Tokens[token.LexicalVarList]); delete($$.GetNode().Tokens, token.LexicalVarList) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_FN returns_ref '(' parameter_list ')' return_type backup_doc_comment T_DOUBLE_ARROW expr { @@ -4205,22 +3626,20 @@ inline_function: $$.GetNode().Position = position.NewTokenNodePosition($1, $9) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) if $2 == nil { - yylex.(*Parser).setFreeFloating($$, token.Function, $3.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $3.SkippedTokens) } else { - 
yylex.(*Parser).setFreeFloating($$, token.Function, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Ampersand, $3.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Function, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Ampersand, $3.SkippedTokens) }; - yylex.(*Parser).setFreeFloating($$, token.ParameterList, $5.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ReturnType, $8.Tokens) + yylex.(*Parser).setFreeFloating($$, token.ParameterList, $5.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ReturnType, $8.SkippedTokens) // normalize if $6 == nil { yylex.(*Parser).setFreeFloatingTokens($$, token.Params, $$.GetNode().Tokens[token.ReturnType]); delete($$.GetNode().Tokens, token.ReturnType) }; - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4243,8 +3662,6 @@ lexical_vars: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_USE '(' lexical_var_list ')' { @@ -4254,11 +3671,9 @@ lexical_vars: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Use, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.LexicalVarList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Use, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.LexicalVarList, $4.SkippedTokens) } ; @@ -4268,15 +3683,11 @@ lexical_var_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | lexical_var { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4291,9 +3702,7 @@ lexical_var: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '&' T_VARIABLE { @@ -4307,10 +3716,8 @@ lexical_var: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(variable, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(variable, token.Start, $2.SkippedTokens) } ; @@ -4324,8 +3731,6 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_name T_PAAMAYIM_NEKUDOTAYIM member_name argument_list { @@ -4336,9 +3741,7 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | variable_class_name T_PAAMAYIM_NEKUDOTAYIM member_name argument_list { @@ -4349,9 +3752,7 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | callable_expr 
argument_list { @@ -4362,8 +3763,6 @@ function_call: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4376,15 +3775,11 @@ class_name: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | name { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4392,14 +3787,10 @@ class_name_reference: class_name { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | new_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4407,8 +3798,6 @@ exit_expr: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '(' optional_expr ')' { @@ -4418,10 +3807,8 @@ exit_expr: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } ; @@ -4429,8 +3816,6 @@ backticks_expr: /* empty */ { $$ = []ast.Vertex{} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ENCAPSED_AND_WHITESPACE { @@ -4439,14 +3824,10 @@ backticks_expr: // save position part.GetNode().Position = position.NewTokenPosition($1) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | encaps_list { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4454,14 +3835,10 @@ ctor_arguments: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | argument_list { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4474,11 +3851,9 @@ dereferencable_scalar: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Array, $2.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Array, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $4.SkippedTokens) } | '[' array_pair_list ']' { @@ -4488,10 +3863,8 @@ dereferencable_scalar: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.ArrayPairList, $3.SkippedTokens) } | T_CONSTANT_ENCAPSED_STRING { @@ -4501,9 +3874,7 @@ dereferencable_scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -4516,9 +3887,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_DNUMBER { @@ -4528,9 +3897,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_LINE { @@ -4540,9 +3907,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FILE { @@ -4552,9 +3917,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_DIR { @@ -4564,9 +3927,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_TRAIT_C { @@ -4576,9 +3937,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_METHOD_C { @@ -4588,9 +3947,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_FUNC_C { @@ -4600,9 +3957,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NS_C { @@ -4612,9 +3967,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_CLASS_C { @@ -4624,9 +3977,7 @@ scalar: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_START_HEREDOC T_ENCAPSED_AND_WHITESPACE T_END_HEREDOC { @@ -4638,9 +3989,7 @@ scalar: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_START_HEREDOC T_END_HEREDOC { @@ -4650,9 +3999,7 @@ scalar: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '"' encaps_list '"' { 
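Note on the grammar hunks above and below: the field on each yacc symbol that carries leading whitespace/comments is renamed from Tokens to SkippedTokens, and the returnTokenToPool(yyDollar, &yyVAL) calls disappear because the parser no longer recycles tokens after every reduction. A minimal sketch of the data shape these actions now consume; token.Token and its ID/Value/SkippedTokens fields come from this diff, while printSkipped and the literal values are illustrative only:

    package main

    import (
        "fmt"

        "github.com/z7zmey/php-parser/pkg/token"
    )

    // printSkipped walks the whitespace/comment tokens that the lexer attaches
    // to the following significant token instead of exposing them separately.
    func printSkipped(tkn *token.Token) {
        for _, s := range tkn.SkippedTokens {
            fmt.Printf("%v %q\n", s.ID, string(s.Value))
        }
    }

    func main() {
        tkn := &token.Token{
            ID:    token.T_ECHO,
            Value: []byte("echo"),
            SkippedTokens: []*token.Token{
                {ID: token.T_WHITESPACE, Value: []byte(" ")},
            },
        }
        printSkipped(tkn)
    }
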
@@ -4662,9 +4009,7 @@ scalar: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_START_HEREDOC encaps_list T_END_HEREDOC { @@ -4674,21 +4019,15 @@ scalar: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | dereferencable_scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | constant { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4702,8 +4041,6 @@ constant: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | class_name T_PAAMAYIM_NEKUDOTAYIM identifier { @@ -4716,10 +4053,8 @@ constant: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $3.SkippedTokens) } | variable_class_name T_PAAMAYIM_NEKUDOTAYIM identifier { @@ -4732,10 +4067,8 @@ constant: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - yylex.(*Parser).setFreeFloating(target, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(target, token.Start, $3.SkippedTokens) } ; @@ -4743,14 +4076,10 @@ expr: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr_without_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4758,14 +4087,10 @@ optional_expr: /* empty */ { $$ = nil - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4773,8 +4098,6 @@ variable_class_name: dereferencable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4782,8 +4105,6 @@ dereferencable: variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '(' expr ')' { @@ -4793,16 +4114,12 @@ dereferencable: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | dereferencable_scalar { $$ = $1; - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4810,8 +4127,6 @@ callable_expr: callable_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | '(' expr ')' { @@ -4821,16 +4136,12 @@ callable_expr: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, 
&yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | dereferencable_scalar { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4838,8 +4149,6 @@ callable_variable: simple_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | dereferencable '[' optional_expr ']' { @@ -4849,10 +4158,8 @@ callable_variable: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | constant '[' optional_expr ']' { @@ -4862,10 +4169,8 @@ callable_variable: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | dereferencable '{' expr '}' { @@ -4875,10 +4180,8 @@ callable_variable: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | dereferencable T_OBJECT_OPERATOR property_name argument_list { @@ -4889,15 +4192,11 @@ callable_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | function_call { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -4905,14 +4204,10 @@ variable: callable_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | static_member { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | dereferencable T_OBJECT_OPERATOR property_name { @@ -4923,9 +4218,7 @@ variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } ; @@ -4940,9 +4233,7 @@ simple_variable: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '$' '{' expr '}' { @@ -4952,11 +4243,9 @@ simple_variable: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($3, token.Start, append($2.Tokens, $3.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($3, token.End, append($3.GetNode().Tokens[token.End], $4.Tokens...)) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($3, token.Start, append($2.SkippedTokens, $3.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($3, token.End, append($3.GetNode().Tokens[token.End], $4.SkippedTokens...)) } | '$' simple_variable { @@ -4966,9 +4255,7 @@ simple_variable: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -4982,9 +4269,7 @@ static_member: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } | variable_class_name T_PAAMAYIM_NEKUDOTAYIM simple_variable { @@ -4995,9 +4280,7 @@ static_member: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Name, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Name, $2.SkippedTokens) } ; @@ -5005,8 +4288,6 @@ new_variable: simple_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | new_variable '[' optional_expr ']' { @@ -5016,10 +4297,8 @@ new_variable: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | new_variable '{' expr '}' { @@ -5029,10 +4308,8 @@ new_variable: $$.GetNode().Position = position.NewNodeTokenPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | new_variable T_OBJECT_OPERATOR property_name { @@ -5043,9 +4320,7 @@ new_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | class_name T_PAAMAYIM_NEKUDOTAYIM simple_variable { @@ -5056,9 +4331,7 @@ new_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } | new_variable T_PAAMAYIM_NEKUDOTAYIM simple_variable { @@ -5069,9 +4342,7 @@ new_variable: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) } ; @@ -5084,25 +4355,19 @@ member_name: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - 
yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '{' expr '}' { $$ = $2; // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.Tokens, $$.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.Tokens...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.SkippedTokens, $$.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.SkippedTokens...)) } | simple_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5115,25 +4380,19 @@ property_name: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '{' expr '}' { $$ = $2; // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.Tokens, $$.GetNode().Tokens[token.Start]...)) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.Tokens...)) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Start, append($1.SkippedTokens, $$.GetNode().Tokens[token.Start]...)) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, append($$.GetNode().Tokens[token.End], $3.SkippedTokens...)) } | simple_variable { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5141,8 +4400,6 @@ array_pair_list: non_empty_array_pair_list { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5150,14 +4407,10 @@ possible_array_pair: /* empty */ { $$ = &ast.ExprArrayItem{ast.Node{}, false, nil, nil} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | array_pair { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5171,9 +4424,7 @@ non_empty_array_pair_list: $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } | possible_array_pair { @@ -5182,8 +4433,6 @@ non_empty_array_pair_list: } else { $$ = []ast.Vertex{$1} } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; @@ -5197,9 +4446,7 @@ array_pair: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) } | expr { @@ -5210,8 +4457,6 @@ array_pair: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | expr T_DOUBLE_ARROW '&' variable { @@ -5224,10 +4469,8 @@ array_pair: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setFreeFloating(reference, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(reference, token.Start, $3.SkippedTokens) } | '&' variable { @@ -5239,9 
+4482,7 @@ array_pair: reference.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_ELLIPSIS expr { @@ -5251,9 +4492,7 @@ array_pair: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | expr T_DOUBLE_ARROW T_LIST '(' array_pair_list ')' { @@ -5267,12 +4506,10 @@ array_pair: // save comments yylex.(*Parser).MoveFreeFloating($1, $$) - yylex.(*Parser).setFreeFloating($$, token.Expr, $2.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.Start, $3.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.List, $4.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Expr, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.Start, $3.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.List, $4.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $6.SkippedTokens) } | T_LIST '(' array_pair_list ')' { @@ -5285,11 +4522,9 @@ array_pair: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.List, $2.Tokens) - yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.List, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(listNode, token.ArrayPairList, $4.SkippedTokens) } ; @@ -5297,8 +4532,6 @@ encaps_list: encaps_list encaps_var { $$ = append($1, $2) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | encaps_list T_ENCAPSED_AND_WHITESPACE { @@ -5309,15 +4542,11 @@ encaps_list: encapsed.GetNode().Position = position.NewTokenPosition($2) // save comments - yylex.(*Parser).setFreeFloating(encapsed, token.Start, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(encapsed, token.Start, $2.SkippedTokens) } | encaps_var { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_ENCAPSED_AND_WHITESPACE encaps_var { @@ -5328,9 +4557,7 @@ encaps_list: encapsed.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating(encapsed, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(encapsed, token.Start, $1.SkippedTokens) } ; @@ -5345,9 +4572,7 @@ encaps_var: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_VARIABLE '[' encaps_var_offset ']' { @@ -5361,10 +4586,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, 
$4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $4.SkippedTokens) } | T_VARIABLE T_OBJECT_OPERATOR T_STRING { @@ -5380,10 +4603,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setFreeFloating($$, token.Var, $2.Tokens) - yylex.(*Parser).setFreeFloating(fetch, token.Start, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Var, $2.SkippedTokens) + yylex.(*Parser).setFreeFloating(fetch, token.Start, $3.SkippedTokens) } | T_DOLLAR_OPEN_CURLY_BRACES expr '}' { @@ -5395,10 +4616,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setToken($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | T_DOLLAR_OPEN_CURLY_BRACES T_STRING_VARNAME '}' { @@ -5412,10 +4631,8 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $3) // save comments - yylex.(*Parser).setToken($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } | T_DOLLAR_OPEN_CURLY_BRACES T_STRING_VARNAME '[' expr ']' '}' { @@ -5429,22 +4646,18 @@ encaps_var: $$.GetNode().Position = position.NewTokensPosition($1, $6) // save comments - yylex.(*Parser).setToken(variable, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $3.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $5.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $6.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken(variable, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Var, $3.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.Expr, $5.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $6.SkippedTokens) } | T_CURLY_OPEN variable '}' { $$ = $2; // save comments - yylex.(*Parser).setToken($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setToken($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens($$, token.End, $3.SkippedTokens) } ; @@ -5457,9 +4670,7 @@ encaps_var_offset: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_NUM_STRING { @@ -5474,9 +4685,7 @@ encaps_var_offset: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | '-' T_NUM_STRING { @@ -5500,9 +4709,7 @@ encaps_var_offset: $$.GetNode().Position = position.NewTokensPosition($1, $2) // save comments - 
yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_VARIABLE { @@ -5514,9 +4721,7 @@ encaps_var_offset: $$.GetNode().Position = position.NewTokenPosition($1) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -5529,15 +4734,13 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokensPosition($1, $5) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloating($$, token.Isset, $2.Tokens) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloating($$, token.Isset, $2.SkippedTokens) if $4 == nil { - yylex.(*Parser).setFreeFloating($$, token.VarList, $5.Tokens) + yylex.(*Parser).setFreeFloating($$, token.VarList, $5.SkippedTokens) } else { - yylex.(*Parser).setFreeFloating($$, token.VarList, append($4.Tokens, $5.Tokens...)) + yylex.(*Parser).setFreeFloating($$, token.VarList, append($4.SkippedTokens, $5.SkippedTokens...)) } - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | T_EMPTY '(' expr ')' { @@ -5549,11 +4752,9 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | T_INCLUDE expr { @@ -5563,9 +4764,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_INCLUDE_ONCE expr { @@ -5575,9 +4774,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_EVAL '(' expr ')' { @@ -5589,11 +4786,9 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokensPosition($1, $4) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.Tokens) - yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens) + yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens) } | T_REQUIRE expr { @@ -5603,9 +4798,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - 
yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } | T_REQUIRE_ONCE expr { @@ -5615,9 +4808,7 @@ internal_functions_in_yacc: $$.GetNode().Position = position.NewTokenNodePosition($1, $2) // save comments - yylex.(*Parser).setFreeFloating($$, token.Start, $1.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens) } ; @@ -5625,17 +4816,13 @@ isset_variables: isset_variable { $$ = []ast.Vertex{$1} - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } | isset_variables ',' isset_variable { $$ = append($1, $3) // save comments - yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.Tokens) - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) + yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens) } ; @@ -5643,8 +4830,6 @@ isset_variable: expr { $$ = $1 - - yylex.(*Parser).returnTokenToPool(yyDollar, &yyVAL) } ; diff --git a/internal/php7/php7_bench_test.go b/internal/php7/php7_bench_test.go index 558c54b..d5f2636 100644 --- a/internal/php7/php7_bench_test.go +++ b/internal/php7/php7_bench_test.go @@ -382,7 +382,7 @@ CAD; ` for n := 0; n < b.N; n++ { - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() } diff --git a/internal/php7/php7_test.go b/internal/php7/php7_test.go index 68364f5..8d52038 100644 --- a/internal/php7/php7_test.go +++ b/internal/php7/php7_test.go @@ -19595,11 +19595,12 @@ func TestPhp7(t *testing.T) { }, } - lexer := scanner.NewLexer(src, "7.4", false, nil) + lexer := scanner.NewLexer(src, "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19734,11 +19735,12 @@ func TestPhp5Strings(t *testing.T) { }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19962,11 +19964,12 @@ CAD; }, } - lexer := scanner.NewLexer([]byte(src), "7.4", false, nil) + lexer := scanner.NewLexer([]byte(src), "7.4", nil) php7parser := php7.NewParser(lexer, nil) php7parser.Parse() actual := php7parser.GetRootNode() traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual) + traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual) assert.DeepEqual(t, expected, actual) } @@ -19989,7 +19992,7 @@ func TestPhp7ControlCharsErrors(t *testing.T) { parserErrors = append(parserErrors, e) } - lexer := scanner.NewLexer([]byte(src), "7.4", false, errorHandlerFunc) + lexer := scanner.NewLexer([]byte(src), "7.4", errorHandlerFunc) php7parser := php7.NewParser(lexer, errorHandlerFunc) php7parser.Parse() assert.DeepEqual(t, expected, parserErrors) diff --git a/internal/position/position.go b/internal/position/position.go index cbcc309..2603826 100644 --- a/internal/position/position.go +++ b/internal/position/position.go @@ -1,9 +1,9 @@ package position import ( - "github.com/z7zmey/php-parser/internal/scanner" "github.com/z7zmey/php-parser/pkg/ast" 
"github.com/z7zmey/php-parser/pkg/position" + "github.com/z7zmey/php-parser/pkg/token" ) type startPos struct { @@ -95,7 +95,7 @@ func NewNodePosition(n ast.Vertex) *position.Position { } // NewTokenPosition returns new Position -func NewTokenPosition(t *scanner.Token) *position.Position { +func NewTokenPosition(t *token.Token) *position.Position { return &position.Position{ StartLine: t.Position.StartLine, EndLine: t.Position.EndLine, @@ -105,7 +105,7 @@ func NewTokenPosition(t *scanner.Token) *position.Position { } // NewTokensPosition returns new Position -func NewTokensPosition(startToken *scanner.Token, endToken *scanner.Token) *position.Position { +func NewTokensPosition(startToken *token.Token, endToken *token.Token) *position.Position { return &position.Position{ StartLine: startToken.Position.StartLine, EndLine: endToken.Position.EndLine, @@ -115,7 +115,7 @@ func NewTokensPosition(startToken *scanner.Token, endToken *scanner.Token) *posi } // NewTokenNodePosition returns new Position -func NewTokenNodePosition(t *scanner.Token, n ast.Vertex) *position.Position { +func NewTokenNodePosition(t *token.Token, n ast.Vertex) *position.Position { return &position.Position{ StartLine: t.Position.StartLine, EndLine: getNodeEndPos(n).endLine, @@ -125,7 +125,7 @@ func NewTokenNodePosition(t *scanner.Token, n ast.Vertex) *position.Position { } // NewNodeTokenPosition returns new Position -func NewNodeTokenPosition(n ast.Vertex, t *scanner.Token) *position.Position { +func NewNodeTokenPosition(n ast.Vertex, t *token.Token) *position.Position { return &position.Position{ StartLine: getNodeStartPos(n).startLine, EndLine: t.Position.EndLine, @@ -145,7 +145,7 @@ func NewNodesPosition(startNode ast.Vertex, endNode ast.Vertex) *position.Positi } // NewNodeListTokenPosition returns new Position -func NewNodeListTokenPosition(list []ast.Vertex, t *scanner.Token) *position.Position { +func NewNodeListTokenPosition(list []ast.Vertex, t *token.Token) *position.Position { return &position.Position{ StartLine: getListStartPos(list).startLine, EndLine: t.Position.EndLine, @@ -155,7 +155,7 @@ func NewNodeListTokenPosition(list []ast.Vertex, t *scanner.Token) *position.Pos } // NewTokenNodeListPosition returns new Position -func NewTokenNodeListPosition(t *scanner.Token, list []ast.Vertex) *position.Position { +func NewTokenNodeListPosition(t *token.Token, list []ast.Vertex) *position.Position { return &position.Position{ StartLine: t.Position.StartLine, EndLine: getListEndPos(list).endLine, @@ -185,7 +185,7 @@ func NewNodeListNodePosition(list []ast.Vertex, n ast.Vertex) *position.Position } // NewOptionalListTokensPosition returns new Position -func NewOptionalListTokensPosition(list []ast.Vertex, t *scanner.Token, endToken *scanner.Token) *position.Position { +func NewOptionalListTokensPosition(list []ast.Vertex, t *token.Token, endToken *token.Token) *position.Position { if list == nil { return &position.Position{ StartLine: t.Position.StartLine, diff --git a/internal/position/position_test.go b/internal/position/position_test.go index f932f2d..1fd6c83 100644 --- a/internal/position/position_test.go +++ b/internal/position/position_test.go @@ -5,15 +5,15 @@ import ( "testing" builder "github.com/z7zmey/php-parser/internal/position" - "github.com/z7zmey/php-parser/internal/scanner" "github.com/z7zmey/php-parser/pkg/ast" "github.com/z7zmey/php-parser/pkg/position" + "github.com/z7zmey/php-parser/pkg/token" ) func TestNewTokenPosition(t *testing.T) { - tkn := &scanner.Token{ + tkn := &token.Token{ Value: 
[]byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 0, @@ -29,18 +29,18 @@ func TestNewTokenPosition(t *testing.T) { } func TestNewTokensPosition(t *testing.T) { - token1 := &scanner.Token{ + token1 := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 0, EndPos: 3, }, } - token2 := &scanner.Token{ + token2 := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 2, EndLine: 2, StartPos: 4, @@ -71,9 +71,9 @@ func TestNewNodePosition(t *testing.T) { } func TestNewTokenNodePosition(t *testing.T) { - tkn := &scanner.Token{ + tkn := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 0, @@ -108,9 +108,9 @@ func TestNewNodeTokenPosition(t *testing.T) { }, } - tkn := &scanner.Token{ + tkn := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 2, EndLine: 2, StartPos: 10, @@ -202,9 +202,9 @@ func TestNewNodeListTokenPosition(t *testing.T) { }, } - tkn := &scanner.Token{ + tkn := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 3, EndLine: 3, StartPos: 20, @@ -218,9 +218,9 @@ func TestNewNodeListTokenPosition(t *testing.T) { } func TestNewTokenNodeListPosition(t *testing.T) { - tkn := &scanner.Token{ + tkn := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 0, @@ -332,18 +332,18 @@ func TestNewNodeListNodePosition(t *testing.T) { } func TestNewOptionalListTokensPosition(t *testing.T) { - token1 := &scanner.Token{ + token1 := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 0, EndPos: 3, }, } - token2 := &scanner.Token{ + token2 := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 2, EndLine: 2, StartPos: 4, @@ -378,18 +378,18 @@ func TestNewOptionalListTokensPosition2(t *testing.T) { }, } - token1 := &scanner.Token{ + token1 := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 4, EndLine: 4, StartPos: 27, EndPos: 29, }, } - token2 := &scanner.Token{ + token2 := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 5, EndLine: 5, StartPos: 30, @@ -426,9 +426,9 @@ func TestNilNodeListPos(t *testing.T) { } func TestNilNodeListTokenPos(t *testing.T) { - token := &scanner.Token{ + token := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 0, @@ -459,9 +459,9 @@ func TestEmptyNodeListPos(t *testing.T) { } func TestEmptyNodeListTokenPos(t *testing.T) { - token := &scanner.Token{ + token := &token.Token{ Value: []byte(`foo`), - Position: position.Position{ + Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 0, diff --git a/internal/scanner/lexer.go b/internal/scanner/lexer.go index 7456961..b21ff7c 100644 --- a/internal/scanner/lexer.go +++ b/internal/scanner/lexer.go @@ -13,31 +13,32 @@ import ( type Lexer struct { data []byte phpVersion string - withTokens bool errHandlerFunc func(*errors.Error) + sts, ste int p, pe, cs int ts, te, act int stack []int top int 
heredocLabel []byte - tokenPool *TokenPool + tokenPool *token.Pool + positionPool *position.Pool newLines NewLines } -func NewLexer(data []byte, phpVersion string, withTokens bool, errHandlerFunc func(*errors.Error)) *Lexer { +func NewLexer(data []byte, phpVersion string, errHandlerFunc func(*errors.Error)) *Lexer { lex := &Lexer{ data: data, phpVersion: phpVersion, - withTokens: withTokens, errHandlerFunc: errHandlerFunc, pe: len(data), stack: make([]int, 0), - tokenPool: &TokenPool{}, - newLines: NewLines{make([]int, 0, 128)}, + tokenPool: token.NewPool(position.DefaultBlockSize), + positionPool: position.NewPool(position.DefaultBlockSize), + newLines: NewLines{make([]int, 0, 128)}, } initLexer(lex) @@ -45,26 +46,37 @@ func NewLexer(data []byte, phpVersion string, withTokens bool, errHandlerFunc fu return lex } -func (lex *Lexer) ReturnTokenToPool(t *Token) { - lex.tokenPool.Put(t) +func (lex *Lexer) setTokenPosition(token *token.Token) { + pos := lex.positionPool.Get() + + pos.StartLine = lex.newLines.GetLine(lex.ts) + pos.EndLine = lex.newLines.GetLine(lex.te - 1) + pos.StartPos = lex.ts + pos.EndPos = lex.te + + token.Position = pos } -func (lex *Lexer) setTokenPosition(token *Token) { - token.Position.StartLine = lex.newLines.GetLine(lex.ts) - token.Position.EndLine = lex.newLines.GetLine(lex.te - 1) - token.Position.StartPos = lex.ts - token.Position.EndPos = lex.te -} - -func (lex *Lexer) addHiddenToken(t *Token, id TokenID, ps, pe int) { - if !lex.withTokens { - return +func (lex *Lexer) addSkippedToken(t *token.Token, id token.ID, ps, pe int) { + if lex.sts == 0 { + lex.sts = lex.ts } - t.Tokens = append(t.Tokens, token.Token{ - ID: token.ID(id), - Value: lex.data[ps:pe], - }) + lex.ste = lex.te + + // TODO remove after parser refactoring + + skippedTkn := lex.tokenPool.Get() + skippedTkn.ID = id + skippedTkn.Value = lex.data[ps:pe] + + lex.setTokenPosition(skippedTkn) + + if t.SkippedTokens == nil { + t.SkippedTokens = make([]*token.Token, 0, 2) + } + + t.SkippedTokens = append(t.SkippedTokens, skippedTkn) } func (lex *Lexer) isNotStringVar() bool { diff --git a/internal/scanner/lexer_tokens.go b/internal/scanner/lexer_tokens.go deleted file mode 100644 index bd54024..0000000 --- a/internal/scanner/lexer_tokens.go +++ /dev/null @@ -1,145 +0,0 @@ -package scanner - -type TokenID int - -//go:generate stringer -type=TokenID -output ./tokenid_string.go -const ( - T_INCLUDE TokenID = iota + 57346 - T_INCLUDE_ONCE - T_EXIT - T_IF - T_LNUMBER - T_DNUMBER - T_STRING - T_STRING_VARNAME - T_VARIABLE - T_NUM_STRING - T_INLINE_HTML - T_CHARACTER - T_BAD_CHARACTER - T_ENCAPSED_AND_WHITESPACE - T_CONSTANT_ENCAPSED_STRING - T_ECHO - T_DO - T_WHILE - T_ENDWHILE - T_FOR - T_ENDFOR - T_FOREACH - T_ENDFOREACH - T_DECLARE - T_ENDDECLARE - T_AS - T_SWITCH - T_ENDSWITCH - T_CASE - T_DEFAULT - T_BREAK - T_CONTINUE - T_GOTO - T_FUNCTION - T_FN - T_CONST - T_RETURN - T_TRY - T_CATCH - T_FINALLY - T_THROW - T_USE - T_INSTEADOF - T_GLOBAL - T_VAR - T_UNSET - T_ISSET - T_EMPTY - T_HALT_COMPILER - T_CLASS - T_TRAIT - T_INTERFACE - T_EXTENDS - T_IMPLEMENTS - T_OBJECT_OPERATOR - T_DOUBLE_ARROW - T_LIST - T_ARRAY - T_CALLABLE - T_CLASS_C - T_TRAIT_C - T_METHOD_C - T_FUNC_C - T_LINE - T_FILE - T_COMMENT - T_DOC_COMMENT - T_OPEN_TAG - T_OPEN_TAG_WITH_ECHO - T_CLOSE_TAG - T_WHITESPACE - T_START_HEREDOC - T_END_HEREDOC - T_DOLLAR_OPEN_CURLY_BRACES - T_CURLY_OPEN - T_PAAMAYIM_NEKUDOTAYIM - T_NAMESPACE - T_NS_C - T_DIR - T_NS_SEPARATOR - T_ELLIPSIS - T_EVAL - T_REQUIRE - T_REQUIRE_ONCE - T_LOGICAL_OR - T_LOGICAL_XOR 
- T_LOGICAL_AND - T_INSTANCEOF - T_NEW - T_CLONE - T_ELSEIF - T_ELSE - T_ENDIF - T_PRINT - T_YIELD - T_STATIC - T_ABSTRACT - T_FINAL - T_PRIVATE - T_PROTECTED - T_PUBLIC - T_INC - T_DEC - T_YIELD_FROM - T_INT_CAST - T_DOUBLE_CAST - T_STRING_CAST - T_ARRAY_CAST - T_OBJECT_CAST - T_BOOL_CAST - T_UNSET_CAST - T_COALESCE - T_SPACESHIP - T_NOELSE - T_PLUS_EQUAL - T_MINUS_EQUAL - T_MUL_EQUAL - T_POW_EQUAL - T_DIV_EQUAL - T_CONCAT_EQUAL - T_MOD_EQUAL - T_AND_EQUAL - T_OR_EQUAL - T_XOR_EQUAL - T_SL_EQUAL - T_SR_EQUAL - T_COALESCE_EQUAL - T_BOOLEAN_OR - T_BOOLEAN_AND - T_POW - T_SL - T_SR - T_IS_IDENTICAL - T_IS_NOT_IDENTICAL - T_IS_EQUAL - T_IS_NOT_EQUAL - T_IS_SMALLER_OR_EQUAL - T_IS_GREATER_OR_EQUAL -) diff --git a/internal/scanner/scanner.go b/internal/scanner/scanner.go index c6ebf1c..f90db23 100644 Binary files a/internal/scanner/scanner.go and b/internal/scanner/scanner.go differ diff --git a/internal/scanner/scanner.rl b/internal/scanner/scanner.rl index cd29221..18874ae 100644 --- a/internal/scanner/scanner.rl +++ b/internal/scanner/scanner.rl @@ -4,6 +4,8 @@ import ( "fmt" "strconv" "strings" + + "github.com/z7zmey/php-parser/pkg/token" ) %%{ @@ -18,13 +20,14 @@ func initLexer(lex *Lexer) { %% write init; } -func (lex *Lexer) Lex() *Token { +func (lex *Lexer) Lex() *token.Token { eof := lex.pe - var tok TokenID + var tok token.ID - token := lex.tokenPool.Get() - token.Tokens = token.Tokens[:0] - token.Value = lex.data[0:0] + tkn := lex.tokenPool.Get() + + lex.sts = 0 + lex.ste = 0 lblStart := 0 lblEnd := 0 @@ -124,7 +127,7 @@ func (lex *Lexer) Lex() *Token { main := |* "#!" any* :>> newline => { - lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te) + lex.addSkippedToken(tkn, token.T_COMMENT, lex.ts, lex.te) }; any => { fnext html; @@ -135,42 +138,42 @@ func (lex *Lexer) Lex() *Token { html := |* any_line+ -- ' { lex.ungetStr("<") - lex.setTokenPosition(token) - tok = T_INLINE_HTML; + lex.setTokenPosition(tkn) + tok = token.T_INLINE_HTML; fbreak; }; ' { - lex.addHiddenToken(token, T_OPEN_TAG, lex.ts, lex.te) + lex.addSkippedToken(tkn, token.T_OPEN_TAG, lex.ts, lex.te) fnext php; }; ' { lex.ungetCnt(lex.te - lex.ts - 5) - lex.addHiddenToken(token, T_OPEN_TAG, lex.ts, lex.ts+5) + lex.addSkippedToken(tkn, token.T_OPEN_TAG, lex.ts, lex.ts+5) fnext php; }; ' { - lex.setTokenPosition(token); - tok = T_ECHO; + lex.setTokenPosition(tkn); + tok = token.T_ECHO; fnext php; fbreak; }; *|; php := |* - whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)}; - '?>' newline? => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext html; fbreak;}; - ';' whitespace_line* '?>' newline? => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext html; fbreak;}; + whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)}; + '?>' newline? => {lex.setTokenPosition(tkn); tok = token.ID(int(';')); fnext html; fbreak;}; + ';' whitespace_line* '?>' newline? 
=> {lex.setTokenPosition(tkn); tok = token.ID(int(';')); fnext html; fbreak;}; - (dnum | exponent_dnum) => {lex.setTokenPosition(token); tok = T_DNUMBER; fbreak;}; + (dnum | exponent_dnum) => {lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak;}; bnum => { s := strings.Replace(string(lex.data[lex.ts+2:lex.te]), "_", "", -1) _, err := strconv.ParseInt(s, 2, 0) if err == nil { - lex.setTokenPosition(token); tok = T_LNUMBER; fbreak; + lex.setTokenPosition(tkn); tok = token.T_LNUMBER; fbreak; } - lex.setTokenPosition(token); tok = T_DNUMBER; fbreak; + lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak; }; lnum => { base := 10 @@ -182,142 +185,142 @@ func (lex *Lexer) Lex() *Token { _, err := strconv.ParseInt(s, base, 0) if err == nil { - lex.setTokenPosition(token); tok = T_LNUMBER; fbreak; + lex.setTokenPosition(tkn); tok = token.T_LNUMBER; fbreak; } - lex.setTokenPosition(token); tok = T_DNUMBER; fbreak; + lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak; }; hnum => { s := strings.Replace(string(lex.data[lex.ts+2:lex.te]), "_", "", -1) _, err := strconv.ParseInt(s, 16, 0) if err == nil { - lex.setTokenPosition(token); tok = T_LNUMBER; fbreak; + lex.setTokenPosition(tkn); tok = token.T_LNUMBER; fbreak; } - lex.setTokenPosition(token); tok = T_DNUMBER; fbreak; + lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak; }; - 'abstract'i => {lex.setTokenPosition(token); tok = T_ABSTRACT; fbreak;}; - 'array'i => {lex.setTokenPosition(token); tok = T_ARRAY; fbreak;}; - 'as'i => {lex.setTokenPosition(token); tok = T_AS; fbreak;}; - 'break'i => {lex.setTokenPosition(token); tok = T_BREAK; fbreak;}; - 'callable'i => {lex.setTokenPosition(token); tok = T_CALLABLE; fbreak;}; - 'case'i => {lex.setTokenPosition(token); tok = T_CASE; fbreak;}; - 'catch'i => {lex.setTokenPosition(token); tok = T_CATCH; fbreak;}; - 'class'i => {lex.setTokenPosition(token); tok = T_CLASS; fbreak;}; - 'clone'i => {lex.setTokenPosition(token); tok = T_CLONE; fbreak;}; - 'const'i => {lex.setTokenPosition(token); tok = T_CONST; fbreak;}; - 'continue'i => {lex.setTokenPosition(token); tok = T_CONTINUE; fbreak;}; - 'declare'i => {lex.setTokenPosition(token); tok = T_DECLARE; fbreak;}; - 'default'i => {lex.setTokenPosition(token); tok = T_DEFAULT; fbreak;}; - 'do'i => {lex.setTokenPosition(token); tok = T_DO; fbreak;}; - 'echo'i => {lex.setTokenPosition(token); tok = T_ECHO; fbreak;}; - 'else'i => {lex.setTokenPosition(token); tok = T_ELSE; fbreak;}; - 'elseif'i => {lex.setTokenPosition(token); tok = T_ELSEIF; fbreak;}; - 'empty'i => {lex.setTokenPosition(token); tok = T_EMPTY; fbreak;}; - 'enddeclare'i => {lex.setTokenPosition(token); tok = T_ENDDECLARE; fbreak;}; - 'endfor'i => {lex.setTokenPosition(token); tok = T_ENDFOR; fbreak;}; - 'endforeach'i => {lex.setTokenPosition(token); tok = T_ENDFOREACH; fbreak;}; - 'endif'i => {lex.setTokenPosition(token); tok = T_ENDIF; fbreak;}; - 'endswitch'i => {lex.setTokenPosition(token); tok = T_ENDSWITCH; fbreak;}; - 'endwhile'i => {lex.setTokenPosition(token); tok = T_ENDWHILE; fbreak;}; - 'eval'i => {lex.setTokenPosition(token); tok = T_EVAL; fbreak;}; - 'exit'i | 'die'i => {lex.setTokenPosition(token); tok = T_EXIT; fbreak;}; - 'extends'i => {lex.setTokenPosition(token); tok = T_EXTENDS; fbreak;}; - 'final'i => {lex.setTokenPosition(token); tok = T_FINAL; fbreak;}; - 'finally'i => {lex.setTokenPosition(token); tok = T_FINALLY; fbreak;}; - 'for'i => {lex.setTokenPosition(token); tok = T_FOR; fbreak;}; - 'foreach'i => {lex.setTokenPosition(token); tok = T_FOREACH; 
fbreak;}; - 'function'i | 'cfunction'i => {lex.setTokenPosition(token); tok = T_FUNCTION; fbreak;}; - 'fn'i => {lex.setTokenPosition(token); tok = T_FN; fbreak;}; - 'global'i => {lex.setTokenPosition(token); tok = T_GLOBAL; fbreak;}; - 'goto'i => {lex.setTokenPosition(token); tok = T_GOTO; fbreak;}; - 'if'i => {lex.setTokenPosition(token); tok = T_IF; fbreak;}; - 'isset'i => {lex.setTokenPosition(token); tok = T_ISSET; fbreak;}; - 'implements'i => {lex.setTokenPosition(token); tok = T_IMPLEMENTS; fbreak;}; - 'instanceof'i => {lex.setTokenPosition(token); tok = T_INSTANCEOF; fbreak;}; - 'insteadof'i => {lex.setTokenPosition(token); tok = T_INSTEADOF; fbreak;}; - 'interface'i => {lex.setTokenPosition(token); tok = T_INTERFACE; fbreak;}; - 'list'i => {lex.setTokenPosition(token); tok = T_LIST; fbreak;}; - 'namespace'i => {lex.setTokenPosition(token); tok = T_NAMESPACE; fbreak;}; - 'private'i => {lex.setTokenPosition(token); tok = T_PRIVATE; fbreak;}; - 'public'i => {lex.setTokenPosition(token); tok = T_PUBLIC; fbreak;}; - 'print'i => {lex.setTokenPosition(token); tok = T_PRINT; fbreak;}; - 'protected'i => {lex.setTokenPosition(token); tok = T_PROTECTED; fbreak;}; - 'return'i => {lex.setTokenPosition(token); tok = T_RETURN; fbreak;}; - 'static'i => {lex.setTokenPosition(token); tok = T_STATIC; fbreak;}; - 'switch'i => {lex.setTokenPosition(token); tok = T_SWITCH; fbreak;}; - 'throw'i => {lex.setTokenPosition(token); tok = T_THROW; fbreak;}; - 'trait'i => {lex.setTokenPosition(token); tok = T_TRAIT; fbreak;}; - 'try'i => {lex.setTokenPosition(token); tok = T_TRY; fbreak;}; - 'unset'i => {lex.setTokenPosition(token); tok = T_UNSET; fbreak;}; - 'use'i => {lex.setTokenPosition(token); tok = T_USE; fbreak;}; - 'var'i => {lex.setTokenPosition(token); tok = T_VAR; fbreak;}; - 'while'i => {lex.setTokenPosition(token); tok = T_WHILE; fbreak;}; - 'yield'i whitespace_line* 'from'i => {lex.setTokenPosition(token); tok = T_YIELD_FROM; fbreak;}; - 'yield'i => {lex.setTokenPosition(token); tok = T_YIELD; fbreak;}; - 'include'i => {lex.setTokenPosition(token); tok = T_INCLUDE; fbreak;}; - 'include_once'i => {lex.setTokenPosition(token); tok = T_INCLUDE_ONCE; fbreak;}; - 'require'i => {lex.setTokenPosition(token); tok = T_REQUIRE; fbreak;}; - 'require_once'i => {lex.setTokenPosition(token); tok = T_REQUIRE_ONCE; fbreak;}; - '__CLASS__'i => {lex.setTokenPosition(token); tok = T_CLASS_C; fbreak;}; - '__DIR__'i => {lex.setTokenPosition(token); tok = T_DIR; fbreak;}; - '__FILE__'i => {lex.setTokenPosition(token); tok = T_FILE; fbreak;}; - '__FUNCTION__'i => {lex.setTokenPosition(token); tok = T_FUNC_C; fbreak;}; - '__LINE__'i => {lex.setTokenPosition(token); tok = T_LINE; fbreak;}; - '__NAMESPACE__'i => {lex.setTokenPosition(token); tok = T_NS_C; fbreak;}; - '__METHOD__'i => {lex.setTokenPosition(token); tok = T_METHOD_C; fbreak;}; - '__TRAIT__'i => {lex.setTokenPosition(token); tok = T_TRAIT_C; fbreak;}; - '__halt_compiler'i => {lex.setTokenPosition(token); tok = T_HALT_COMPILER; fnext halt_compiller_open_parenthesis; fbreak;}; - 'new'i => {lex.setTokenPosition(token); tok = T_NEW; fbreak;}; - 'and'i => {lex.setTokenPosition(token); tok = T_LOGICAL_AND; fbreak;}; - 'or'i => {lex.setTokenPosition(token); tok = T_LOGICAL_OR; fbreak;}; - 'xor'i => {lex.setTokenPosition(token); tok = T_LOGICAL_XOR; fbreak;}; - '\\' => {lex.setTokenPosition(token); tok = T_NS_SEPARATOR; fbreak;}; - '...' 
=> {lex.setTokenPosition(token); tok = T_ELLIPSIS; fbreak;}; - '::' => {lex.setTokenPosition(token); tok = T_PAAMAYIM_NEKUDOTAYIM; fbreak;}; - '&&' => {lex.setTokenPosition(token); tok = T_BOOLEAN_AND; fbreak;}; - '||' => {lex.setTokenPosition(token); tok = T_BOOLEAN_OR; fbreak;}; - '&=' => {lex.setTokenPosition(token); tok = T_AND_EQUAL; fbreak;}; - '|=' => {lex.setTokenPosition(token); tok = T_OR_EQUAL; fbreak;}; - '.=' => {lex.setTokenPosition(token); tok = T_CONCAT_EQUAL; fbreak;}; - '*=' => {lex.setTokenPosition(token); tok = T_MUL_EQUAL; fbreak;}; - '**=' => {lex.setTokenPosition(token); tok = T_POW_EQUAL; fbreak;}; - '/=' => {lex.setTokenPosition(token); tok = T_DIV_EQUAL; fbreak;}; - '+=' => {lex.setTokenPosition(token); tok = T_PLUS_EQUAL; fbreak;}; - '-=' => {lex.setTokenPosition(token); tok = T_MINUS_EQUAL; fbreak;}; - '^=' => {lex.setTokenPosition(token); tok = T_XOR_EQUAL; fbreak;}; - '%=' => {lex.setTokenPosition(token); tok = T_MOD_EQUAL; fbreak;}; - '--' => {lex.setTokenPosition(token); tok = T_DEC; fbreak;}; - '++' => {lex.setTokenPosition(token); tok = T_INC; fbreak;}; - '=>' => {lex.setTokenPosition(token); tok = T_DOUBLE_ARROW; fbreak;}; - '<=>' => {lex.setTokenPosition(token); tok = T_SPACESHIP; fbreak;}; - '!=' | '<>' => {lex.setTokenPosition(token); tok = T_IS_NOT_EQUAL; fbreak;}; - '!==' => {lex.setTokenPosition(token); tok = T_IS_NOT_IDENTICAL; fbreak;}; - '==' => {lex.setTokenPosition(token); tok = T_IS_EQUAL; fbreak;}; - '===' => {lex.setTokenPosition(token); tok = T_IS_IDENTICAL; fbreak;}; - '<<=' => {lex.setTokenPosition(token); tok = T_SL_EQUAL; fbreak;}; - '>>=' => {lex.setTokenPosition(token); tok = T_SR_EQUAL; fbreak;}; - '>=' => {lex.setTokenPosition(token); tok = T_IS_GREATER_OR_EQUAL; fbreak;}; - '<=' => {lex.setTokenPosition(token); tok = T_IS_SMALLER_OR_EQUAL; fbreak;}; - '**' => {lex.setTokenPosition(token); tok = T_POW; fbreak;}; - '<<' => {lex.setTokenPosition(token); tok = T_SL; fbreak;}; - '>>' => {lex.setTokenPosition(token); tok = T_SR; fbreak;}; - '??' 
=> {lex.setTokenPosition(token); tok = T_COALESCE; fbreak;}; - '??=' => {lex.setTokenPosition(token); tok = T_COALESCE_EQUAL; fbreak;}; + 'abstract'i => {lex.setTokenPosition(tkn); tok = token.T_ABSTRACT; fbreak;}; + 'array'i => {lex.setTokenPosition(tkn); tok = token.T_ARRAY; fbreak;}; + 'as'i => {lex.setTokenPosition(tkn); tok = token.T_AS; fbreak;}; + 'break'i => {lex.setTokenPosition(tkn); tok = token.T_BREAK; fbreak;}; + 'callable'i => {lex.setTokenPosition(tkn); tok = token.T_CALLABLE; fbreak;}; + 'case'i => {lex.setTokenPosition(tkn); tok = token.T_CASE; fbreak;}; + 'catch'i => {lex.setTokenPosition(tkn); tok = token.T_CATCH; fbreak;}; + 'class'i => {lex.setTokenPosition(tkn); tok = token.T_CLASS; fbreak;}; + 'clone'i => {lex.setTokenPosition(tkn); tok = token.T_CLONE; fbreak;}; + 'const'i => {lex.setTokenPosition(tkn); tok = token.T_CONST; fbreak;}; + 'continue'i => {lex.setTokenPosition(tkn); tok = token.T_CONTINUE; fbreak;}; + 'declare'i => {lex.setTokenPosition(tkn); tok = token.T_DECLARE; fbreak;}; + 'default'i => {lex.setTokenPosition(tkn); tok = token.T_DEFAULT; fbreak;}; + 'do'i => {lex.setTokenPosition(tkn); tok = token.T_DO; fbreak;}; + 'echo'i => {lex.setTokenPosition(tkn); tok = token.T_ECHO; fbreak;}; + 'else'i => {lex.setTokenPosition(tkn); tok = token.T_ELSE; fbreak;}; + 'elseif'i => {lex.setTokenPosition(tkn); tok = token.T_ELSEIF; fbreak;}; + 'empty'i => {lex.setTokenPosition(tkn); tok = token.T_EMPTY; fbreak;}; + 'enddeclare'i => {lex.setTokenPosition(tkn); tok = token.T_ENDDECLARE; fbreak;}; + 'endfor'i => {lex.setTokenPosition(tkn); tok = token.T_ENDFOR; fbreak;}; + 'endforeach'i => {lex.setTokenPosition(tkn); tok = token.T_ENDFOREACH; fbreak;}; + 'endif'i => {lex.setTokenPosition(tkn); tok = token.T_ENDIF; fbreak;}; + 'endswitch'i => {lex.setTokenPosition(tkn); tok = token.T_ENDSWITCH; fbreak;}; + 'endwhile'i => {lex.setTokenPosition(tkn); tok = token.T_ENDWHILE; fbreak;}; + 'eval'i => {lex.setTokenPosition(tkn); tok = token.T_EVAL; fbreak;}; + 'exit'i | 'die'i => {lex.setTokenPosition(tkn); tok = token.T_EXIT; fbreak;}; + 'extends'i => {lex.setTokenPosition(tkn); tok = token.T_EXTENDS; fbreak;}; + 'final'i => {lex.setTokenPosition(tkn); tok = token.T_FINAL; fbreak;}; + 'finally'i => {lex.setTokenPosition(tkn); tok = token.T_FINALLY; fbreak;}; + 'for'i => {lex.setTokenPosition(tkn); tok = token.T_FOR; fbreak;}; + 'foreach'i => {lex.setTokenPosition(tkn); tok = token.T_FOREACH; fbreak;}; + 'function'i | 'cfunction'i => {lex.setTokenPosition(tkn); tok = token.T_FUNCTION; fbreak;}; + 'fn'i => {lex.setTokenPosition(tkn); tok = token.T_FN; fbreak;}; + 'global'i => {lex.setTokenPosition(tkn); tok = token.T_GLOBAL; fbreak;}; + 'goto'i => {lex.setTokenPosition(tkn); tok = token.T_GOTO; fbreak;}; + 'if'i => {lex.setTokenPosition(tkn); tok = token.T_IF; fbreak;}; + 'isset'i => {lex.setTokenPosition(tkn); tok = token.T_ISSET; fbreak;}; + 'implements'i => {lex.setTokenPosition(tkn); tok = token.T_IMPLEMENTS; fbreak;}; + 'instanceof'i => {lex.setTokenPosition(tkn); tok = token.T_INSTANCEOF; fbreak;}; + 'insteadof'i => {lex.setTokenPosition(tkn); tok = token.T_INSTEADOF; fbreak;}; + 'interface'i => {lex.setTokenPosition(tkn); tok = token.T_INTERFACE; fbreak;}; + 'list'i => {lex.setTokenPosition(tkn); tok = token.T_LIST; fbreak;}; + 'namespace'i => {lex.setTokenPosition(tkn); tok = token.T_NAMESPACE; fbreak;}; + 'private'i => {lex.setTokenPosition(tkn); tok = token.T_PRIVATE; fbreak;}; + 'public'i => {lex.setTokenPosition(tkn); tok = token.T_PUBLIC; fbreak;}; + 'print'i => 
{lex.setTokenPosition(tkn); tok = token.T_PRINT; fbreak;}; + 'protected'i => {lex.setTokenPosition(tkn); tok = token.T_PROTECTED; fbreak;}; + 'return'i => {lex.setTokenPosition(tkn); tok = token.T_RETURN; fbreak;}; + 'static'i => {lex.setTokenPosition(tkn); tok = token.T_STATIC; fbreak;}; + 'switch'i => {lex.setTokenPosition(tkn); tok = token.T_SWITCH; fbreak;}; + 'throw'i => {lex.setTokenPosition(tkn); tok = token.T_THROW; fbreak;}; + 'trait'i => {lex.setTokenPosition(tkn); tok = token.T_TRAIT; fbreak;}; + 'try'i => {lex.setTokenPosition(tkn); tok = token.T_TRY; fbreak;}; + 'unset'i => {lex.setTokenPosition(tkn); tok = token.T_UNSET; fbreak;}; + 'use'i => {lex.setTokenPosition(tkn); tok = token.T_USE; fbreak;}; + 'var'i => {lex.setTokenPosition(tkn); tok = token.T_VAR; fbreak;}; + 'while'i => {lex.setTokenPosition(tkn); tok = token.T_WHILE; fbreak;}; + 'yield'i whitespace_line* 'from'i => {lex.setTokenPosition(tkn); tok = token.T_YIELD_FROM; fbreak;}; + 'yield'i => {lex.setTokenPosition(tkn); tok = token.T_YIELD; fbreak;}; + 'include'i => {lex.setTokenPosition(tkn); tok = token.T_INCLUDE; fbreak;}; + 'include_once'i => {lex.setTokenPosition(tkn); tok = token.T_INCLUDE_ONCE; fbreak;}; + 'require'i => {lex.setTokenPosition(tkn); tok = token.T_REQUIRE; fbreak;}; + 'require_once'i => {lex.setTokenPosition(tkn); tok = token.T_REQUIRE_ONCE; fbreak;}; + '__CLASS__'i => {lex.setTokenPosition(tkn); tok = token.T_CLASS_C; fbreak;}; + '__DIR__'i => {lex.setTokenPosition(tkn); tok = token.T_DIR; fbreak;}; + '__FILE__'i => {lex.setTokenPosition(tkn); tok = token.T_FILE; fbreak;}; + '__FUNCTION__'i => {lex.setTokenPosition(tkn); tok = token.T_FUNC_C; fbreak;}; + '__LINE__'i => {lex.setTokenPosition(tkn); tok = token.T_LINE; fbreak;}; + '__NAMESPACE__'i => {lex.setTokenPosition(tkn); tok = token.T_NS_C; fbreak;}; + '__METHOD__'i => {lex.setTokenPosition(tkn); tok = token.T_METHOD_C; fbreak;}; + '__TRAIT__'i => {lex.setTokenPosition(tkn); tok = token.T_TRAIT_C; fbreak;}; + '__halt_compiler'i => {lex.setTokenPosition(tkn); tok = token.T_HALT_COMPILER; fnext halt_compiller_open_parenthesis; fbreak;}; + 'new'i => {lex.setTokenPosition(tkn); tok = token.T_NEW; fbreak;}; + 'and'i => {lex.setTokenPosition(tkn); tok = token.T_LOGICAL_AND; fbreak;}; + 'or'i => {lex.setTokenPosition(tkn); tok = token.T_LOGICAL_OR; fbreak;}; + 'xor'i => {lex.setTokenPosition(tkn); tok = token.T_LOGICAL_XOR; fbreak;}; + '\\' => {lex.setTokenPosition(tkn); tok = token.T_NS_SEPARATOR; fbreak;}; + '...' 
=> {lex.setTokenPosition(tkn); tok = token.T_ELLIPSIS; fbreak;}; + '::' => {lex.setTokenPosition(tkn); tok = token.T_PAAMAYIM_NEKUDOTAYIM; fbreak;}; + '&&' => {lex.setTokenPosition(tkn); tok = token.T_BOOLEAN_AND; fbreak;}; + '||' => {lex.setTokenPosition(tkn); tok = token.T_BOOLEAN_OR; fbreak;}; + '&=' => {lex.setTokenPosition(tkn); tok = token.T_AND_EQUAL; fbreak;}; + '|=' => {lex.setTokenPosition(tkn); tok = token.T_OR_EQUAL; fbreak;}; + '.=' => {lex.setTokenPosition(tkn); tok = token.T_CONCAT_EQUAL; fbreak;}; + '*=' => {lex.setTokenPosition(tkn); tok = token.T_MUL_EQUAL; fbreak;}; + '**=' => {lex.setTokenPosition(tkn); tok = token.T_POW_EQUAL; fbreak;}; + '/=' => {lex.setTokenPosition(tkn); tok = token.T_DIV_EQUAL; fbreak;}; + '+=' => {lex.setTokenPosition(tkn); tok = token.T_PLUS_EQUAL; fbreak;}; + '-=' => {lex.setTokenPosition(tkn); tok = token.T_MINUS_EQUAL; fbreak;}; + '^=' => {lex.setTokenPosition(tkn); tok = token.T_XOR_EQUAL; fbreak;}; + '%=' => {lex.setTokenPosition(tkn); tok = token.T_MOD_EQUAL; fbreak;}; + '--' => {lex.setTokenPosition(tkn); tok = token.T_DEC; fbreak;}; + '++' => {lex.setTokenPosition(tkn); tok = token.T_INC; fbreak;}; + '=>' => {lex.setTokenPosition(tkn); tok = token.T_DOUBLE_ARROW; fbreak;}; + '<=>' => {lex.setTokenPosition(tkn); tok = token.T_SPACESHIP; fbreak;}; + '!=' | '<>' => {lex.setTokenPosition(tkn); tok = token.T_IS_NOT_EQUAL; fbreak;}; + '!==' => {lex.setTokenPosition(tkn); tok = token.T_IS_NOT_IDENTICAL; fbreak;}; + '==' => {lex.setTokenPosition(tkn); tok = token.T_IS_EQUAL; fbreak;}; + '===' => {lex.setTokenPosition(tkn); tok = token.T_IS_IDENTICAL; fbreak;}; + '<<=' => {lex.setTokenPosition(tkn); tok = token.T_SL_EQUAL; fbreak;}; + '>>=' => {lex.setTokenPosition(tkn); tok = token.T_SR_EQUAL; fbreak;}; + '>=' => {lex.setTokenPosition(tkn); tok = token.T_IS_GREATER_OR_EQUAL; fbreak;}; + '<=' => {lex.setTokenPosition(tkn); tok = token.T_IS_SMALLER_OR_EQUAL; fbreak;}; + '**' => {lex.setTokenPosition(tkn); tok = token.T_POW; fbreak;}; + '<<' => {lex.setTokenPosition(tkn); tok = token.T_SL; fbreak;}; + '>>' => {lex.setTokenPosition(tkn); tok = token.T_SR; fbreak;}; + '??' 
=> {lex.setTokenPosition(tkn); tok = token.T_COALESCE; fbreak;}; + '??=' => {lex.setTokenPosition(tkn); tok = token.T_COALESCE_EQUAL; fbreak;}; - '(' whitespace* 'array'i whitespace* ')' => {lex.setTokenPosition(token); tok = T_ARRAY_CAST; fbreak;}; - '(' whitespace* ('bool'i|'boolean'i) whitespace* ')' => {lex.setTokenPosition(token); tok = T_BOOL_CAST; fbreak;}; - '(' whitespace* ('real'i|'double'i|'float'i) whitespace* ')' => {lex.setTokenPosition(token); tok = T_DOUBLE_CAST; fbreak;}; - '(' whitespace* ('int'i|'integer'i) whitespace* ')' => {lex.setTokenPosition(token); tok = T_INT_CAST; fbreak;}; - '(' whitespace* 'object'i whitespace* ')' => {lex.setTokenPosition(token); tok = T_OBJECT_CAST; fbreak;}; - '(' whitespace* ('string'i|'binary'i) whitespace* ')' => {lex.setTokenPosition(token); tok = T_STRING_CAST; fbreak;}; - '(' whitespace* 'unset'i whitespace* ')' => {lex.setTokenPosition(token); tok = T_UNSET_CAST; fbreak;}; + '(' whitespace* 'array'i whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_ARRAY_CAST; fbreak;}; + '(' whitespace* ('bool'i|'boolean'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_BOOL_CAST; fbreak;}; + '(' whitespace* ('real'i|'double'i|'float'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_DOUBLE_CAST; fbreak;}; + '(' whitespace* ('int'i|'integer'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_INT_CAST; fbreak;}; + '(' whitespace* 'object'i whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_OBJECT_CAST; fbreak;}; + '(' whitespace* ('string'i|'binary'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_STRING_CAST; fbreak;}; + '(' whitespace* 'unset'i whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_UNSET_CAST; fbreak;}; ('#' | '//') any_line* when is_not_comment_end => { lex.ungetStr("?>") - lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te) + lex.addSkippedToken(tkn, token.T_COMMENT, lex.ts, lex.te) }; '/*' any_line* :>> '*/' { isDocComment := false; @@ -326,37 +329,35 @@ func (lex *Lexer) Lex() *Token { } if isDocComment { - lex.addHiddenToken(token, T_DOC_COMMENT, lex.ts, lex.te) + lex.addSkippedToken(tkn, token.T_DOC_COMMENT, lex.ts, lex.te) } else { - lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te) + lex.addSkippedToken(tkn, token.T_COMMENT, lex.ts, lex.te) } }; operators => { - // rune, _ := utf8.DecodeRune(lex.data[lex.ts:lex.te]); - // tok = TokenID(Rune2Class(rune)); - lex.setTokenPosition(token); - tok = TokenID(int(lex.data[lex.ts])); + lex.setTokenPosition(tkn); + tok = token.ID(int(lex.data[lex.ts])); fbreak; }; - "{" => { lex.setTokenPosition(token); tok = TokenID(int('{')); lex.call(ftargs, fentry(php)); goto _out; }; - "}" => { lex.setTokenPosition(token); tok = TokenID(int('}')); lex.ret(1); goto _out;}; - "$" varname => { lex.setTokenPosition(token); tok = T_VARIABLE; fbreak; }; - varname => { lex.setTokenPosition(token); tok = T_STRING; fbreak; }; + "{" => { lex.setTokenPosition(tkn); tok = token.ID(int('{')); lex.call(ftargs, fentry(php)); goto _out; }; + "}" => { lex.setTokenPosition(tkn); tok = token.ID(int('}')); lex.ret(1); goto _out;}; + "$" varname => { lex.setTokenPosition(tkn); tok = token.T_VARIABLE; fbreak; }; + varname => { lex.setTokenPosition(tkn); tok = token.T_STRING; fbreak; }; - "->" => { lex.setTokenPosition(token); tok = T_OBJECT_OPERATOR; fnext property; fbreak; }; + "->" => { lex.setTokenPosition(tkn); tok = token.T_OBJECT_OPERATOR; fnext property; fbreak; }; constant_string => { - lex.setTokenPosition(token); - tok = 
T_CONSTANT_ENCAPSED_STRING; + lex.setTokenPosition(tkn); + tok = token.T_CONSTANT_ENCAPSED_STRING; fbreak; }; "b"i? "<<<" [ \t]* ( heredoc_label | ("'" heredoc_label "'") | ('"' heredoc_label '"') ) newline => { lex.heredocLabel = lex.data[lblStart:lblEnd] - lex.setTokenPosition(token); - tok = T_START_HEREDOC; + lex.setTokenPosition(tkn); + tok = token.T_START_HEREDOC; if lex.isHeredocEnd(lex.p+1) { fnext heredoc_end; @@ -367,8 +368,8 @@ func (lex *Lexer) Lex() *Token { } fbreak; }; - "`" => {lex.setTokenPosition(token); tok = TokenID(int('`')); fnext backqote; fbreak;}; - '"' => {lex.setTokenPosition(token); tok = TokenID(int('"')); fnext template_string; fbreak;}; + "`" => {lex.setTokenPosition(tkn); tok = token.ID(int('`')); fnext backqote; fbreak;}; + '"' => {lex.setTokenPosition(tkn); tok = token.ID(int('"')); fnext template_string; fbreak;}; any_line => { c := lex.data[lex.p] @@ -377,28 +378,28 @@ func (lex *Lexer) Lex() *Token { *|; property := |* - whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)}; - "->" => {lex.setTokenPosition(token); tok = T_OBJECT_OPERATOR; fbreak;}; - varname => {lex.setTokenPosition(token); tok = T_STRING; fnext php; fbreak;}; + whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)}; + "->" => {lex.setTokenPosition(tkn); tok = token.T_OBJECT_OPERATOR; fbreak;}; + varname => {lex.setTokenPosition(tkn); tok = token.T_STRING; fnext php; fbreak;}; any => {lex.ungetCnt(1); fgoto php;}; *|; nowdoc := |* any_line* when is_not_heredoc_end => { - lex.setTokenPosition(token); - tok = T_ENCAPSED_AND_WHITESPACE; + lex.setTokenPosition(tkn); + tok = token.T_ENCAPSED_AND_WHITESPACE; fnext heredoc_end; fbreak; }; *|; heredoc := |* - "{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;}; - "${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;}; + "{$" => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;}; + "${" => {lex.setTokenPosition(tkn); tok = token.T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;}; "$" => {lex.ungetCnt(1); fcall string_var;}; any_line* when is_not_heredoc_end_or_var => { - lex.setTokenPosition(token); - tok = T_ENCAPSED_AND_WHITESPACE; + lex.setTokenPosition(tkn); + tok = token.T_ENCAPSED_AND_WHITESPACE; if len(lex.data) > lex.p+1 && lex.data[lex.p+1] != '$' && lex.data[lex.p+1] != '{' { fnext heredoc_end; @@ -408,59 +409,59 @@ func (lex *Lexer) Lex() *Token { *|; backqote := |* - "{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;}; - "${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;}; + "{$" => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;}; + "${" => {lex.setTokenPosition(tkn); tok = token.T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;}; "$" varname_first => {lex.ungetCnt(2); fcall string_var;}; - '`' => {lex.setTokenPosition(token); tok = TokenID(int('`')); fnext php; fbreak;}; + '`' => {lex.setTokenPosition(tkn); tok = token.ID(int('`')); fnext php; fbreak;}; any_line* when is_not_backqoute_end_or_var => { - lex.setTokenPosition(token); - tok = T_ENCAPSED_AND_WHITESPACE; + lex.setTokenPosition(tkn); + tok = 
token.T_ENCAPSED_AND_WHITESPACE; fbreak; }; *|; template_string := |* - "{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;}; - "${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;}; + "{$" => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;}; + "${" => {lex.setTokenPosition(tkn); tok = token.T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;}; "$" varname_first => {lex.ungetCnt(2); fcall string_var;}; - '"' => {lex.setTokenPosition(token); tok = TokenID(int('"')); fnext php; fbreak;}; + '"' => {lex.setTokenPosition(tkn); tok = token.ID(int('"')); fnext php; fbreak;}; any_line* when is_not_string_end_or_var => { - lex.setTokenPosition(token); - tok = T_ENCAPSED_AND_WHITESPACE; + lex.setTokenPosition(tkn); + tok = token.T_ENCAPSED_AND_WHITESPACE; fbreak; }; *|; heredoc_end := |* varname -- ";" => { - lex.setTokenPosition(token); - tok = T_END_HEREDOC; + lex.setTokenPosition(tkn); + tok = token.T_END_HEREDOC; fnext php; fbreak; }; varname => { - lex.setTokenPosition(token); - tok = T_END_HEREDOC; + lex.setTokenPosition(tkn); + tok = token.T_END_HEREDOC; fnext php; fbreak; }; *|; string_var := |* - '$' varname => {lex.setTokenPosition(token); tok = T_VARIABLE; fbreak;}; - '->' varname_first => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_OBJECT_OPERATOR; fbreak;}; - varname => {lex.setTokenPosition(token); tok = T_STRING; fbreak;}; - '[' => {lex.setTokenPosition(token); tok = TokenID(int('[')); lex.call(ftargs, fentry(string_var_index)); goto _out;}; + '$' varname => {lex.setTokenPosition(tkn); tok = token.T_VARIABLE; fbreak;}; + '->' varname_first => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_OBJECT_OPERATOR; fbreak;}; + varname => {lex.setTokenPosition(tkn); tok = token.T_STRING; fbreak;}; + '[' => {lex.setTokenPosition(tkn); tok = token.ID(int('[')); lex.call(ftargs, fentry(string_var_index)); goto _out;}; any => {lex.ungetCnt(1); fret;}; *|; string_var_index := |* - lnum | hnum | bnum => {lex.setTokenPosition(token); tok = T_NUM_STRING; fbreak;}; - '$' varname => {lex.setTokenPosition(token); tok = T_VARIABLE; fbreak;}; - varname => {lex.setTokenPosition(token); tok = T_STRING; fbreak;}; - whitespace_line | [\\'#] => {lex.setTokenPosition(token); tok = T_ENCAPSED_AND_WHITESPACE; lex.ret(2); goto _out;}; - operators > (svi, 1) => {lex.setTokenPosition(token); tok = TokenID(int(lex.data[lex.ts])); fbreak;}; - ']' > (svi, 2) => {lex.setTokenPosition(token); tok = TokenID(int(']')); lex.ret(2); goto _out;}; + lnum | hnum | bnum => {lex.setTokenPosition(tkn); tok = token.T_NUM_STRING; fbreak;}; + '$' varname => {lex.setTokenPosition(tkn); tok = token.T_VARIABLE; fbreak;}; + varname => {lex.setTokenPosition(tkn); tok = token.T_STRING; fbreak;}; + whitespace_line | [\\'#] => {lex.setTokenPosition(tkn); tok = token.T_ENCAPSED_AND_WHITESPACE; lex.ret(2); goto _out;}; + operators > (svi, 1) => {lex.setTokenPosition(tkn); tok = token.ID(int(lex.data[lex.ts])); fbreak;}; + ']' > (svi, 2) => {lex.setTokenPosition(tkn); tok = token.ID(int(']')); lex.ret(2); goto _out;}; any_line => { c := lex.data[lex.p] lex.error(fmt.Sprintf("WARNING: Unexpected character in input: '%c' (ASCII=%d)", c, c)); @@ -468,38 +469,39 @@ func (lex *Lexer) Lex() *Token { *|; string_var_name := |* - varname ("[" | "}") => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = 
T_STRING_VARNAME; fnext php; fbreak;}; + varname ("[" | "}") => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_STRING_VARNAME; fnext php; fbreak;}; any => {lex.ungetCnt(1); fnext php;}; *|; halt_compiller_open_parenthesis := |* - whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)}; - "(" => {lex.setTokenPosition(token); tok = TokenID(int('(')); fnext halt_compiller_close_parenthesis; fbreak;}; + whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)}; + "(" => {lex.setTokenPosition(tkn); tok = token.ID(int('(')); fnext halt_compiller_close_parenthesis; fbreak;}; any => {lex.ungetCnt(1); fnext php;}; *|; halt_compiller_close_parenthesis := |* - whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)}; - ")" => {lex.setTokenPosition(token); tok = TokenID(int(')')); fnext halt_compiller_close_semicolon; fbreak;}; + whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)}; + ")" => {lex.setTokenPosition(tkn); tok = token.ID(int(')')); fnext halt_compiller_close_semicolon; fbreak;}; any => {lex.ungetCnt(1); fnext php;}; *|; halt_compiller_close_semicolon := |* - whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)}; - ";" => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext halt_compiller_end; fbreak;}; + whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)}; + ";" => {lex.setTokenPosition(tkn); tok = token.ID(int(';')); fnext halt_compiller_end; fbreak;}; any => {lex.ungetCnt(1); fnext php;}; *|; halt_compiller_end := |* - any_line* => { lex.addHiddenToken(token, T_HALT_COMPILER, lex.ts, lex.te); }; + any_line* => { lex.addSkippedToken(tkn, token.T_HALT_COMPILER, lex.ts, lex.te); }; *|; write exec; }%% - token.Value = lex.data[lex.ts:lex.te] - token.ID = tok - lex.addHiddenToken(token, tok, lex.ts, lex.te); + tkn.Value = lex.data[lex.ts:lex.te] + tkn.ID = token.ID(tok) + tkn.SkippedString = lex.data[lex.sts:lex.ste] + lex.addSkippedToken(tkn, tok, lex.ts, lex.te); - return token + return tkn } \ No newline at end of file diff --git a/internal/scanner/scanner_test.go b/internal/scanner/scanner_test.go index 530f972..465c40a 100644 --- a/internal/scanner/scanner_test.go +++ b/internal/scanner/scanner_test.go @@ -1,12 +1,12 @@ package scanner import ( - "github.com/z7zmey/php-parser/pkg/errors" - "github.com/z7zmey/php-parser/pkg/position" + "gotest.tools/assert" "testing" + "github.com/z7zmey/php-parser/pkg/errors" + "github.com/z7zmey/php-parser/pkg/position" "github.com/z7zmey/php-parser/pkg/token" - "gotest.tools/assert" ) func TestTokens(t *testing.T) { @@ -186,175 +186,174 @@ func TestTokens(t *testing.T) { ` expected := []string{ - T_INLINE_HTML.String(), - TokenID(int(';')).String(), - T_INLINE_HTML.String(), - T_ECHO.String(), - TokenID(int(';')).String(), - T_INLINE_HTML.String(), + token.T_INLINE_HTML.String(), + token.ID(int(';')).String(), + token.T_INLINE_HTML.String(), + token.T_ECHO.String(), + token.ID(int(';')).String(), + token.T_INLINE_HTML.String(), - T_ABSTRACT.String(), - T_ARRAY.String(), - T_AS.String(), - T_BREAK.String(), - T_CALLABLE.String(), - T_CASE.String(), - T_CATCH.String(), - T_CLASS.String(), - T_CLONE.String(), - T_CONST.String(), - T_CONTINUE.String(), - T_DECLARE.String(), - T_DEFAULT.String(), - T_DO.String(), - T_ECHO.String(), - T_ELSE.String(), - T_ELSEIF.String(), - T_EMPTY.String(), - T_ENDDECLARE.String(), - T_ENDFOR.String(), - T_ENDFOREACH.String(), - T_ENDIF.String(), - 
T_ENDSWITCH.String(), - T_ENDWHILE.String(), - T_EVAL.String(), - T_EXIT.String(), - T_EXTENDS.String(), - T_FINAL.String(), - T_FINALLY.String(), - T_FOR.String(), - T_FOREACH.String(), - T_FUNCTION.String(), - T_FUNCTION.String(), - T_GLOBAL.String(), - T_GOTO.String(), - T_IF.String(), - T_ISSET.String(), - T_IMPLEMENTS.String(), - T_INSTANCEOF.String(), - T_INSTEADOF.String(), - T_INTERFACE.String(), - T_LIST.String(), - T_NAMESPACE.String(), - T_PRIVATE.String(), - T_PUBLIC.String(), - T_PRINT.String(), - T_PROTECTED.String(), - T_RETURN.String(), - T_STATIC.String(), - T_SWITCH.String(), - T_THROW.String(), - T_TRAIT.String(), - T_TRY.String(), - T_UNSET.String(), - T_USE.String(), - T_VAR.String(), - T_WHILE.String(), - T_YIELD_FROM.String(), - T_YIELD.String(), - T_INCLUDE.String(), - T_INCLUDE_ONCE.String(), - T_REQUIRE.String(), - T_REQUIRE_ONCE.String(), + token.T_ABSTRACT.String(), + token.T_ARRAY.String(), + token.T_AS.String(), + token.T_BREAK.String(), + token.T_CALLABLE.String(), + token.T_CASE.String(), + token.T_CATCH.String(), + token.T_CLASS.String(), + token.T_CLONE.String(), + token.T_CONST.String(), + token.T_CONTINUE.String(), + token.T_DECLARE.String(), + token.T_DEFAULT.String(), + token.T_DO.String(), + token.T_ECHO.String(), + token.T_ELSE.String(), + token.T_ELSEIF.String(), + token.T_EMPTY.String(), + token.T_ENDDECLARE.String(), + token.T_ENDFOR.String(), + token.T_ENDFOREACH.String(), + token.T_ENDIF.String(), + token.T_ENDSWITCH.String(), + token.T_ENDWHILE.String(), + token.T_EVAL.String(), + token.T_EXIT.String(), + token.T_EXTENDS.String(), + token.T_FINAL.String(), + token.T_FINALLY.String(), + token.T_FOR.String(), + token.T_FOREACH.String(), + token.T_FUNCTION.String(), + token.T_FUNCTION.String(), + token.T_GLOBAL.String(), + token.T_GOTO.String(), + token.T_IF.String(), + token.T_ISSET.String(), + token.T_IMPLEMENTS.String(), + token.T_INSTANCEOF.String(), + token.T_INSTEADOF.String(), + token.T_INTERFACE.String(), + token.T_LIST.String(), + token.T_NAMESPACE.String(), + token.T_PRIVATE.String(), + token.T_PUBLIC.String(), + token.T_PRINT.String(), + token.T_PROTECTED.String(), + token.T_RETURN.String(), + token.T_STATIC.String(), + token.T_SWITCH.String(), + token.T_THROW.String(), + token.T_TRAIT.String(), + token.T_TRY.String(), + token.T_UNSET.String(), + token.T_USE.String(), + token.T_VAR.String(), + token.T_WHILE.String(), + token.T_YIELD_FROM.String(), + token.T_YIELD.String(), + token.T_INCLUDE.String(), + token.T_INCLUDE_ONCE.String(), + token.T_REQUIRE.String(), + token.T_REQUIRE_ONCE.String(), - T_CLASS_C.String(), - T_DIR.String(), - T_FILE.String(), - T_FUNC_C.String(), - T_LINE.String(), - T_NS_C.String(), - T_METHOD_C.String(), - T_TRAIT_C.String(), - T_HALT_COMPILER.String(), + token.T_CLASS_C.String(), + token.T_DIR.String(), + token.T_FILE.String(), + token.T_FUNC_C.String(), + token.T_LINE.String(), + token.T_NS_C.String(), + token.T_METHOD_C.String(), + token.T_TRAIT_C.String(), + token.T_HALT_COMPILER.String(), - T_NEW.String(), - T_LOGICAL_AND.String(), - T_LOGICAL_OR.String(), - T_LOGICAL_XOR.String(), + token.T_NEW.String(), + token.T_LOGICAL_AND.String(), + token.T_LOGICAL_OR.String(), + token.T_LOGICAL_XOR.String(), - T_NS_SEPARATOR.String(), - T_ELLIPSIS.String(), - T_PAAMAYIM_NEKUDOTAYIM.String(), - T_BOOLEAN_AND.String(), - T_BOOLEAN_OR.String(), - T_AND_EQUAL.String(), - T_OR_EQUAL.String(), - T_CONCAT_EQUAL.String(), - T_MUL_EQUAL.String(), - T_POW_EQUAL.String(), - T_DIV_EQUAL.String(), - T_PLUS_EQUAL.String(), - 
T_MINUS_EQUAL.String(), - T_XOR_EQUAL.String(), - T_MOD_EQUAL.String(), - T_DEC.String(), - T_INC.String(), - T_DOUBLE_ARROW.String(), - T_SPACESHIP.String(), - T_IS_NOT_EQUAL.String(), - T_IS_NOT_EQUAL.String(), - T_IS_NOT_IDENTICAL.String(), - T_IS_EQUAL.String(), - T_IS_IDENTICAL.String(), - T_SL_EQUAL.String(), - T_SR_EQUAL.String(), - T_IS_GREATER_OR_EQUAL.String(), - T_IS_SMALLER_OR_EQUAL.String(), - T_POW.String(), - T_SL.String(), - T_SR.String(), - T_COALESCE.String(), + token.T_NS_SEPARATOR.String(), + token.T_ELLIPSIS.String(), + token.T_PAAMAYIM_NEKUDOTAYIM.String(), + token.T_BOOLEAN_AND.String(), + token.T_BOOLEAN_OR.String(), + token.T_AND_EQUAL.String(), + token.T_OR_EQUAL.String(), + token.T_CONCAT_EQUAL.String(), + token.T_MUL_EQUAL.String(), + token.T_POW_EQUAL.String(), + token.T_DIV_EQUAL.String(), + token.T_PLUS_EQUAL.String(), + token.T_MINUS_EQUAL.String(), + token.T_XOR_EQUAL.String(), + token.T_MOD_EQUAL.String(), + token.T_DEC.String(), + token.T_INC.String(), + token.T_DOUBLE_ARROW.String(), + token.T_SPACESHIP.String(), + token.T_IS_NOT_EQUAL.String(), + token.T_IS_NOT_EQUAL.String(), + token.T_IS_NOT_IDENTICAL.String(), + token.T_IS_EQUAL.String(), + token.T_IS_IDENTICAL.String(), + token.T_SL_EQUAL.String(), + token.T_SR_EQUAL.String(), + token.T_IS_GREATER_OR_EQUAL.String(), + token.T_IS_SMALLER_OR_EQUAL.String(), + token.T_POW.String(), + token.T_SL.String(), + token.T_SR.String(), + token.T_COALESCE.String(), - TokenID(int(';')).String(), - TokenID(int(':')).String(), - TokenID(int(',')).String(), - TokenID(int('.')).String(), - TokenID(int('[')).String(), - TokenID(int(']')).String(), - TokenID(int('(')).String(), - TokenID(int(')')).String(), - TokenID(int('|')).String(), - TokenID(int('/')).String(), - TokenID(int('^')).String(), - TokenID(int('&')).String(), - TokenID(int('+')).String(), - TokenID(int('-')).String(), - TokenID(int('*')).String(), - TokenID(int('=')).String(), - TokenID(int('%')).String(), - TokenID(int('!')).String(), - TokenID(int('~')).String(), - TokenID(int('$')).String(), - TokenID(int('<')).String(), - TokenID(int('>')).String(), - TokenID(int('?')).String(), - TokenID(int('@')).String(), - TokenID(int('{')).String(), - TokenID(int('}')).String(), + token.ID(int(';')).String(), + token.ID(int(':')).String(), + token.ID(int(',')).String(), + token.ID(int('.')).String(), + token.ID(int('[')).String(), + token.ID(int(']')).String(), + token.ID(int('(')).String(), + token.ID(int(')')).String(), + token.ID(int('|')).String(), + token.ID(int('/')).String(), + token.ID(int('^')).String(), + token.ID(int('&')).String(), + token.ID(int('+')).String(), + token.ID(int('-')).String(), + token.ID(int('*')).String(), + token.ID(int('=')).String(), + token.ID(int('%')).String(), + token.ID(int('!')).String(), + token.ID(int('~')).String(), + token.ID(int('$')).String(), + token.ID(int('<')).String(), + token.ID(int('>')).String(), + token.ID(int('?')).String(), + token.ID(int('@')).String(), + token.ID(int('{')).String(), + token.ID(int('}')).String(), - T_VARIABLE.String(), - T_STRING.String(), + token.T_VARIABLE.String(), + token.T_STRING.String(), - T_OBJECT_OPERATOR.String(), - T_OBJECT_OPERATOR.String(), - T_STRING.String(), + token.T_OBJECT_OPERATOR.String(), + token.T_OBJECT_OPERATOR.String(), + token.T_STRING.String(), - T_ARRAY_CAST.String(), - T_BOOL_CAST.String(), - T_BOOL_CAST.String(), - T_DOUBLE_CAST.String(), - T_DOUBLE_CAST.String(), - T_DOUBLE_CAST.String(), - T_INT_CAST.String(), - T_INT_CAST.String(), - 
T_OBJECT_CAST.String(), - T_STRING_CAST.String(), - T_STRING_CAST.String(), - T_UNSET_CAST.String(), + token.T_ARRAY_CAST.String(), + token.T_BOOL_CAST.String(), + token.T_BOOL_CAST.String(), + token.T_DOUBLE_CAST.String(), + token.T_DOUBLE_CAST.String(), + token.T_DOUBLE_CAST.String(), + token.T_INT_CAST.String(), + token.T_INT_CAST.String(), + token.T_OBJECT_CAST.String(), + token.T_STRING_CAST.String(), + token.T_STRING_CAST.String(), + token.T_UNSET_CAST.String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -381,15 +380,14 @@ func TestShebang(t *testing.T) { "\n", } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} tkn := lexer.Lex() - assert.Equal(t, tkn.ID, T_DNUMBER) + assert.Equal(t, tkn.ID, token.T_DNUMBER) - l := len(tkn.Tokens) - for _, tt := range tkn.Tokens[:l-1] { + l := len(tkn.SkippedTokens) + for _, tt := range tkn.SkippedTokens[:l-1] { actual = append(actual, string(tt.Value)) } @@ -402,15 +400,14 @@ func TestShebangHtml(t *testing.T) { 0.1 ` - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) tkn := lexer.Lex() - assert.Equal(t, tkn.ID, T_INLINE_HTML) - assert.Equal(t, string(tkn.Tokens[0].Value), "#!/usr/bin/env php\n") + assert.Equal(t, tkn.ID, token.T_INLINE_HTML) + assert.Equal(t, string(tkn.SkippedTokens[0].Value), "#!/usr/bin/env php\n") tkn = lexer.Lex() - assert.Equal(t, tkn.ID, T_DNUMBER) + assert.Equal(t, tkn.ID, token.T_DNUMBER) } func TestNumberTokens(t *testing.T) { @@ -434,26 +431,25 @@ func TestNumberTokens(t *testing.T) { ` expected := []string{ - T_DNUMBER.String(), - T_DNUMBER.String(), - T_DNUMBER.String(), - T_DNUMBER.String(), + token.T_DNUMBER.String(), + token.T_DNUMBER.String(), + token.T_DNUMBER.String(), + token.T_DNUMBER.String(), - T_LNUMBER.String(), - T_DNUMBER.String(), + token.T_LNUMBER.String(), + token.T_DNUMBER.String(), - T_LNUMBER.String(), - T_DNUMBER.String(), + token.T_LNUMBER.String(), + token.T_DNUMBER.String(), - T_LNUMBER.String(), - T_LNUMBER.String(), + token.T_LNUMBER.String(), + token.T_LNUMBER.String(), - T_DNUMBER.String(), - T_DNUMBER.String(), + token.T_DNUMBER.String(), + token.T_DNUMBER.String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -490,27 +486,26 @@ func TestConstantStrings(t *testing.T) { ` expected := []string{ - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - 
T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -547,16 +542,16 @@ func TestSingleQuoteStringTokens(t *testing.T) { ` expected := []string{ - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), - T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), + token.T_CONSTANT_ENCAPSED_STRING.String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -591,67 +586,66 @@ func TestTeplateStringTokens(t *testing.T) { ` expected := []string{ - TokenID(int('"')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('}')).String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.T_CURLY_OPEN.String(), + token.T_VARIABLE.String(), + token.ID(int('}')).String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('}')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_DOLLAR_OPEN_CURLY_BRACES.String(), - T_STRING_VARNAME.String(), - TokenID(int('}')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_CURLY_OPEN.String(), + token.T_VARIABLE.String(), + token.ID(int('}')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_DOLLAR_OPEN_CURLY_BRACES.String(), + token.T_STRING_VARNAME.String(), + token.ID(int('}')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('}')).String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_CURLY_OPEN.String(), + token.T_VARIABLE.String(), + token.ID(int('}')).String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('"')).String(), + 
token.ID(int('"')).String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('"')).String(), - TokenID(int('"')).String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('"')).String(), + token.ID(int('"')).String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('"')).String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -682,67 +676,66 @@ func TestBackquoteStringTokens(t *testing.T) { ` expected := []string{ - TokenID(int('`')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('}')).String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.T_CURLY_OPEN.String(), + token.T_VARIABLE.String(), + token.ID(int('}')).String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('}')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_DOLLAR_OPEN_CURLY_BRACES.String(), - T_STRING_VARNAME.String(), - TokenID(int('}')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_CURLY_OPEN.String(), + token.T_VARIABLE.String(), + token.ID(int('}')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_DOLLAR_OPEN_CURLY_BRACES.String(), + token.T_STRING_VARNAME.String(), + token.ID(int('}')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('}')).String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_CURLY_OPEN.String(), + token.T_VARIABLE.String(), + token.ID(int('}')).String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - 
T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('`')).String(), - TokenID(int('`')).String(), - T_VARIABLE.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - TokenID(int('`')).String(), + token.ID(int('`')).String(), + token.T_VARIABLE.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.ID(int('`')).String(), } - lexer := NewLexer([]byte(src), "7.4", false, nil) - lexer.withTokens = true + lexer := NewLexer([]byte(src), "7.4", nil) actual := []string{} for { @@ -782,61 +775,60 @@ CAT; ` expected := []string{ - T_START_HEREDOC.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_END_HEREDOC.String(), - TokenID(int(';')).String(), + token.T_START_HEREDOC.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_END_HEREDOC.String(), + token.ID(int(';')).String(), - T_START_HEREDOC.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_END_HEREDOC.String(), - TokenID(int(';')).String(), + token.T_START_HEREDOC.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_END_HEREDOC.String(), + token.ID(int(';')).String(), - T_START_HEREDOC.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - T_OBJECT_OPERATOR.String(), - T_STRING.String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('[')).String(), - T_NUM_STRING.String(), - TokenID(int(']')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('[')).String(), - T_NUM_STRING.String(), - TokenID(int(']')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('[')).String(), - T_NUM_STRING.String(), - TokenID(int(']')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('[')).String(), - T_STRING.String(), - TokenID(int(']')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_VARIABLE.String(), - TokenID(int('[')).String(), - T_VARIABLE.String(), - TokenID(int(']')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_CURLY_OPEN.String(), - T_VARIABLE.String(), - TokenID(int('}')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_DOLLAR_OPEN_CURLY_BRACES.String(), - T_STRING_VARNAME.String(), - TokenID(int('}')).String(), - T_ENCAPSED_AND_WHITESPACE.String(), - T_END_HEREDOC.String(), - TokenID(int(';')).String(), + token.T_START_HEREDOC.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.T_OBJECT_OPERATOR.String(), + token.T_STRING.String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('[')).String(), + token.T_NUM_STRING.String(), + token.ID(int(']')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('[')).String(), + token.T_NUM_STRING.String(), + token.ID(int(']')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('[')).String(), + token.T_NUM_STRING.String(), + token.ID(int(']')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + token.T_VARIABLE.String(), + token.ID(int('[')).String(), + token.T_STRING.String(), + token.ID(int(']')).String(), + token.T_ENCAPSED_AND_WHITESPACE.String(), + 
        token.T_VARIABLE.String(),
+        token.ID(int('[')).String(),
+        token.T_VARIABLE.String(),
+        token.ID(int(']')).String(),
+        token.T_ENCAPSED_AND_WHITESPACE.String(),
+        token.T_CURLY_OPEN.String(),
+        token.T_VARIABLE.String(),
+        token.ID(int('}')).String(),
+        token.T_ENCAPSED_AND_WHITESPACE.String(),
+        token.T_DOLLAR_OPEN_CURLY_BRACES.String(),
+        token.T_STRING_VARNAME.String(),
+        token.ID(int('}')).String(),
+        token.T_ENCAPSED_AND_WHITESPACE.String(),
+        token.T_END_HEREDOC.String(),
+        token.ID(int(';')).String(),
     }
 
-    lexer := NewLexer([]byte(src), "7.4", false, nil)
-    lexer.withTokens = true
+    lexer := NewLexer([]byte(src), "7.4", nil)
 
     actual := []string{}
 
     for {
@@ -875,41 +867,40 @@ CAT
 `
 
     expected := []string{
-        T_START_HEREDOC.String(),
-        T_VARIABLE.String(),
-        T_ENCAPSED_AND_WHITESPACE.String(),
-        T_END_HEREDOC.String(),
-        TokenID(int(';')).String(),
+        token.T_START_HEREDOC.String(),
+        token.T_VARIABLE.String(),
+        token.T_ENCAPSED_AND_WHITESPACE.String(),
+        token.T_END_HEREDOC.String(),
+        token.ID(int(';')).String(),
 
-        T_START_HEREDOC.String(),
-        T_VARIABLE.String(),
-        T_ENCAPSED_AND_WHITESPACE.String(),
-        T_END_HEREDOC.String(),
-        TokenID(int(';')).String(),
+        token.T_START_HEREDOC.String(),
+        token.T_VARIABLE.String(),
+        token.T_ENCAPSED_AND_WHITESPACE.String(),
+        token.T_END_HEREDOC.String(),
+        token.ID(int(';')).String(),
 
-        T_START_HEREDOC.String(),
-        T_ENCAPSED_AND_WHITESPACE.String(),
-        T_VARIABLE.String(),
-        T_ENCAPSED_AND_WHITESPACE.String(),
-        T_END_HEREDOC.String(),
-        TokenID(int(';')).String(),
+        token.T_START_HEREDOC.String(),
+        token.T_ENCAPSED_AND_WHITESPACE.String(),
+        token.T_VARIABLE.String(),
+        token.T_ENCAPSED_AND_WHITESPACE.String(),
+        token.T_END_HEREDOC.String(),
+        token.ID(int(';')).String(),
 
-        T_START_HEREDOC.String(),
-        T_ENCAPSED_AND_WHITESPACE.String(),
-        T_VARIABLE.String(),
-        T_ENCAPSED_AND_WHITESPACE.String(),
-        T_END_HEREDOC.String(),
-        TokenID(int(';')).String(),
+        token.T_START_HEREDOC.String(),
+        token.T_ENCAPSED_AND_WHITESPACE.String(),
+        token.T_VARIABLE.String(),
+        token.T_ENCAPSED_AND_WHITESPACE.String(),
+        token.T_END_HEREDOC.String(),
+        token.ID(int(';')).String(),
 
-        T_START_HEREDOC.String(),
-        T_VARIABLE.String(),
-        T_VARIABLE.String(),
-        T_ENCAPSED_AND_WHITESPACE.String(),
-        T_END_HEREDOC.String(),
+        token.T_START_HEREDOC.String(),
+        token.T_VARIABLE.String(),
+        token.T_VARIABLE.String(),
+        token.T_ENCAPSED_AND_WHITESPACE.String(),
+        token.T_END_HEREDOC.String(),
     }
 
-    lexer := NewLexer([]byte(src), "7.4", false, nil)
-    lexer.withTokens = true
+    lexer := NewLexer([]byte(src), "7.4", nil)
 
     actual := []string{}
 
     for {
@@ -934,21 +925,20 @@ CAT;
     expected := []string{
-        T_START_HEREDOC.String(),
-        T_ENCAPSED_AND_WHITESPACE.String(),
-        T_CURLY_OPEN.String(),
-        T_VARIABLE.String(),
-        TokenID(int('[')).String(),
-        T_CONSTANT_ENCAPSED_STRING.String(),
-        TokenID(int(']')).String(),
-        TokenID(int('}')).String(),
-        T_ENCAPSED_AND_WHITESPACE.String(),
-        T_END_HEREDOC.String(),
-        TokenID(int(';')).String(),
+        token.T_START_HEREDOC.String(),
+        token.T_ENCAPSED_AND_WHITESPACE.String(),
+        token.T_CURLY_OPEN.String(),
+        token.T_VARIABLE.String(),
+        token.ID(int('[')).String(),
+        token.T_CONSTANT_ENCAPSED_STRING.String(),
+        token.ID(int(']')).String(),
+        token.ID(int('}')).String(),
+        token.T_ENCAPSED_AND_WHITESPACE.String(),
+        token.T_END_HEREDOC.String(),
+        token.ID(int(';')).String(),
     }
 
-    lexer := NewLexer([]byte(src), "7.4", false, nil)
-    lexer.withTokens = true
+    lexer := NewLexer([]byte(src), "7.4", nil)
 
     actual := []string{}
 
     for {
@@ -971,15 +961,14 @@ func TestHereDocTokens73(t *testing.T) {
     expected := []string{
-        T_START_HEREDOC.String(),
-        T_ENCAPSED_AND_WHITESPACE.String(),
-        T_END_HEREDOC.String(),
-        TokenID(int(',')).String(),
-        T_VARIABLE.String(),
+        token.T_START_HEREDOC.String(),
+        token.T_ENCAPSED_AND_WHITESPACE.String(),
+        token.T_END_HEREDOC.String(),
+        token.ID(int(',')).String(),
+        token.T_VARIABLE.String(),
     }
 
-    lexer := NewLexer([]byte(src), "7.4", false, nil)
-    lexer.withTokens = true
+    lexer := NewLexer([]byte(src), "7.4", nil)
 
     actual := []string{}
 
     for {
@@ -1002,15 +991,14 @@ CAT;`
     expected := []string{
-        T_START_HEREDOC.String(),
-        T_ENCAPSED_AND_WHITESPACE.String(),
-        T_END_HEREDOC.String(),
-        TokenID(int(';')).String(),
+        token.T_START_HEREDOC.String(),
+        token.T_ENCAPSED_AND_WHITESPACE.String(),
+        token.T_END_HEREDOC.String(),
+        token.ID(int(';')).String(),
     }
 
-    lexer := NewLexer([]byte(src), "7.4", false, nil)
+    lexer := NewLexer([]byte(src), "7.4", nil)
     lexer.phpVersion = "7.2"
-    lexer.withTokens = true
 
     actual := []string{}
 
     for {
@@ -1032,17 +1020,16 @@ func TestInlineHtmlNopTokens(t *testing.T) {
 `
 
     expected := []string{
-        T_VARIABLE.String(),
-        TokenID(int(';')).String(),
-        T_INLINE_HTML.String(),
+        token.T_VARIABLE.String(),
+        token.ID(int(';')).String(),
+        token.T_INLINE_HTML.String(),
 
-        T_VARIABLE.String(),
-        TokenID(int(';')).String(),
-        T_INLINE_HTML.String(),
+        token.T_VARIABLE.String(),
+        token.ID(int(';')).String(),
+        token.T_INLINE_HTML.String(),
     }
 
-    lexer := NewLexer([]byte(src), "7.4", false, nil)
-    lexer.withTokens = true
+    lexer := NewLexer([]byte(src), "7.4", nil)
 
     actual := []string{}
 
     for {
@@ -1061,11 +1048,11 @@ func TestStringTokensAfterVariable(t *testing.T) {
     src := ` test`
 
-    expected := []token.Token{
+    expected := []*token.Token{
         {
             ID: token.T_OPEN_TAG,
            Value: []byte(" bar ( '' ) ;`
-    lexer := NewLexer([]byte(src), "7.4", false, nil)
-    lexer.withTokens = true
+    lexer := NewLexer([]byte(src), "7.4", nil)
-    expected := []token.Token{
+    expected := []*token.Token{
         {
             ID: token.T_OPEN_TAG,
            Value: []byte("= TokenID(len(_TokenID_index)-1) {
-        return "TokenID(" + strconv.FormatInt(int64(i+57346), 10) + ")"
-    }
-    return _TokenID_name[_TokenID_index[i]:_TokenID_index[i+1]]
-}
diff --git a/pkg/ast/node.go b/pkg/ast/node.go
index 81f6ef4..85b494a 100644
--- a/pkg/ast/node.go
+++ b/pkg/ast/node.go
@@ -16,6 +16,10 @@ func (n *Node) GetNode() *Node {
     return n
 }
 
+func (n *Node) GetPosition() *position.Position {
+    return n.Position
+}
+
 // Root node
 type Root struct {
     Node
diff --git a/pkg/ast/visitor/dump.go b/pkg/ast/visitor/dump.go
index 312b888..67bbaac 100644
--- a/pkg/ast/visitor/dump.go
+++ b/pkg/ast/visitor/dump.go
@@ -121,7 +121,7 @@ func (v *Dump) printNode(n *ast.Node) {
         key := token.Position(k)
 
         v.printIndent(v.indent + 2)
-        v.print("token." + key.String() + ": []token.Token{\n")
+        v.print("token." + key.String() + ": []*token.Token{\n")
 
         for _, tkn := range n.Tokens[key] {
             v.printIndent(v.indent + 3)
diff --git a/pkg/ast/visitor/dump_test.go b/pkg/ast/visitor/dump_test.go
index cc9a387..0ccdc27 100644
--- a/pkg/ast/visitor/dump_test.go
+++ b/pkg/ast/visitor/dump_test.go
@@ -13,7 +13,7 @@ func ExampleDump() {
     stxTree := &ast.Root{
         Node: ast.Node{
             Tokens: token.Collection{
-                token.Start: []token.Token{
+                token.Start: []*token.Token{
                     {
                         ID: token.T_WHITESPACE,
                         Value: []byte(" "),
@@ -44,7 +44,7 @@ func ExampleDump() {
     //&ast.Root{
     //    Node: ast.Node{
     //        Tokens: token.Collection{
-    //            token.Start: []token.Token{
+    //            token.Start: []*token.Token{
     //                {
     //                    ID: token.T_WHITESPACE,
     //                    Value: []byte(" "),
diff --git a/pkg/ast/visitor/filter_tokens.go b/pkg/ast/visitor/filter_tokens.go
new file mode 100644
index 0000000..c69a3f6
--- /dev/null
+++ b/pkg/ast/visitor/filter_tokens.go
@@ -0,0 +1,14 @@
+package visitor
+
+import (
+    "github.com/z7zmey/php-parser/pkg/ast"
+)
+
+type FilterTokens struct {
+    Null
+}
+
+func (v *FilterTokens) EnterNode(n ast.Vertex) bool {
+    n.GetNode().Tokens = nil
+    return true
+}
diff --git a/pkg/parser/parser.go b/pkg/parser/parser.go
index 67974fd..a50d09e 100644
--- a/pkg/parser/parser.go
+++ b/pkg/parser/parser.go
@@ -29,7 +29,7 @@ func Parse(src []byte, ver string, cfg Config) (ast.Vertex, error) {
         return nil, err
     }
 
-    lexer := scanner.NewLexer(src, ver, cfg.WithTokens, cfg.ErrorHandlerFunc)
+    lexer := scanner.NewLexer(src, ver, cfg.ErrorHandlerFunc)
 
     if r == -1 {
         parser = php5.NewParser(lexer, cfg.ErrorHandlerFunc)
diff --git a/pkg/position/pool.go b/pkg/position/pool.go
new file mode 100644
index 0000000..ad26891
--- /dev/null
+++ b/pkg/position/pool.go
@@ -0,0 +1,29 @@
+package position
+
+const DefaultBlockSize = 1024
+
+type Pool struct {
+    block []Position
+    off int
+}
+
+func NewPool(blockSize int) *Pool {
+    return &Pool{
+        block: make([]Position, blockSize),
+    }
+}
+
+func (p *Pool) Get() *Position {
+    if len(p.block) == 0 {
+        return nil
+    }
+
+    if len(p.block) == p.off {
+        p.block = make([]Position, len(p.block))
+        p.off = 0
+    }
+
+    p.off++
+
+    return &p.block[p.off-1]
+}
diff --git a/pkg/printer/printer_parsed_php5_test.go b/pkg/printer/printer_parsed_php5_test.go
index 24b5acd..2745542 100644
--- a/pkg/printer/printer_parsed_php5_test.go
+++ b/pkg/printer/printer_parsed_php5_test.go
@@ -12,7 +12,7 @@ import (
 )
 
 func parsePhp5(src string) ast.Vertex {
-    lexer := scanner.NewLexer([]byte(src), "5.6", true, nil)
+    lexer := scanner.NewLexer([]byte(src), "5.6", nil)
     php5parser := php5.NewParser(lexer, nil)
     php5parser.Parse()
diff --git a/pkg/printer/printer_parsed_php7_test.go b/pkg/printer/printer_parsed_php7_test.go
index 317ea02..5978f5d 100644
--- a/pkg/printer/printer_parsed_php7_test.go
+++ b/pkg/printer/printer_parsed_php7_test.go
@@ -29,7 +29,7 @@ abstract class Bar extends Baz
 
     // parse
 
-    lexer := scanner.NewLexer([]byte(src), "7.4", true, nil)
+    lexer := scanner.NewLexer([]byte(src), "7.4", nil)
     php7parser := php7.NewParser(lexer, nil)
     php7parser.Parse()
@@ -61,7 +61,7 @@ abstract class Bar extends Baz
 }
 
 func parse(src string) ast.Vertex {
-    lexer := scanner.NewLexer([]byte(src), "7.4", true, nil)
+    lexer := scanner.NewLexer([]byte(src), "7.4", nil)
     php7parser := php7.NewParser(lexer, nil)
     php7parser.Parse()
diff --git a/pkg/printer/printer_test.go b/pkg/printer/printer_test.go
index 8e85ac0..8e6d4e0 100644
--- a/pkg/printer/printer_test.go
+++ b/pkg/printer/printer_test.go
@@ -75,7 +75,7 @@ func TestPrinterPrintFileInlineHtml(t *testing.T) {
                 Expr: &ast.ExprVariable{
                     Node: ast.Node{
                         Tokens: token.Collection{
-                            token.Start: []token.Token{
+                            token.Start: []*token.Token{
                                 {
                                     ID: token.ID('$'),
                                     Value: []byte("$"),
@@ -93,7 +93,7 @@ func TestPrinterPrintFileInlineHtml(t *testing.T) {
                 Expr: &ast.ExprVariable{
                     Node: ast.Node{
                         Tokens: token.Collection{
-                            token.Start: []token.Token{
+                            token.Start: []*token.Token{
                                 {
                                     ID: token.ID('$'),
                                     Value: []byte("$"),
diff --git a/pkg/token/pool.go b/pkg/token/pool.go
new file mode 100644
index 0000000..02c88ee
--- /dev/null
+++ b/pkg/token/pool.go
@@ -0,0 +1,29 @@
+package token
+
+const DefaultBlockSize = 1024
+
+type Pool struct {
+    block []Token
+    off int
+}
+
+func NewPool(blockSize int) *Pool {
+    return &Pool{
+        block: make([]Token, blockSize),
+    }
+}
+
+func (p *Pool) Get() *Token {
+    if len(p.block) == 0 {
+        return nil
+    }
+
+    if len(p.block) == p.off {
+        p.block = make([]Token, len(p.block))
+        p.off = 0
+    }
+
+    p.off++
+
+    return &p.block[p.off-1]
+}
diff --git a/pkg/token/pool_bench_test.go b/pkg/token/pool_bench_test.go
new file mode 100644
index 0000000..f6b3faf
--- /dev/null
+++ b/pkg/token/pool_bench_test.go
@@ -0,0 +1,173 @@
+package token
+
+import (
+    "testing"
+)
+
+const amount = 100000
+
+func BenchmarkPlain(b *testing.B) {
+    for n := 0; n < b.N; n++ {
+        buf := make([]*Token, 0, amount)
+
+        for i := 0; i < amount; i++ {
+            buf = append(buf, &Token{})
+        }
+    }
+}
+
+func BenchmarkSlice128(b *testing.B) {
+    for n := 0; n < b.N; n++ {
+        buf := make([]*Token, 0, amount)
+        slc := make([]Token, 0, 128)
+
+        for i := 0; i < amount; i++ {
+            slc = append(slc, Token{})
+            buf = append(buf, &slc[len(slc)-1])
+        }
+    }
+}
+
+func BenchmarkSlice512(b *testing.B) {
+    for n := 0; n < b.N; n++ {
+        buf := make([]*Token, 0, amount)
+        slc := make([]Token, 0, 512)
+
+        for i := 0; i < amount; i++ {
+            slc = append(slc, Token{})
+            buf = append(buf, &slc[len(slc)-1])
+        }
+    }
+}
+
+func BenchmarkSlice1024(b *testing.B) {
+    for n := 0; n < b.N; n++ {
+        buf := make([]*Token, 0, amount)
+        slc := make([]Token, 0, 1024)
+
+        for i := 0; i < amount; i++ {
+            slc = append(slc, Token{})
+            buf = append(buf, &slc[len(slc)-1])
+        }
+    }
+}
+
+func BenchmarkSlice2048(b *testing.B) {
+    for n := 0; n < b.N; n++ {
+        buf := make([]*Token, 0, amount)
+        slc := make([]Token, 0, 2048)
+
+        for i := 0; i < amount; i++ {
+            slc = append(slc, Token{})
+            buf = append(buf, &slc[len(slc)-1])
+        }
+    }
+}
+
+func BenchmarkBlockAppend128(b *testing.B) {
+    for n := 0; n < b.N; n++ {
+        buf := make([]*Token, 0, amount)
+        slc := make([]Token, 0, 128)
+
+        for i := 0; i < amount; i++ {
+            if len(slc) == 128 {
+                slc = make([]Token, 0, 128)
+            }
+
+            slc = append(slc, Token{})
+            buf = append(buf, &slc[len(slc)-1])
+        }
+    }
+}
+
+func BenchmarkBlockAppend512(b *testing.B) {
+    for n := 0; n < b.N; n++ {
+        buf := make([]*Token, 0, amount)
+        slc := make([]Token, 0, 512)
+
+        for i := 0; i < amount; i++ {
+            if len(slc) == 512 {
+                slc = make([]Token, 0, 512)
+            }
+
+            slc = append(slc, Token{})
+            buf = append(buf, &slc[len(slc)-1])
+        }
+    }
+}
+
+func BenchmarkBlockAppend1024(b *testing.B) {
+    for n := 0; n < b.N; n++ {
+        buf := make([]*Token, 0, amount)
+        slc := make([]Token, 0, 1024)
+
+        for i := 0; i < amount; i++ {
+            if len(slc) == 1024 {
+                slc = make([]Token, 0, 1024)
+            }
+
+            slc = append(slc, Token{})
+            buf = append(buf, &slc[len(slc)-1])
+        }
+    }
+}
+
+func BenchmarkBlockAppend2048(b *testing.B) {
+    for n := 0; n < b.N; n++ {
+        buf := make([]*Token, 0, amount)
+        slc := make([]Token, 0, 2048)
+
+        for i := 0; i < amount; i++ {
+            if len(slc) == 2048 {
+                slc = make([]Token, 0, 2048)
+            }
+
+            slc = append(slc, Token{})
+            buf = append(buf, &slc[len(slc)-1])
+        }
+    }
+}
+
+func BenchmarkPool128(b *testing.B) {
+    for n := 0; n < b.N; n++ {
+        pool := NewPool(128)
+        buf := make([]*Token, 0, amount)
+
+        for i := 0; i < amount; i++ {
+            buf = append(buf, pool.Get())
+        }
+    }
+}
+
+func BenchmarkPool512(b *testing.B) {
+    for n := 0; n < b.N; n++ {
+        pool := NewPool(512)
+        buf := make([]*Token, 0, amount)
+
+        for i := 0; i < amount; i++ {
+            buf = append(buf, pool.Get())
+        }
+    }
+}
+
+func BenchmarkPool1024(b *testing.B) {
+    for n := 0; n < b.N; n++ {
+        pool := NewPool(1024)
+        buf := make([]*Token, 0, amount)
+
+        for i := 0; i < amount; i++ {
+            buf = append(buf, pool.Get())
+        }
+    }
+}
+
+func BenchmarkPool2048(b *testing.B) {
+    for n := 0; n < b.N; n++ {
+        pool := NewPool(2048)
+        buf := make([]*Token, 0, amount)
+
+        for i := 0; i < amount; i++ {
+            buf = append(buf, pool.Get())
+        }
+    }
+}
diff --git a/pkg/token/position.go b/pkg/token/position.go
index 2ea40d4..cd76285 100644
--- a/pkg/token/position.go
+++ b/pkg/token/position.go
@@ -62,7 +62,7 @@ const (
     CloseParenthesisToken
 )
 
-type Collection map[Position][]Token
+type Collection map[Position][]*Token
 
 func (c Collection) IsEmpty() bool {
     for _, v := range c {
diff --git a/pkg/token/token.go b/pkg/token/token.go
index c1fa07e..b36c85f 100644
--- a/pkg/token/token.go
+++ b/pkg/token/token.go
@@ -1,5 +1,7 @@
 package token
 
+import "github.com/z7zmey/php-parser/pkg/position"
+
 //go:generate stringer -type=ID -output ./token_string.go
 type ID int
 
@@ -145,6 +147,13 @@ const (
 )
 
 type Token struct {
-    ID ID
-    Value []byte
+    ID ID
+    Value []byte
+    Position *position.Position
+    SkippedTokens []*Token
+    Skipped []byte
+}
+
+func (t *Token) GetPosition() *position.Position {
+    return t.Position
 }
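
Note (pkg/ast/visitor/filter_tokens.go): the new FilterTokens visitor clears the free-floating token collection of every node it enters, which is what lets the updated parser tests compare ASTs while ignoring tokens. A minimal standalone sketch, assuming *ast.Root satisfies ast.Vertex as it does for the traversers in this repository; the Root literal is only illustrative:

package main

import (
    "fmt"

    "github.com/z7zmey/php-parser/pkg/ast"
    "github.com/z7zmey/php-parser/pkg/ast/visitor"
    "github.com/z7zmey/php-parser/pkg/token"
)

func main() {
    // A root node carrying one free-floating whitespace token.
    root := &ast.Root{
        Node: ast.Node{
            Tokens: token.Collection{
                token.Start: []*token.Token{
                    {ID: token.T_WHITESPACE, Value: []byte(" ")},
                },
            },
        },
    }

    // EnterNode drops the node's token collection and returns true, so a
    // traverser driving this visitor would keep descending into children.
    v := new(visitor.FilterTokens)
    v.EnterNode(root)

    fmt.Println(root.GetNode().Tokens == nil) // true
}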
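
Note (pkg/token/pool.go, pkg/position/pool.go): both new pools are the same block allocator. Get hands out a pointer into a pre-allocated block and swaps in a fresh block once the current one is exhausted, so tokens and positions are allocated in batches rather than one heap object per Get. A usage sketch under that reading (the wiring below is illustrative, not taken from the diff):

package main

import (
    "fmt"

    "github.com/z7zmey/php-parser/pkg/position"
    "github.com/z7zmey/php-parser/pkg/token"
)

func main() {
    tokenPool := token.NewPool(token.DefaultBlockSize)
    posPool := position.NewPool(position.DefaultBlockSize)

    // Get never returns nil for a pool created with a non-zero block size;
    // only a zero-size pool returns nil, because its block stays empty.
    tkn := tokenPool.Get()
    tkn.ID = token.T_WHITESPACE
    tkn.Value = []byte(" ")
    tkn.Position = posPool.Get()

    fmt.Println(tkn.ID.String(), tkn.GetPosition() != nil)
}

The accompanying pool_bench_test.go compares this block strategy against plain per-token allocation and hand-rolled slice reuse at block sizes 128 through 2048.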
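
Note (pkg/token/token.go): Token now carries its own *position.Position plus SkippedTokens and Skipped fields, and exposes GetPosition just as ast.Node does, so positions can be read uniformly from nodes and from tokens. A small sketch; reading SkippedTokens as the hidden tokens recorded in front of a token is an assumption, since the diff itself only adds the fields:

package main

import (
    "fmt"

    "github.com/z7zmey/php-parser/pkg/position"
    "github.com/z7zmey/php-parser/pkg/token"
)

func main() {
    tkn := &token.Token{
        ID:       token.T_VARIABLE,
        Value:    []byte("$a"),
        Position: &position.Position{}, // empty literal; real positions come from the lexer
        // Assumed use: whitespace/comments collected ahead of the token.
        SkippedTokens: []*token.Token{
            {ID: token.T_WHITESPACE, Value: []byte(" ")},
        },
    }

    fmt.Println(tkn.GetPosition() != nil, len(tkn.SkippedTokens))
}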