refactoring: update ast structure of "Closure" and "ClosureUse" nodes

Vadym Slizov, 2020-12-26 18:20:10 +02:00
parent 03c7979ccd
commit b85bae2ec1
GPG Key ID: AEA2A9388EF42A4A
14 changed files with 654 additions and 589 deletions
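In short: the use clause no longer hangs off ExprClosure as a separate child node. ExprClosure now carries the use-clause tokens and the list of captured variables itself (UseTkn, UseOpenParenthesisTkn, Use, UseSeparatorTkns, UseCloseParenthesisTkn), while ExprClosureUse shrinks to a single captured variable with an optional ampersand token for by-reference capture. A minimal sketch of the new shape, illustrative only and not taken from the commit (positions and most tokens omitted, helper name hypothetical):

    // Illustrative only: the new node shape for `function () use ($c, &$d) {};`.
    func exampleClosureAST() *ast.ExprClosure {
        return &ast.ExprClosure{
            Use: []ast.Vertex{
                &ast.ExprClosureUse{ // $c, captured by value
                    Var: &ast.ExprVariable{VarName: &ast.Identifier{Value: []byte("$c")}},
                },
                &ast.ExprClosureUse{ // &$d, captured by reference; the ampersand now lives on the use item
                    AmpersandTkn: &token.Token{Value: []byte("&")},
                    Var:          &ast.ExprVariable{VarName: &ast.Identifier{Value: []byte("$d")}},
                },
            },
            Stmts: []ast.Vertex{},
        }
    }
    // Before this commit the same closure carried one nested clause node:
    //   ExprClosure.ClosureUse -> &ast.ExprClosureUse{Uses: []ast.Vertex{
    //       &ast.ExprVariable{...}, &ast.ExprReference{Var: &ast.ExprVariable{...}}}}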


@ -31770,7 +31770,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 36, EndPos: 36,
}, },
FunctionTkn: &token.Token{ FunctionTkn: &token.Token{
ID: token.T_FUNCTION, ID: token.T_FUNCTION,
Value: []byte("function"), Value: []byte("function"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -31780,7 +31780,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
FreeFloating: []*token.Token{ FreeFloating: []*token.Token{
{ {
ID: token.T_OPEN_TAG, ID: token.T_OPEN_TAG,
Value: []byte("<?"), Value: []byte("<?"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -31790,7 +31790,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
}, },
{ {
ID: token.T_WHITESPACE, ID: token.T_WHITESPACE,
Value: []byte(" "), Value: []byte(" "),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -31802,7 +31802,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
}, },
OpenParenthesisTkn: &token.Token{ OpenParenthesisTkn: &token.Token{
ID: token.ID(40), ID: token.ID(40),
Value: []byte("("), Value: []byte("("),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -31834,7 +31834,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 14, EndPos: 14,
}, },
IdentifierTkn: &token.Token{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE, ID: token.T_VARIABLE,
Value: []byte("$a"), Value: []byte("$a"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -31869,7 +31869,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 18, EndPos: 18,
}, },
IdentifierTkn: &token.Token{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE, ID: token.T_VARIABLE,
Value: []byte("$b"), Value: []byte("$b"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -31879,7 +31879,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
FreeFloating: []*token.Token{ FreeFloating: []*token.Token{
{ {
ID: token.T_WHITESPACE, ID: token.T_WHITESPACE,
Value: []byte(" "), Value: []byte(" "),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -31897,7 +31897,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
SeparatorTkns: []*token.Token{ SeparatorTkns: []*token.Token{
{ {
ID: token.ID(44), ID: token.ID(44),
Value: []byte(","), Value: []byte(","),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -31908,7 +31908,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
}, },
CloseParenthesisTkn: &token.Token{ CloseParenthesisTkn: &token.Token{
ID: token.ID(41), ID: token.ID(41),
Value: []byte(")"), Value: []byte(")"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -31917,59 +31917,59 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 19, EndPos: 19,
}, },
}, },
ClosureUse: &ast.ExprClosureUse{ UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 20, StartPos: 20,
EndPos: 33, EndPos: 23,
}, },
UseTkn: &token.Token{ FreeFloating: []*token.Token{
ID: token.T_USE, {
Value: []byte("use"), ID: token.T_WHITESPACE,
Position: &position.Position{ Value: []byte(" "),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 20, EndLine: 1,
EndPos: 23, StartPos: 19,
}, EndPos: 20,
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
}, },
}, },
}, },
OpenParenthesisTkn: &token.Token{ },
ID: token.ID(40), UseOpenParenthesisTkn: &token.Token{
Value: []byte("("), ID: token.ID(40),
Position: &position.Position{ Value: []byte("("),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 24, EndLine: 1,
EndPos: 25, StartPos: 24,
}, EndPos: 25,
FreeFloating: []*token.Token{ },
{ FreeFloating: []*token.Token{
ID: token.T_WHITESPACE, {
Value: []byte(" "), ID: token.T_WHITESPACE,
Position: &position.Position{ Value: []byte(" "),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 23, EndLine: 1,
EndPos: 24, StartPos: 23,
}, EndPos: 24,
}, },
}, },
}, },
Uses: []ast.Vertex{ },
&ast.ExprVariable{ Use: []ast.Vertex{
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 27,
},
Var: &ast.ExprVariable{
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
@ -31984,7 +31984,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 27, EndPos: 27,
}, },
IdentifierTkn: &token.Token{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE, ID: token.T_VARIABLE,
Value: []byte("$c"), Value: []byte("$c"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -31996,89 +31996,89 @@ func TestExprClosure_Use(t *testing.T) {
Value: []byte("$c"), Value: []byte("$c"),
}, },
}, },
&ast.ExprReference{ },
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 29,
EndPos: 32,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 29, StartPos: 29,
EndPos: 32, EndPos: 30,
}, },
AmpersandTkn: &token.Token{ FreeFloating: []*token.Token{
ID: token.ID(38), {
Value: []byte("&"), ID: token.T_WHITESPACE,
Position: &position.Position{ Value: []byte(" "),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 29, EndLine: 1,
EndPos: 30, StartPos: 28,
}, EndPos: 29,
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 28,
EndPos: 29,
},
}, },
}, },
}, },
Var: &ast.ExprVariable{ },
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
VarName: &ast.Identifier{
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 30, StartPos: 30,
EndPos: 32, EndPos: 32,
}, },
VarName: &ast.Identifier{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$d"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 30, StartPos: 30,
EndPos: 32, EndPos: 32,
}, },
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$d"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
},
Value: []byte("$d"),
}, },
Value: []byte("$d"),
}, },
}, },
}, },
SeparatorTkns: []*token.Token{ },
{ UseSeparatorTkns: []*token.Token{
ID: token.ID(44), {
Value: []byte(","), ID: token.ID(44),
Position: &position.Position{ Value: []byte(","),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 27, EndLine: 1,
EndPos: 28, StartPos: 27,
}, EndPos: 28,
}, },
}, },
CloseParenthesisTkn: &token.Token{ },
ID: token.ID(41), UseCloseParenthesisTkn: &token.Token{
Value: []byte(")"), ID: token.ID(41),
Position: &position.Position{ Value: []byte(")"),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 32, EndLine: 1,
EndPos: 33, StartPos: 32,
}, EndPos: 33,
}, },
}, },
OpenCurlyBracketTkn: &token.Token{ OpenCurlyBracketTkn: &token.Token{
ID: token.ID(123), ID: token.ID(123),
Value: []byte("{"), Value: []byte("{"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32088,7 +32088,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
FreeFloating: []*token.Token{ FreeFloating: []*token.Token{
{ {
ID: token.T_WHITESPACE, ID: token.T_WHITESPACE,
Value: []byte(" "), Value: []byte(" "),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32101,7 +32101,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
Stmts: []ast.Vertex{}, Stmts: []ast.Vertex{},
CloseCurlyBracketTkn: &token.Token{ CloseCurlyBracketTkn: &token.Token{
ID: token.ID(125), ID: token.ID(125),
Value: []byte("}"), Value: []byte("}"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32112,7 +32112,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
}, },
SemiColonTkn: &token.Token{ SemiColonTkn: &token.Token{
ID: token.ID(59), ID: token.ID(59),
Value: []byte(";"), Value: []byte(";"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32123,7 +32123,8 @@ func TestExprClosure_Use(t *testing.T) {
}, },
}, },
}, },
EndTkn: &token.Token{}, EndTkn: &token.Token{
},
} }
lexer := scanner.NewLexer([]byte(src), "5.6", nil) lexer := scanner.NewLexer([]byte(src), "5.6", nil)
@ -32159,7 +32160,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 36, EndPos: 36,
}, },
FunctionTkn: &token.Token{ FunctionTkn: &token.Token{
ID: token.T_FUNCTION, ID: token.T_FUNCTION,
Value: []byte("function"), Value: []byte("function"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32169,7 +32170,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
FreeFloating: []*token.Token{ FreeFloating: []*token.Token{
{ {
ID: token.T_OPEN_TAG, ID: token.T_OPEN_TAG,
Value: []byte("<?"), Value: []byte("<?"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32179,7 +32180,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
}, },
{ {
ID: token.T_WHITESPACE, ID: token.T_WHITESPACE,
Value: []byte(" "), Value: []byte(" "),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32191,7 +32192,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
}, },
OpenParenthesisTkn: &token.Token{ OpenParenthesisTkn: &token.Token{
ID: token.ID(40), ID: token.ID(40),
Value: []byte("("), Value: []byte("("),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32223,7 +32224,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 14, EndPos: 14,
}, },
IdentifierTkn: &token.Token{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE, ID: token.T_VARIABLE,
Value: []byte("$a"), Value: []byte("$a"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32258,7 +32259,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 18, EndPos: 18,
}, },
IdentifierTkn: &token.Token{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE, ID: token.T_VARIABLE,
Value: []byte("$b"), Value: []byte("$b"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32268,7 +32269,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
FreeFloating: []*token.Token{ FreeFloating: []*token.Token{
{ {
ID: token.T_WHITESPACE, ID: token.T_WHITESPACE,
Value: []byte(" "), Value: []byte(" "),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32286,7 +32287,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
SeparatorTkns: []*token.Token{ SeparatorTkns: []*token.Token{
{ {
ID: token.ID(44), ID: token.ID(44),
Value: []byte(","), Value: []byte(","),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32297,7 +32298,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
}, },
CloseParenthesisTkn: &token.Token{ CloseParenthesisTkn: &token.Token{
ID: token.ID(41), ID: token.ID(41),
Value: []byte(")"), Value: []byte(")"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32306,104 +32307,104 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 19, EndPos: 19,
}, },
}, },
ClosureUse: &ast.ExprClosureUse{ UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 20, StartPos: 20,
EndPos: 33, EndPos: 23,
}, },
UseTkn: &token.Token{ FreeFloating: []*token.Token{
ID: token.T_USE, {
Value: []byte("use"), ID: token.T_WHITESPACE,
Position: &position.Position{ Value: []byte(" "),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 20, EndLine: 1,
EndPos: 23, StartPos: 19,
}, EndPos: 20,
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
}, },
}, },
}, },
OpenParenthesisTkn: &token.Token{ },
ID: token.ID(40), UseOpenParenthesisTkn: &token.Token{
Value: []byte("("), ID: token.ID(40),
Position: &position.Position{ Value: []byte("("),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 24, EndLine: 1,
EndPos: 25, StartPos: 24,
}, EndPos: 25,
FreeFloating: []*token.Token{ },
{ FreeFloating: []*token.Token{
ID: token.T_WHITESPACE, {
Value: []byte(" "), ID: token.T_WHITESPACE,
Position: &position.Position{ Value: []byte(" "),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 23, EndLine: 1,
EndPos: 24, StartPos: 23,
}, EndPos: 24,
}, },
}, },
}, },
Uses: []ast.Vertex{ },
&ast.ExprReference{ Use: []ast.Vertex{
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 28,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 25, StartPos: 25,
EndPos: 26,
},
},
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28, EndPos: 28,
}, },
AmpersandTkn: &token.Token{ VarName: &ast.Identifier{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 26,
},
},
Var: &ast.ExprVariable{
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 26, StartPos: 26,
EndPos: 28, EndPos: 28,
}, },
VarName: &ast.Identifier{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$c"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 26, StartPos: 26,
EndPos: 28, EndPos: 28,
}, },
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$c"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28,
},
},
Value: []byte("$c"),
}, },
Value: []byte("$c"),
}, },
}, },
&ast.ExprVariable{ },
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
Var: &ast.ExprVariable{
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
@ -32418,7 +32419,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 32, EndPos: 32,
}, },
IdentifierTkn: &token.Token{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE, ID: token.T_VARIABLE,
Value: []byte("$d"), Value: []byte("$d"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32428,7 +32429,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
FreeFloating: []*token.Token{ FreeFloating: []*token.Token{
{ {
ID: token.T_WHITESPACE, ID: token.T_WHITESPACE,
Value: []byte(" "), Value: []byte(" "),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32443,31 +32444,31 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
}, },
}, },
SeparatorTkns: []*token.Token{ },
{ UseSeparatorTkns: []*token.Token{
ID: token.ID(44), {
Value: []byte(","), ID: token.ID(44),
Position: &position.Position{ Value: []byte(","),
StartLine: 1,
EndLine: 1,
StartPos: 28,
EndPos: 29,
},
},
},
CloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 32, StartPos: 28,
EndPos: 33, EndPos: 29,
}, },
}, },
}, },
UseCloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 32,
EndPos: 33,
},
},
OpenCurlyBracketTkn: &token.Token{ OpenCurlyBracketTkn: &token.Token{
ID: token.ID(123), ID: token.ID(123),
Value: []byte("{"), Value: []byte("{"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32477,7 +32478,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
FreeFloating: []*token.Token{ FreeFloating: []*token.Token{
{ {
ID: token.T_WHITESPACE, ID: token.T_WHITESPACE,
Value: []byte(" "), Value: []byte(" "),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32490,7 +32491,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
Stmts: []ast.Vertex{}, Stmts: []ast.Vertex{},
CloseCurlyBracketTkn: &token.Token{ CloseCurlyBracketTkn: &token.Token{
ID: token.ID(125), ID: token.ID(125),
Value: []byte("}"), Value: []byte("}"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32501,7 +32502,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
}, },
SemiColonTkn: &token.Token{ SemiColonTkn: &token.Token{
ID: token.ID(59), ID: token.ID(59),
Value: []byte(";"), Value: []byte(";"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -32512,7 +32513,8 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
}, },
}, },
EndTkn: &token.Token{}, EndTkn: &token.Token{
},
} }
lexer := scanner.NewLexer([]byte(src), "5.6", nil) lexer := scanner.NewLexer([]byte(src), "5.6", nil)

BIN
internal/php5/php5.go generated

Binary file not shown.


@@ -3367,36 +3367,38 @@ expr_without_variable:
         }
     |   function is_reference '(' parameter_list ')' lexical_vars '{' inner_statement_list '}'
         {
-            $$ = &ast.ExprClosure{
-                Position:             yylex.(*Parser).builder.NewTokensPosition($1, $9),
-                FunctionTkn:          $1,
-                AmpersandTkn:         $2,
-                OpenParenthesisTkn:   $3,
-                Params:               $4.(*ast.ParserSeparatedList).Items,
-                SeparatorTkns:        $4.(*ast.ParserSeparatedList).SeparatorTkns,
-                CloseParenthesisTkn:  $5,
-                ClosureUse:           $6,
-                OpenCurlyBracketTkn:  $7,
-                Stmts:                $8,
-                CloseCurlyBracketTkn: $9,
-            }
+            closure := $6.(*ast.ExprClosure)
+
+            closure.Position = yylex.(*Parser).builder.NewTokensPosition($1, $9)
+            closure.FunctionTkn = $1
+            closure.AmpersandTkn = $2
+            closure.OpenParenthesisTkn = $3
+            closure.Params = $4.(*ast.ParserSeparatedList).Items
+            closure.SeparatorTkns = $4.(*ast.ParserSeparatedList).SeparatorTkns
+            closure.CloseParenthesisTkn = $5
+            closure.OpenCurlyBracketTkn = $7
+            closure.Stmts = $8
+            closure.CloseCurlyBracketTkn = $9
+
+            $$ = closure
         }
     |   T_STATIC function is_reference '(' parameter_list ')' lexical_vars '{' inner_statement_list '}'
         {
-            $$ = &ast.ExprClosure{
-                Position:             yylex.(*Parser).builder.NewTokensPosition($1, $10),
-                StaticTkn:            $1,
-                FunctionTkn:          $2,
-                AmpersandTkn:         $3,
-                OpenParenthesisTkn:   $4,
-                Params:               $5.(*ast.ParserSeparatedList).Items,
-                SeparatorTkns:        $5.(*ast.ParserSeparatedList).SeparatorTkns,
-                CloseParenthesisTkn:  $6,
-                ClosureUse:           $7,
-                OpenCurlyBracketTkn:  $8,
-                Stmts:                $9,
-                CloseCurlyBracketTkn: $10,
-            }
+            closure := $7.(*ast.ExprClosure)
+
+            closure.Position = yylex.(*Parser).builder.NewTokensPosition($1, $10)
+            closure.StaticTkn = $1
+            closure.FunctionTkn = $2
+            closure.AmpersandTkn = $3
+            closure.OpenParenthesisTkn = $4
+            closure.Params = $5.(*ast.ParserSeparatedList).Items
+            closure.SeparatorTkns = $5.(*ast.ParserSeparatedList).SeparatorTkns
+            closure.CloseParenthesisTkn = $6
+            closure.OpenCurlyBracketTkn = $8
+            closure.Stmts = $9
+            closure.CloseCurlyBracketTkn = $10
+
+            $$ = closure
         }
     ;
@@ -3520,17 +3522,16 @@ function:
 lexical_vars:
         /* empty */
         {
-            $$ = nil
+            $$ = &ast.ExprClosure{}
         }
     |   T_USE '(' lexical_var_list ')'
         {
-            $$ = &ast.ExprClosureUse{
-                Position:            yylex.(*Parser).builder.NewTokensPosition($1, $4),
-                UseTkn:              $1,
-                OpenParenthesisTkn:  $2,
-                Uses:                $3.(*ast.ParserSeparatedList).Items,
-                SeparatorTkns:       $3.(*ast.ParserSeparatedList).SeparatorTkns,
-                CloseParenthesisTkn: $4,
+            $$ = &ast.ExprClosure{
+                UseTkn:                 $1,
+                UseOpenParenthesisTkn:  $2,
+                Use:                    $3.(*ast.ParserSeparatedList).Items,
+                UseSeparatorTkns:       $3.(*ast.ParserSeparatedList).SeparatorTkns,
+                UseCloseParenthesisTkn: $4,
             }
         }
     ;
@@ -3538,12 +3539,15 @@ lexical_vars:
 lexical_var_list:
         lexical_var_list ',' T_VARIABLE
         {
-            variable := &ast.ExprVariable{
+            variable := &ast.ExprClosureUse{
                 Position: yylex.(*Parser).builder.NewTokenPosition($3),
-                VarName: &ast.Identifier{
-                    Position:      yylex.(*Parser).builder.NewTokenPosition($3),
-                    IdentifierTkn: $3,
-                    Value:         $3.Value,
+                Var: &ast.ExprVariable{
+                    Position: yylex.(*Parser).builder.NewTokenPosition($3),
+                    VarName: &ast.Identifier{
+                        Position:      yylex.(*Parser).builder.NewTokenPosition($3),
+                        IdentifierTkn: $3,
+                        Value:         $3.Value,
+                    },
                 },
             }
@@ -3554,7 +3558,7 @@ lexical_var_list:
         }
     |   lexical_var_list ',' '&' T_VARIABLE
         {
-            reference := &ast.ExprReference{
+            variable := &ast.ExprClosureUse{
                 Position:     yylex.(*Parser).builder.NewTokensPosition($3, $4),
                 AmpersandTkn: $3,
                 Var: &ast.ExprVariable{
@@ -3568,43 +3572,46 @@ lexical_var_list:
             }
             $1.(*ast.ParserSeparatedList).SeparatorTkns = append($1.(*ast.ParserSeparatedList).SeparatorTkns, $2)
-            $1.(*ast.ParserSeparatedList).Items = append($1.(*ast.ParserSeparatedList).Items, reference)
+            $1.(*ast.ParserSeparatedList).Items = append($1.(*ast.ParserSeparatedList).Items, variable)
             $$ = $1
         }
     |   T_VARIABLE
         {
-            $$ = &ast.ParserSeparatedList{
-                Items: []ast.Vertex{
-                    &ast.ExprVariable{
-                        Position: yylex.(*Parser).builder.NewTokenPosition($1),
-                        VarName: &ast.Identifier{
-                            Position:      yylex.(*Parser).builder.NewTokenPosition($1),
-                            IdentifierTkn: $1,
-                            Value:         $1.Value,
-                        },
-                    },
-                },
-            }
+            variable := &ast.ExprClosureUse{
+                Position: yylex.(*Parser).builder.NewTokenPosition($1),
+                Var: &ast.ExprVariable{
+                    Position: yylex.(*Parser).builder.NewTokenPosition($1),
+                    VarName: &ast.Identifier{
+                        Position:      yylex.(*Parser).builder.NewTokenPosition($1),
+                        IdentifierTkn: $1,
+                        Value:         $1.Value,
+                    },
+                },
+            }
+
+            $$ = &ast.ParserSeparatedList{
+                Items: []ast.Vertex{ variable },
+            }
         }
     |   '&' T_VARIABLE
         {
-            $$ = &ast.ParserSeparatedList{
-                Items: []ast.Vertex{
-                    &ast.ExprReference{
-                        Position:     yylex.(*Parser).builder.NewTokensPosition($1, $2),
-                        AmpersandTkn: $1,
-                        Var: &ast.ExprVariable{
-                            Position: yylex.(*Parser).builder.NewTokenPosition($2),
-                            VarName: &ast.Identifier{
-                                Position:      yylex.(*Parser).builder.NewTokenPosition($2),
-                                IdentifierTkn: $2,
-                                Value:         $2.Value,
-                            },
-                        },
-                    },
-                },
-            }
+            variable := &ast.ExprClosureUse{
+                Position:     yylex.(*Parser).builder.NewTokensPosition($1, $2),
+                AmpersandTkn: $1,
+                Var: &ast.ExprVariable{
+                    Position: yylex.(*Parser).builder.NewTokenPosition($2),
+                    VarName: &ast.Identifier{
+                        Position:      yylex.(*Parser).builder.NewTokenPosition($2),
+                        IdentifierTkn: $2,
+                        Value:         $2.Value,
+                    },
+                },
+            }
+
+            $$ = &ast.ParserSeparatedList{
+                Items: []ast.Vertex{ variable },
+            }
        }
    ;
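Why the empty alternative now returns &ast.ExprClosure{} instead of nil: the closure rules above type-assert the lexical_vars value ($6 or $7) unconditionally, so the production must always yield a closure node, possibly with empty use-clause fields, which the enclosing rule then completes. Illustratively, and only as a sketch (the parameter names below stand in for the grammar's $n values and are not part of the package):

    // Sketch only: the partial node the new lexical_vars action produces for
    // `use ($c, &$d)`, before the closure rule above fills in the function
    // token, parameters, body and overall position.
    func examplePartialClosure(useTkn, openParenTkn, commaTkn, closeParenTkn *token.Token, useC, useD ast.Vertex) *ast.ExprClosure {
        return &ast.ExprClosure{
            UseTkn:                 useTkn,
            UseOpenParenthesisTkn:  openParenTkn,
            Use:                    []ast.Vertex{useC, useD}, // *ast.ExprClosureUse items from lexical_var_list
            UseSeparatorTkns:       []*token.Token{commaTkn},
            UseCloseParenthesisTkn: closeParenTkn,
        }
    }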


@ -36268,7 +36268,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 36, EndPos: 36,
}, },
FunctionTkn: &token.Token{ FunctionTkn: &token.Token{
ID: token.T_FUNCTION, ID: token.T_FUNCTION,
Value: []byte("function"), Value: []byte("function"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36278,7 +36278,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
FreeFloating: []*token.Token{ FreeFloating: []*token.Token{
{ {
ID: token.T_OPEN_TAG, ID: token.T_OPEN_TAG,
Value: []byte("<?"), Value: []byte("<?"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36288,7 +36288,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
}, },
{ {
ID: token.T_WHITESPACE, ID: token.T_WHITESPACE,
Value: []byte(" "), Value: []byte(" "),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36300,7 +36300,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
}, },
OpenParenthesisTkn: &token.Token{ OpenParenthesisTkn: &token.Token{
ID: token.ID(40), ID: token.ID(40),
Value: []byte("("), Value: []byte("("),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36332,7 +36332,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 14, EndPos: 14,
}, },
IdentifierTkn: &token.Token{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE, ID: token.T_VARIABLE,
Value: []byte("$a"), Value: []byte("$a"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36367,7 +36367,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 18, EndPos: 18,
}, },
IdentifierTkn: &token.Token{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE, ID: token.T_VARIABLE,
Value: []byte("$b"), Value: []byte("$b"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36377,7 +36377,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
FreeFloating: []*token.Token{ FreeFloating: []*token.Token{
{ {
ID: token.T_WHITESPACE, ID: token.T_WHITESPACE,
Value: []byte(" "), Value: []byte(" "),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36395,7 +36395,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
SeparatorTkns: []*token.Token{ SeparatorTkns: []*token.Token{
{ {
ID: token.ID(44), ID: token.ID(44),
Value: []byte(","), Value: []byte(","),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36406,7 +36406,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
}, },
CloseParenthesisTkn: &token.Token{ CloseParenthesisTkn: &token.Token{
ID: token.ID(41), ID: token.ID(41),
Value: []byte(")"), Value: []byte(")"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36415,59 +36415,59 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 19, EndPos: 19,
}, },
}, },
ClosureUse: &ast.ExprClosureUse{ UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 20, StartPos: 20,
EndPos: 33, EndPos: 23,
}, },
UseTkn: &token.Token{ FreeFloating: []*token.Token{
ID: token.T_USE, {
Value: []byte("use"), ID: token.T_WHITESPACE,
Position: &position.Position{ Value: []byte(" "),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 20, EndLine: 1,
EndPos: 23, StartPos: 19,
}, EndPos: 20,
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
}, },
}, },
}, },
OpenParenthesisTkn: &token.Token{ },
ID: token.ID(40), UseOpenParenthesisTkn: &token.Token{
Value: []byte("("), ID: token.ID(40),
Position: &position.Position{ Value: []byte("("),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 24, EndLine: 1,
EndPos: 25, StartPos: 24,
}, EndPos: 25,
FreeFloating: []*token.Token{ },
{ FreeFloating: []*token.Token{
ID: token.T_WHITESPACE, {
Value: []byte(" "), ID: token.T_WHITESPACE,
Position: &position.Position{ Value: []byte(" "),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 23, EndLine: 1,
EndPos: 24, StartPos: 23,
}, EndPos: 24,
}, },
}, },
}, },
Uses: []ast.Vertex{ },
&ast.ExprVariable{ Use: []ast.Vertex{
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 27,
},
Var: &ast.ExprVariable{
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
@ -36482,7 +36482,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 27, EndPos: 27,
}, },
IdentifierTkn: &token.Token{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE, ID: token.T_VARIABLE,
Value: []byte("$c"), Value: []byte("$c"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36494,89 +36494,89 @@ func TestExprClosure_Use(t *testing.T) {
Value: []byte("$c"), Value: []byte("$c"),
}, },
}, },
&ast.ExprReference{ },
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 29,
EndPos: 32,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 29, StartPos: 29,
EndPos: 32, EndPos: 30,
}, },
AmpersandTkn: &token.Token{ FreeFloating: []*token.Token{
ID: token.ID(38), {
Value: []byte("&"), ID: token.T_WHITESPACE,
Position: &position.Position{ Value: []byte(" "),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 29, EndLine: 1,
EndPos: 30, StartPos: 28,
}, EndPos: 29,
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 28,
EndPos: 29,
},
}, },
}, },
}, },
Var: &ast.ExprVariable{ },
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
VarName: &ast.Identifier{
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 30, StartPos: 30,
EndPos: 32, EndPos: 32,
}, },
VarName: &ast.Identifier{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$d"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 30, StartPos: 30,
EndPos: 32, EndPos: 32,
}, },
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$d"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
},
Value: []byte("$d"),
}, },
Value: []byte("$d"),
}, },
}, },
}, },
SeparatorTkns: []*token.Token{ },
{ UseSeparatorTkns: []*token.Token{
ID: token.ID(44), {
Value: []byte(","), ID: token.ID(44),
Position: &position.Position{ Value: []byte(","),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 27, EndLine: 1,
EndPos: 28, StartPos: 27,
}, EndPos: 28,
}, },
}, },
CloseParenthesisTkn: &token.Token{ },
ID: token.ID(41), UseCloseParenthesisTkn: &token.Token{
Value: []byte(")"), ID: token.ID(41),
Position: &position.Position{ Value: []byte(")"),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 32, EndLine: 1,
EndPos: 33, StartPos: 32,
}, EndPos: 33,
}, },
}, },
OpenCurlyBracketTkn: &token.Token{ OpenCurlyBracketTkn: &token.Token{
ID: token.ID(123), ID: token.ID(123),
Value: []byte("{"), Value: []byte("{"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36586,7 +36586,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
FreeFloating: []*token.Token{ FreeFloating: []*token.Token{
{ {
ID: token.T_WHITESPACE, ID: token.T_WHITESPACE,
Value: []byte(" "), Value: []byte(" "),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36599,7 +36599,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
Stmts: []ast.Vertex{}, Stmts: []ast.Vertex{},
CloseCurlyBracketTkn: &token.Token{ CloseCurlyBracketTkn: &token.Token{
ID: token.ID(125), ID: token.ID(125),
Value: []byte("}"), Value: []byte("}"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36610,7 +36610,7 @@ func TestExprClosure_Use(t *testing.T) {
}, },
}, },
SemiColonTkn: &token.Token{ SemiColonTkn: &token.Token{
ID: token.ID(59), ID: token.ID(59),
Value: []byte(";"), Value: []byte(";"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36621,7 +36621,8 @@ func TestExprClosure_Use(t *testing.T) {
}, },
}, },
}, },
EndTkn: &token.Token{}, EndTkn: &token.Token{
},
} }
lexer := scanner.NewLexer([]byte(src), "7.4", nil) lexer := scanner.NewLexer([]byte(src), "7.4", nil)
@ -36657,7 +36658,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 36, EndPos: 36,
}, },
FunctionTkn: &token.Token{ FunctionTkn: &token.Token{
ID: token.T_FUNCTION, ID: token.T_FUNCTION,
Value: []byte("function"), Value: []byte("function"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36667,7 +36668,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
FreeFloating: []*token.Token{ FreeFloating: []*token.Token{
{ {
ID: token.T_OPEN_TAG, ID: token.T_OPEN_TAG,
Value: []byte("<?"), Value: []byte("<?"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36677,7 +36678,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
}, },
{ {
ID: token.T_WHITESPACE, ID: token.T_WHITESPACE,
Value: []byte(" "), Value: []byte(" "),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36689,7 +36690,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
}, },
OpenParenthesisTkn: &token.Token{ OpenParenthesisTkn: &token.Token{
ID: token.ID(40), ID: token.ID(40),
Value: []byte("("), Value: []byte("("),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36721,7 +36722,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 14, EndPos: 14,
}, },
IdentifierTkn: &token.Token{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE, ID: token.T_VARIABLE,
Value: []byte("$a"), Value: []byte("$a"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36756,7 +36757,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 18, EndPos: 18,
}, },
IdentifierTkn: &token.Token{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE, ID: token.T_VARIABLE,
Value: []byte("$b"), Value: []byte("$b"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36766,7 +36767,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
FreeFloating: []*token.Token{ FreeFloating: []*token.Token{
{ {
ID: token.T_WHITESPACE, ID: token.T_WHITESPACE,
Value: []byte(" "), Value: []byte(" "),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36784,7 +36785,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
SeparatorTkns: []*token.Token{ SeparatorTkns: []*token.Token{
{ {
ID: token.ID(44), ID: token.ID(44),
Value: []byte(","), Value: []byte(","),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36795,7 +36796,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
}, },
CloseParenthesisTkn: &token.Token{ CloseParenthesisTkn: &token.Token{
ID: token.ID(41), ID: token.ID(41),
Value: []byte(")"), Value: []byte(")"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36804,104 +36805,104 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 19, EndPos: 19,
}, },
}, },
ClosureUse: &ast.ExprClosureUse{ UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 20, StartPos: 20,
EndPos: 33, EndPos: 23,
}, },
UseTkn: &token.Token{ FreeFloating: []*token.Token{
ID: token.T_USE, {
Value: []byte("use"), ID: token.T_WHITESPACE,
Position: &position.Position{ Value: []byte(" "),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 20, EndLine: 1,
EndPos: 23, StartPos: 19,
}, EndPos: 20,
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
}, },
}, },
}, },
OpenParenthesisTkn: &token.Token{ },
ID: token.ID(40), UseOpenParenthesisTkn: &token.Token{
Value: []byte("("), ID: token.ID(40),
Position: &position.Position{ Value: []byte("("),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 24, EndLine: 1,
EndPos: 25, StartPos: 24,
}, EndPos: 25,
FreeFloating: []*token.Token{ },
{ FreeFloating: []*token.Token{
ID: token.T_WHITESPACE, {
Value: []byte(" "), ID: token.T_WHITESPACE,
Position: &position.Position{ Value: []byte(" "),
StartLine: 1, Position: &position.Position{
EndLine: 1, StartLine: 1,
StartPos: 23, EndLine: 1,
EndPos: 24, StartPos: 23,
}, EndPos: 24,
}, },
}, },
}, },
Uses: []ast.Vertex{ },
&ast.ExprReference{ Use: []ast.Vertex{
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 28,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 25, StartPos: 25,
EndPos: 26,
},
},
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28, EndPos: 28,
}, },
AmpersandTkn: &token.Token{ VarName: &ast.Identifier{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 26,
},
},
Var: &ast.ExprVariable{
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 26, StartPos: 26,
EndPos: 28, EndPos: 28,
}, },
VarName: &ast.Identifier{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$c"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 26, StartPos: 26,
EndPos: 28, EndPos: 28,
}, },
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$c"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28,
},
},
Value: []byte("$c"),
}, },
Value: []byte("$c"),
}, },
}, },
&ast.ExprVariable{ },
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
Var: &ast.ExprVariable{
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
@ -36916,7 +36917,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 32, EndPos: 32,
}, },
IdentifierTkn: &token.Token{ IdentifierTkn: &token.Token{
ID: token.T_VARIABLE, ID: token.T_VARIABLE,
Value: []byte("$d"), Value: []byte("$d"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36926,7 +36927,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
FreeFloating: []*token.Token{ FreeFloating: []*token.Token{
{ {
ID: token.T_WHITESPACE, ID: token.T_WHITESPACE,
Value: []byte(" "), Value: []byte(" "),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36941,31 +36942,31 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
}, },
}, },
SeparatorTkns: []*token.Token{ },
{ UseSeparatorTkns: []*token.Token{
ID: token.ID(44), {
Value: []byte(","), ID: token.ID(44),
Position: &position.Position{ Value: []byte(","),
StartLine: 1,
EndLine: 1,
StartPos: 28,
EndPos: 29,
},
},
},
CloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
EndLine: 1, EndLine: 1,
StartPos: 32, StartPos: 28,
EndPos: 33, EndPos: 29,
}, },
}, },
}, },
UseCloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 32,
EndPos: 33,
},
},
OpenCurlyBracketTkn: &token.Token{ OpenCurlyBracketTkn: &token.Token{
ID: token.ID(123), ID: token.ID(123),
Value: []byte("{"), Value: []byte("{"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36975,7 +36976,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
FreeFloating: []*token.Token{ FreeFloating: []*token.Token{
{ {
ID: token.T_WHITESPACE, ID: token.T_WHITESPACE,
Value: []byte(" "), Value: []byte(" "),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36988,7 +36989,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
Stmts: []ast.Vertex{}, Stmts: []ast.Vertex{},
CloseCurlyBracketTkn: &token.Token{ CloseCurlyBracketTkn: &token.Token{
ID: token.ID(125), ID: token.ID(125),
Value: []byte("}"), Value: []byte("}"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -36999,7 +37000,7 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
}, },
SemiColonTkn: &token.Token{ SemiColonTkn: &token.Token{
ID: token.ID(59), ID: token.ID(59),
Value: []byte(";"), Value: []byte(";"),
Position: &position.Position{ Position: &position.Position{
StartLine: 1, StartLine: 1,
@ -37010,7 +37011,8 @@ func TestExprClosure_Use2(t *testing.T) {
}, },
}, },
}, },
EndTkn: &token.Token{}, EndTkn: &token.Token{
},
} }
lexer := scanner.NewLexer([]byte(src), "7.4", nil) lexer := scanner.NewLexer([]byte(src), "7.4", nil)

BIN
internal/php7/php7.go generated

Binary file not shown.


@@ -3178,21 +3178,22 @@ expr_without_variable:
 inline_function:
         T_FUNCTION returns_ref backup_doc_comment '(' parameter_list ')' lexical_vars return_type '{' inner_statement_list '}'
         {
-            $$ = &ast.ExprClosure{
-                Position:             yylex.(*Parser).builder.NewTokensPosition($1, $11),
-                FunctionTkn:          $1,
-                AmpersandTkn:         $2,
-                OpenParenthesisTkn:   $4,
-                Params:               $5.(*ast.ParserSeparatedList).Items,
-                SeparatorTkns:        $5.(*ast.ParserSeparatedList).SeparatorTkns,
-                CloseParenthesisTkn:  $6,
-                ClosureUse:           $7,
-                ColonTkn:             $8.(*ast.ReturnType).ColonTkn,
-                ReturnType:           $8.(*ast.ReturnType).Type,
-                OpenCurlyBracketTkn:  $9,
-                Stmts:                $10,
-                CloseCurlyBracketTkn: $11,
-            }
+            closure := $7.(*ast.ExprClosure)
+
+            closure.Position = yylex.(*Parser).builder.NewTokensPosition($1, $11)
+            closure.FunctionTkn = $1
+            closure.AmpersandTkn = $2
+            closure.OpenParenthesisTkn = $4
+            closure.Params = $5.(*ast.ParserSeparatedList).Items
+            closure.SeparatorTkns = $5.(*ast.ParserSeparatedList).SeparatorTkns
+            closure.CloseParenthesisTkn = $6
+            closure.ColonTkn = $8.(*ast.ReturnType).ColonTkn
+            closure.ReturnType = $8.(*ast.ReturnType).Type
+            closure.OpenCurlyBracketTkn = $9
+            closure.Stmts = $10
+            closure.CloseCurlyBracketTkn = $11
+
+            $$ = closure
         }
     |   T_FN returns_ref '(' parameter_list ')' return_type backup_doc_comment T_DOUBLE_ARROW expr
         {
@@ -3230,17 +3231,16 @@ returns_ref:
 lexical_vars:
         /* empty */
         {
-            $$ = nil
+            $$ = &ast.ExprClosure{}
         }
     |   T_USE '(' lexical_var_list ')'
         {
-            $$ = &ast.ExprClosureUse{
-                Position:            yylex.(*Parser).builder.NewTokensPosition($1, $4),
-                UseTkn:              $1,
-                OpenParenthesisTkn:  $2,
-                Uses:                $3.(*ast.ParserSeparatedList).Items,
-                SeparatorTkns:       $3.(*ast.ParserSeparatedList).SeparatorTkns,
-                CloseParenthesisTkn: $4,
+            $$ = &ast.ExprClosure{
+                UseTkn:                 $1,
+                UseOpenParenthesisTkn:  $2,
+                Use:                    $3.(*ast.ParserSeparatedList).Items,
+                UseSeparatorTkns:       $3.(*ast.ParserSeparatedList).SeparatorTkns,
+                UseCloseParenthesisTkn: $4,
             }
         }
     ;
@@ -3264,18 +3264,21 @@ lexical_var_list:
 lexical_var:
         T_VARIABLE
         {
-            $$ = &ast.ExprVariable{
+            $$ = &ast.ExprClosureUse{
                 Position: yylex.(*Parser).builder.NewTokenPosition($1),
-                VarName: &ast.Identifier{
-                    Position:      yylex.(*Parser).builder.NewTokenPosition($1),
-                    IdentifierTkn: $1,
-                    Value:         $1.Value,
+                Var: &ast.ExprVariable{
+                    Position: yylex.(*Parser).builder.NewTokenPosition($1),
+                    VarName: &ast.Identifier{
+                        Position:      yylex.(*Parser).builder.NewTokenPosition($1),
+                        IdentifierTkn: $1,
+                        Value:         $1.Value,
+                    },
                 },
             }
         }
     |   '&' T_VARIABLE
         {
-            $$ = &ast.ExprReference{
+            $$ = &ast.ExprClosureUse{
                 Position:     yylex.(*Parser).builder.NewTokensPosition($1, $2),
                 AmpersandTkn: $1,
                 Var: &ast.ExprVariable{


@@ -1274,20 +1274,24 @@ func (n *ExprClone) GetPosition() *position.Position {
 // ExprClosure node
 type ExprClosure struct {
-    Position             *position.Position
-    StaticTkn            *token.Token
-    FunctionTkn          *token.Token
-    AmpersandTkn         *token.Token
-    OpenParenthesisTkn   *token.Token
-    Params               []Vertex
-    SeparatorTkns        []*token.Token
-    CloseParenthesisTkn  *token.Token
-    ClosureUse           Vertex
-    ColonTkn             *token.Token
-    ReturnType           Vertex
-    OpenCurlyBracketTkn  *token.Token
-    Stmts                []Vertex
-    CloseCurlyBracketTkn *token.Token
+    Position               *position.Position
+    StaticTkn              *token.Token
+    FunctionTkn            *token.Token
+    AmpersandTkn           *token.Token
+    OpenParenthesisTkn     *token.Token
+    Params                 []Vertex
+    SeparatorTkns          []*token.Token
+    CloseParenthesisTkn    *token.Token
+    UseTkn                 *token.Token
+    UseOpenParenthesisTkn  *token.Token
+    Use                    []Vertex
+    UseSeparatorTkns       []*token.Token
+    UseCloseParenthesisTkn *token.Token
+    ColonTkn               *token.Token
+    ReturnType             Vertex
+    OpenCurlyBracketTkn    *token.Token
+    Stmts                  []Vertex
+    CloseCurlyBracketTkn   *token.Token
 }
 
 func (n *ExprClosure) Accept(v NodeVisitor) {
@@ -1300,12 +1304,9 @@ func (n *ExprClosure) GetPosition() *position.Position {
 // ExprClosureUse node
 type ExprClosureUse struct {
-    Position            *position.Position
-    UseTkn              *token.Token
-    OpenParenthesisTkn  *token.Token
-    Uses                []Vertex
-    SeparatorTkns       []*token.Token
-    CloseParenthesisTkn *token.Token
+    Position     *position.Position
+    AmpersandTkn *token.Token
+    Var          Vertex
 }
 
 func (n *ExprClosureUse) Accept(v NodeVisitor) {
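For consumers of the AST, the practical effect of these two struct changes is that a by-reference capture is no longer an *ast.ExprReference wrapper inside a Uses list; it is an ExprClosureUse whose AmpersandTkn is non-nil. A hedged sketch of reading the new layout (helper name is illustrative and not part of the package):

    // describeUse reports the captured variable's name and whether it is
    // captured by reference, under the new ExprClosureUse layout.
    func describeUse(u *ast.ExprClosureUse) (name string, byRef bool) {
        byRef = u.AmpersandTkn != nil // previously expressed as an *ast.ExprReference wrapper
        if v, ok := u.Var.(*ast.ExprVariable); ok {
            if id, ok := v.VarName.(*ast.Identifier); ok {
                name = string(id.Value)
            }
        }
        return name, byRef
    }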


@@ -1152,10 +1152,12 @@ func (t *DFS) Traverse(n ast.Vertex) {
             }
             t.visitor.Leave("Params", false)
         }
-        if nn.ClosureUse != nil {
-            t.visitor.Enter("ClosureUse", true)
-            t.Traverse(nn.ClosureUse)
-            t.visitor.Leave("ClosureUse", true)
+        if nn.Use != nil {
+            t.visitor.Enter("Use", false)
+            for _, c := range nn.Use {
+                t.Traverse(c)
+            }
+            t.visitor.Leave("Use", false)
         }
         if nn.ReturnType != nil {
             t.visitor.Enter("ReturnType", true)
@@ -1176,12 +1178,10 @@ func (t *DFS) Traverse(n ast.Vertex) {
         if !t.visitor.EnterNode(nn) {
             return
         }
-        if nn.Uses != nil {
-            t.visitor.Enter("Uses", false)
-            for _, c := range nn.Uses {
-                t.Traverse(c)
-            }
-            t.visitor.Leave("Uses", false)
+        if nn.Var != nil {
+            t.visitor.Enter("Var", true)
+            t.Traverse(nn.Var)
+            t.visitor.Leave("Var", true)
         }
     case *ast.ExprConstFetch:
         if nn == nil {
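The traversal change mirrors the struct change: the closure's use items are now visited as a list ("Use", like "Params"), and each ExprClosureUse exposes a single child ("Var"). A standalone sketch of walking the new fields in the same order as the traverser (illustrative helper, not the library's traverser):

    // walkClosureUses follows the path the DFS code above takes:
    // ExprClosure.Use -> *ast.ExprClosureUse -> Var.
    func walkClosureUses(c *ast.ExprClosure, visit func(step string, v ast.Vertex)) {
        if c.Use != nil {
            visit("enter Use", nil)
            for _, item := range c.Use {
                if u, ok := item.(*ast.ExprClosureUse); ok && u.Var != nil {
                    visit("enter Var", u)
                    visit("child", u.Var) // the traverser recurses into the variable expression here
                    visit("leave Var", u)
                }
            }
            visit("leave Use", nil)
        }
    }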


@@ -1143,7 +1143,11 @@ func (v *Dumper) ExprClosure(n *ast.ExprClosure) {
     v.dumpVertexList("Params", n.Params)
     v.dumpTokenList("SeparatorTkns", n.SeparatorTkns)
     v.dumpToken("CloseParenthesisTkn", n.CloseParenthesisTkn)
-    v.dumpVertex("ClosureUse", n.ClosureUse)
+    v.dumpToken("UseTkn", n.UseTkn)
+    v.dumpToken("UseOpenParenthesisTkn", n.UseOpenParenthesisTkn)
+    v.dumpVertexList("Use", n.Use)
+    v.dumpTokenList("UseSeparatorTkns", n.UseSeparatorTkns)
+    v.dumpToken("UseCloseParenthesisTkn", n.UseCloseParenthesisTkn)
     v.dumpToken("ColonTkn", n.ColonTkn)
     v.dumpVertex("ReturnType", n.ReturnType)
     v.dumpToken("OpenCurlyBracketTkn", n.OpenCurlyBracketTkn)
@@ -1159,11 +1163,8 @@ func (v *Dumper) ExprClosureUse(n *ast.ExprClosureUse) {
     v.indent++
 
     v.dumpPosition(n.Position)
-    v.dumpToken("UseTkn", n.UseTkn)
-    v.dumpToken("OpenParenthesisTkn", n.OpenParenthesisTkn)
-    v.dumpVertexList("Uses", n.Uses)
-    v.dumpTokenList("SeparatorTkns", n.SeparatorTkns)
-    v.dumpToken("CloseParenthesisTkn", n.CloseParenthesisTkn)
+    v.dumpToken("AmpersandTkn", n.AmpersandTkn)
+    v.dumpVertex("Var", n.Var)
 
     v.indent--
     v.print(v.indent, "},\n")


@@ -1162,9 +1162,16 @@ func (f *formatter) ExprClosure(n *ast.ExprClosure) {
     }
     n.CloseParenthesisTkn = f.newToken(')', []byte(")"))
 
-    if n.ClosureUse != nil {
+    n.UseTkn = nil
+    n.UseOpenParenthesisTkn = nil
+    n.UseCloseParenthesisTkn = nil
+    n.UseSeparatorTkns = nil
+    if len(n.Use) > 0 {
         f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
-        n.ClosureUse.Accept(f)
+        n.UseTkn = f.newToken(token.T_USE, []byte("use"))
+        n.OpenParenthesisTkn = f.newToken('(', []byte("("))
+        n.SeparatorTkns = f.formatList(n.Use, ',')
+        n.CloseParenthesisTkn = f.newToken(')', []byte(")"))
     }
 
     n.ColonTkn = nil
@@ -1189,10 +1196,11 @@ func (f *formatter) ExprClosure(n *ast.ExprClosure) {
 }
 
 func (f *formatter) ExprClosureUse(n *ast.ExprClosureUse) {
-    n.UseTkn = f.newToken(token.T_USE, []byte("use"))
-    n.OpenParenthesisTkn = f.newToken('(', []byte("("))
-    n.SeparatorTkns = f.formatList(n.Uses, ',')
-    n.CloseParenthesisTkn = f.newToken(')', []byte(")"))
+    if n.AmpersandTkn != nil {
+        n.AmpersandTkn = f.newToken('&', []byte("&"))
+    }
+
+    n.Var.Accept(f)
 }
 
 func (f *formatter) ExprConstFetch(n *ast.ExprConstFetch) {

@@ -3714,9 +3714,9 @@ func TestFormatter_ExprClosure_Use(t *testing.T) {
     o := bytes.NewBufferString("")
 
     n := &ast.ExprClosure{
-        ClosureUse: &ast.ExprClosureUse{
-            Uses: []ast.Vertex{
-                &ast.ExprVariable{
+        Use: []ast.Vertex{
+            &ast.ExprClosureUse{
+                Var: &ast.ExprVariable{
                     VarName: &ast.Identifier{
                         Value: []byte("$foo"),
                     },
@@ -3748,16 +3748,9 @@ func TestFormatter_ExprClosureUse(t *testing.T) {
     o := bytes.NewBufferString("")
 
     n := &ast.ExprClosureUse{
-        Uses: []ast.Vertex{
-            &ast.ExprVariable{
-                VarName: &ast.Identifier{
-                    Value: []byte("$a"),
-                },
-            },
-            &ast.ExprVariable{
-                VarName: &ast.Identifier{
-                    Value: []byte("$b"),
-                },
+        Var: &ast.ExprVariable{
+            VarName: &ast.Identifier{
+                Value: []byte("$a"),
             },
         },
     }
@@ -3768,7 +3761,33 @@ func TestFormatter_ExprClosureUse(t *testing.T) {
     p := visitor.NewPrinter(o).WithState(visitor.PrinterStatePHP)
     n.Accept(p)
 
-    expected := `use($a, $b)`
+    expected := `$a`
+    actual := o.String()
+
+    if expected != actual {
+        t.Errorf("\nexpected: %s\ngot: %s\n", expected, actual)
+    }
+}
+
+func TestFormatter_ExprClosureUse_Reference(t *testing.T) {
+    o := bytes.NewBufferString("")
+
+    n := &ast.ExprClosureUse{
+        AmpersandTkn: &token.Token{},
+        Var: &ast.ExprVariable{
+            VarName: &ast.Identifier{
+                Value: []byte("$a"),
+            },
+        },
+    }
+
+    f := visitor.NewFormatter().WithState(visitor.FormatterStatePHP).WithIndent(1)
+    n.Accept(f)
+
+    p := visitor.NewPrinter(o).WithState(visitor.PrinterStatePHP)
+    n.Accept(p)
+
+    expected := `&$a`
     actual := o.String()
 
     if expected != actual {

@@ -562,7 +562,6 @@ func TestResolveClosureName(t *testing.T) {
             Var: &ast.ExprVariable{VarName: &ast.Identifier{Value: []byte("foo")}},
             },
         },
-        ClosureUse: nil,
         ReturnType: &ast.Nullable{Expr: nameBC},
         Stmts:      []ast.Vertex{},
     }


@@ -684,7 +684,10 @@ func (p *printer) ExprClosure(n *ast.ExprClosure) {
     p.printToken(n.OpenParenthesisTkn, []byte("("))
     p.printSeparatedList(n.Params, n.SeparatorTkns, []byte(","))
     p.printToken(n.CloseParenthesisTkn, []byte(")"))
-    p.printNode(n.ClosureUse)
+    p.printToken(n.UseTkn, p.ifNodeList(n.Use, []byte("use")))
+    p.printToken(n.UseOpenParenthesisTkn, p.ifNodeList(n.Use, []byte("(")))
+    p.printSeparatedList(n.Use, n.UseSeparatorTkns, []byte(","))
+    p.printToken(n.UseCloseParenthesisTkn, p.ifNodeList(n.Use, []byte(")")))
     p.printToken(n.ColonTkn, p.ifNode(n.ReturnType, []byte(":")))
     p.printNode(n.ReturnType)
     p.printToken(n.OpenCurlyBracketTkn, []byte("{"))
@@ -693,10 +696,8 @@ func (p *printer) ExprClosure(n *ast.ExprClosure) {
 }
 
 func (p *printer) ExprClosureUse(n *ast.ExprClosureUse) {
-    p.printToken(n.UseTkn, []byte("use"))
-    p.printToken(n.OpenParenthesisTkn, []byte("("))
-    p.printSeparatedList(n.Uses, n.SeparatorTkns, []byte(","))
-    p.printToken(n.CloseParenthesisTkn, []byte(")"))
+    p.printToken(n.AmpersandTkn, nil)
+    p.printNode(n.Var)
 }
 
 func (p *printer) ExprConstFetch(n *ast.ExprConstFetch) {


@@ -1717,18 +1717,35 @@ func TestPrinterPrintExprClosureUse(t *testing.T) {
     p := visitor.NewPrinter(o).WithState(visitor.PrinterStatePHP)
 
     n := &ast.ExprClosureUse{
-        Uses: []ast.Vertex{
-            &ast.ExprReference{Var: &ast.ExprVariable{
-                VarName: &ast.Identifier{Value: []byte("$foo")},
-            }},
-            &ast.ExprVariable{
-                VarName: &ast.Identifier{Value: []byte("$bar")},
-            },
+        Var: &ast.ExprVariable{
+            VarName: &ast.Identifier{Value: []byte("$foo")},
         },
     }
     n.Accept(p)
 
-    expected := `use(&$foo,$bar)`
+    expected := `$foo`
+    actual := o.String()
+
+    if expected != actual {
+        t.Errorf("\nexpected: %s\ngot: %s\n", expected, actual)
+    }
+}
+
+func TestPrinterPrintExprClosureUse_Reference(t *testing.T) {
+    o := bytes.NewBufferString("")
+    p := visitor.NewPrinter(o).WithState(visitor.PrinterStatePHP)
+
+    n := &ast.ExprClosureUse{
+        AmpersandTkn: &token.Token{
+            Value: []byte("&"),
+        },
+        Var: &ast.ExprVariable{
+            VarName: &ast.Identifier{Value: []byte("$foo")},
+        },
+    }
+    n.Accept(p)
+
+    expected := `&$foo`
     actual := o.String()
 
     if expected != actual {
@@ -1754,12 +1771,17 @@ func TestPrinterPrintExprClosure(t *testing.T) {
             },
         },
     },
-    ClosureUse: &ast.ExprClosureUse{
-        Uses: []ast.Vertex{
-            &ast.ExprReference{Var: &ast.ExprVariable{
+    Use: []ast.Vertex{
+        &ast.ExprClosureUse{
+            AmpersandTkn: &token.Token{
+                Value: []byte("&"),
+            },
+            Var: &ast.ExprVariable{
                 VarName: &ast.Identifier{Value: []byte("$a")},
-            }},
-            &ast.ExprVariable{
+            },
+        },
+        &ast.ExprClosureUse{
+            Var: &ast.ExprVariable{
                 VarName: &ast.Identifier{Value: []byte("$b")},
             },
         },