refactoring: update ast structure of "Closure" and "ClosureUse" nodes

Vadym Slizov 2020-12-26 18:20:10 +02:00
parent 03c7979ccd
commit b85bae2ec1
14 changed files with 654 additions and 589 deletions
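The change in a nutshell: the use (...) clause no longer lives in a separate wrapper node hung off the closure; its tokens and items move onto ExprClosure itself, and ExprClosureUse shrinks to a single captured variable (optionally taken by reference). Below is a condensed sketch of the reshaped definitions, paraphrased from the ast.go hunk further down in this commit; the grouping and comments are editorial, not part of the commit.

// ExprClosure now owns the use-clause tokens and the list of captured variables.
type ExprClosure struct {
	Position               *position.Position
	StaticTkn              *token.Token
	FunctionTkn            *token.Token
	AmpersandTkn           *token.Token
	OpenParenthesisTkn     *token.Token
	Params                 []Vertex
	SeparatorTkns          []*token.Token
	CloseParenthesisTkn    *token.Token
	UseTkn                 *token.Token   // "use"
	UseOpenParenthesisTkn  *token.Token   // "("
	Use                    []Vertex       // items are *ExprClosureUse
	UseSeparatorTkns       []*token.Token // ","
	UseCloseParenthesisTkn *token.Token   // ")"
	ColonTkn               *token.Token
	ReturnType             Vertex
	OpenCurlyBracketTkn    *token.Token
	Stmts                  []Vertex
	CloseCurlyBracketTkn   *token.Token
}

// ExprClosureUse is reduced to one captured variable,
// with the ampersand token present when it is captured by reference.
type ExprClosureUse struct {
	Position     *position.Position
	AmpersandTkn *token.Token // "&" when captured by reference
	Var          Vertex       // *ExprVariable
}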

View File

@ -31770,7 +31770,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 36,
},
FunctionTkn: &token.Token{
ID: token.T_FUNCTION,
ID: token.T_FUNCTION,
Value: []byte("function"),
Position: &position.Position{
StartLine: 1,
@ -31780,7 +31780,7 @@ func TestExprClosure_Use(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_OPEN_TAG,
ID: token.T_OPEN_TAG,
Value: []byte("<?"),
Position: &position.Position{
StartLine: 1,
@ -31790,7 +31790,7 @@ func TestExprClosure_Use(t *testing.T) {
},
},
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@ -31802,7 +31802,7 @@ func TestExprClosure_Use(t *testing.T) {
},
},
OpenParenthesisTkn: &token.Token{
ID: token.ID(40),
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
@ -31834,7 +31834,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 14,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$a"),
Position: &position.Position{
StartLine: 1,
@ -31869,7 +31869,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 18,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$b"),
Position: &position.Position{
StartLine: 1,
@ -31879,7 +31879,7 @@ func TestExprClosure_Use(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@ -31897,7 +31897,7 @@ func TestExprClosure_Use(t *testing.T) {
},
SeparatorTkns: []*token.Token{
{
ID: token.ID(44),
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
@ -31908,7 +31908,7 @@ func TestExprClosure_Use(t *testing.T) {
},
},
CloseParenthesisTkn: &token.Token{
ID: token.ID(41),
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
@ -31917,59 +31917,59 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 19,
},
},
ClosureUse: &ast.ExprClosureUse{
UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 20,
EndPos: 33,
EndPos: 23,
},
UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 20,
EndPos: 23,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
},
},
OpenParenthesisTkn: &token.Token{
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 24,
EndPos: 25,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 23,
EndPos: 24,
},
},
UseOpenParenthesisTkn: &token.Token{
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 24,
EndPos: 25,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 23,
EndPos: 24,
},
},
},
Uses: []ast.Vertex{
&ast.ExprVariable{
},
Use: []ast.Vertex{
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 27,
},
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
@ -31984,7 +31984,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 27,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$c"),
Position: &position.Position{
StartLine: 1,
@ -31996,89 +31996,89 @@ func TestExprClosure_Use(t *testing.T) {
Value: []byte("$c"),
},
},
&ast.ExprReference{
},
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 29,
EndPos: 32,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 29,
EndPos: 32,
EndPos: 30,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 29,
EndPos: 30,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 28,
EndPos: 29,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 28,
EndPos: 29,
},
},
},
Var: &ast.ExprVariable{
},
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
VarName: &ast.Identifier{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
VarName: &ast.Identifier{
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$d"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$d"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
},
Value: []byte("$d"),
},
Value: []byte("$d"),
},
},
},
SeparatorTkns: []*token.Token{
{
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 27,
EndPos: 28,
},
},
},
CloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 32,
EndPos: 33,
},
},
},
UseSeparatorTkns: []*token.Token{
{
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 27,
EndPos: 28,
},
},
},
UseCloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 32,
EndPos: 33,
},
},
OpenCurlyBracketTkn: &token.Token{
ID: token.ID(123),
ID: token.ID(123),
Value: []byte("{"),
Position: &position.Position{
StartLine: 1,
@ -32088,7 +32088,7 @@ func TestExprClosure_Use(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@ -32101,7 +32101,7 @@ func TestExprClosure_Use(t *testing.T) {
},
Stmts: []ast.Vertex{},
CloseCurlyBracketTkn: &token.Token{
ID: token.ID(125),
ID: token.ID(125),
Value: []byte("}"),
Position: &position.Position{
StartLine: 1,
@ -32112,7 +32112,7 @@ func TestExprClosure_Use(t *testing.T) {
},
},
SemiColonTkn: &token.Token{
ID: token.ID(59),
ID: token.ID(59),
Value: []byte(";"),
Position: &position.Position{
StartLine: 1,
@ -32123,7 +32123,8 @@ func TestExprClosure_Use(t *testing.T) {
},
},
},
EndTkn: &token.Token{},
EndTkn: &token.Token{
},
}
lexer := scanner.NewLexer([]byte(src), "5.6", nil)
@ -32159,7 +32160,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 36,
},
FunctionTkn: &token.Token{
ID: token.T_FUNCTION,
ID: token.T_FUNCTION,
Value: []byte("function"),
Position: &position.Position{
StartLine: 1,
@ -32169,7 +32170,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_OPEN_TAG,
ID: token.T_OPEN_TAG,
Value: []byte("<?"),
Position: &position.Position{
StartLine: 1,
@ -32179,7 +32180,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@ -32191,7 +32192,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
OpenParenthesisTkn: &token.Token{
ID: token.ID(40),
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
@ -32223,7 +32224,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 14,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$a"),
Position: &position.Position{
StartLine: 1,
@ -32258,7 +32259,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 18,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$b"),
Position: &position.Position{
StartLine: 1,
@ -32268,7 +32269,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@ -32286,7 +32287,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
SeparatorTkns: []*token.Token{
{
ID: token.ID(44),
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
@ -32297,7 +32298,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
CloseParenthesisTkn: &token.Token{
ID: token.ID(41),
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
@ -32306,104 +32307,104 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 19,
},
},
ClosureUse: &ast.ExprClosureUse{
UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 20,
EndPos: 33,
EndPos: 23,
},
UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 20,
EndPos: 23,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
},
},
OpenParenthesisTkn: &token.Token{
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 24,
EndPos: 25,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 23,
EndPos: 24,
},
},
UseOpenParenthesisTkn: &token.Token{
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 24,
EndPos: 25,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 23,
EndPos: 24,
},
},
},
Uses: []ast.Vertex{
&ast.ExprReference{
},
Use: []ast.Vertex{
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 28,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 26,
},
},
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 26,
},
},
Var: &ast.ExprVariable{
VarName: &ast.Identifier{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28,
},
VarName: &ast.Identifier{
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$c"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$c"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28,
},
},
Value: []byte("$c"),
},
Value: []byte("$c"),
},
},
&ast.ExprVariable{
},
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
@ -32418,7 +32419,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 32,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$d"),
Position: &position.Position{
StartLine: 1,
@ -32428,7 +32429,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@ -32443,31 +32444,31 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
},
SeparatorTkns: []*token.Token{
{
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 28,
EndPos: 29,
},
},
},
CloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
},
UseSeparatorTkns: []*token.Token{
{
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 32,
EndPos: 33,
StartPos: 28,
EndPos: 29,
},
},
},
UseCloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 32,
EndPos: 33,
},
},
OpenCurlyBracketTkn: &token.Token{
ID: token.ID(123),
ID: token.ID(123),
Value: []byte("{"),
Position: &position.Position{
StartLine: 1,
@ -32477,7 +32478,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@ -32490,7 +32491,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
Stmts: []ast.Vertex{},
CloseCurlyBracketTkn: &token.Token{
ID: token.ID(125),
ID: token.ID(125),
Value: []byte("}"),
Position: &position.Position{
StartLine: 1,
@ -32501,7 +32502,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
SemiColonTkn: &token.Token{
ID: token.ID(59),
ID: token.ID(59),
Value: []byte(";"),
Position: &position.Position{
StartLine: 1,
@ -32512,7 +32513,8 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
},
EndTkn: &token.Token{},
EndTkn: &token.Token{
},
}
lexer := scanner.NewLexer([]byte(src), "5.6", nil)

BIN
internal/php5/php5.go generated

Binary file not shown.

View File

@ -3367,36 +3367,38 @@ expr_without_variable:
}
| function is_reference '(' parameter_list ')' lexical_vars '{' inner_statement_list '}'
{
$$ = &ast.ExprClosure{
Position: yylex.(*Parser).builder.NewTokensPosition($1, $9),
FunctionTkn: $1,
AmpersandTkn: $2,
OpenParenthesisTkn: $3,
Params: $4.(*ast.ParserSeparatedList).Items,
SeparatorTkns: $4.(*ast.ParserSeparatedList).SeparatorTkns,
CloseParenthesisTkn: $5,
ClosureUse: $6,
OpenCurlyBracketTkn: $7,
Stmts: $8,
CloseCurlyBracketTkn: $9,
}
closure := $6.(*ast.ExprClosure)
closure.Position = yylex.(*Parser).builder.NewTokensPosition($1, $9)
closure.FunctionTkn = $1
closure.AmpersandTkn = $2
closure.OpenParenthesisTkn = $3
closure.Params = $4.(*ast.ParserSeparatedList).Items
closure.SeparatorTkns = $4.(*ast.ParserSeparatedList).SeparatorTkns
closure.CloseParenthesisTkn = $5
closure.OpenCurlyBracketTkn = $7
closure.Stmts = $8
closure.CloseCurlyBracketTkn = $9
$$ = closure
}
| T_STATIC function is_reference '(' parameter_list ')' lexical_vars '{' inner_statement_list '}'
{
$$ = &ast.ExprClosure{
Position: yylex.(*Parser).builder.NewTokensPosition($1, $10),
StaticTkn: $1,
FunctionTkn: $2,
AmpersandTkn: $3,
OpenParenthesisTkn: $4,
Params: $5.(*ast.ParserSeparatedList).Items,
SeparatorTkns: $5.(*ast.ParserSeparatedList).SeparatorTkns,
CloseParenthesisTkn: $6,
ClosureUse: $7,
OpenCurlyBracketTkn: $8,
Stmts: $9,
CloseCurlyBracketTkn: $10,
}
closure := $7.(*ast.ExprClosure)
closure.Position = yylex.(*Parser).builder.NewTokensPosition($1, $10)
closure.StaticTkn = $1
closure.FunctionTkn = $2
closure.AmpersandTkn = $3
closure.OpenParenthesisTkn = $4
closure.Params = $5.(*ast.ParserSeparatedList).Items
closure.SeparatorTkns = $5.(*ast.ParserSeparatedList).SeparatorTkns
closure.CloseParenthesisTkn = $6
closure.OpenCurlyBracketTkn = $8
closure.Stmts = $9
closure.CloseCurlyBracketTkn = $10
$$ = closure
}
;
@ -3520,17 +3522,16 @@ function:
lexical_vars:
/* empty */
{
$$ = nil
$$ = &ast.ExprClosure{}
}
| T_USE '(' lexical_var_list ')'
{
$$ = &ast.ExprClosureUse{
Position: yylex.(*Parser).builder.NewTokensPosition($1, $4),
UseTkn: $1,
OpenParenthesisTkn: $2,
Uses: $3.(*ast.ParserSeparatedList).Items,
SeparatorTkns: $3.(*ast.ParserSeparatedList).SeparatorTkns,
CloseParenthesisTkn: $4,
$$ = &ast.ExprClosure{
UseTkn: $1,
UseOpenParenthesisTkn: $2,
Use: $3.(*ast.ParserSeparatedList).Items,
UseSeparatorTkns: $3.(*ast.ParserSeparatedList).SeparatorTkns,
UseCloseParenthesisTkn: $4,
}
}
;
@ -3538,12 +3539,15 @@ lexical_vars:
lexical_var_list:
lexical_var_list ',' T_VARIABLE
{
variable := &ast.ExprVariable{
variable := &ast.ExprClosureUse{
Position: yylex.(*Parser).builder.NewTokenPosition($3),
VarName: &ast.Identifier{
Var: &ast.ExprVariable{
Position: yylex.(*Parser).builder.NewTokenPosition($3),
IdentifierTkn: $3,
Value: $3.Value,
VarName: &ast.Identifier{
Position: yylex.(*Parser).builder.NewTokenPosition($3),
IdentifierTkn: $3,
Value: $3.Value,
},
},
}
@ -3554,7 +3558,7 @@ lexical_var_list:
}
| lexical_var_list ',' '&' T_VARIABLE
{
reference := &ast.ExprReference{
variable := &ast.ExprClosureUse{
Position: yylex.(*Parser).builder.NewTokensPosition($3, $4),
AmpersandTkn: $3,
Var: &ast.ExprVariable{
@ -3568,43 +3572,46 @@ lexical_var_list:
}
$1.(*ast.ParserSeparatedList).SeparatorTkns = append($1.(*ast.ParserSeparatedList).SeparatorTkns, $2)
$1.(*ast.ParserSeparatedList).Items = append($1.(*ast.ParserSeparatedList).Items, reference)
$1.(*ast.ParserSeparatedList).Items = append($1.(*ast.ParserSeparatedList).Items, variable)
$$ = $1
}
| T_VARIABLE
{
$$ = &ast.ParserSeparatedList{
Items: []ast.Vertex{
&ast.ExprVariable{
variable := &ast.ExprClosureUse{
Position: yylex.(*Parser).builder.NewTokenPosition($1),
Var: &ast.ExprVariable{
Position: yylex.(*Parser).builder.NewTokenPosition($1),
VarName: &ast.Identifier{
Position: yylex.(*Parser).builder.NewTokenPosition($1),
VarName: &ast.Identifier{
Position: yylex.(*Parser).builder.NewTokenPosition($1),
IdentifierTkn: $1,
Value: $1.Value,
},
IdentifierTkn: $1,
Value: $1.Value,
},
},
}
$$ = &ast.ParserSeparatedList{
Items: []ast.Vertex{ variable },
}
}
| '&' T_VARIABLE
{
$$ = &ast.ParserSeparatedList{
Items: []ast.Vertex{
&ast.ExprReference{
Position: yylex.(*Parser).builder.NewTokensPosition($1, $2),
AmpersandTkn: $1,
Var: &ast.ExprVariable{
Position: yylex.(*Parser).builder.NewTokenPosition($2),
VarName: &ast.Identifier{
Position: yylex.(*Parser).builder.NewTokenPosition($2),
IdentifierTkn: $2,
Value: $2.Value,
},
},
variable := &ast.ExprClosureUse{
Position: yylex.(*Parser).builder.NewTokensPosition($1, $2),
AmpersandTkn: $1,
Var: &ast.ExprVariable{
Position: yylex.(*Parser).builder.NewTokenPosition($2),
VarName: &ast.Identifier{
Position: yylex.(*Parser).builder.NewTokenPosition($2),
IdentifierTkn: $2,
Value: $2.Value,
},
},
}
$$ = &ast.ParserSeparatedList{
Items: []ast.Vertex{ variable },
}
}
;
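To make the new mapping concrete: for a closure with use ($c, &$d), as in the first test above, the grammar rules in this file now attach the use-clause items directly to the closure node roughly as in the following sketch. It is illustrative only, not code from the commit: positions, free-floating tokens, and the remaining closure fields are omitted, and useTkn, openTkn, commaTkn, closeTkn, ampTkn stand in for the real lexer tokens.

closure := &ast.ExprClosure{
	UseTkn:                useTkn,  // "use"
	UseOpenParenthesisTkn: openTkn, // "("
	Use: []ast.Vertex{
		&ast.ExprClosureUse{ // $c, captured by value
			Var: &ast.ExprVariable{
				VarName: &ast.Identifier{Value: []byte("$c")},
			},
		},
		&ast.ExprClosureUse{ // &$d, captured by reference
			AmpersandTkn: ampTkn,
			Var: &ast.ExprVariable{
				VarName: &ast.Identifier{Value: []byte("$d")},
			},
		},
	},
	UseSeparatorTkns:       []*token.Token{commaTkn}, // ","
	UseCloseParenthesisTkn: closeTkn,                 // ")"
}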

View File

@ -36268,7 +36268,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 36,
},
FunctionTkn: &token.Token{
ID: token.T_FUNCTION,
ID: token.T_FUNCTION,
Value: []byte("function"),
Position: &position.Position{
StartLine: 1,
@ -36278,7 +36278,7 @@ func TestExprClosure_Use(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_OPEN_TAG,
ID: token.T_OPEN_TAG,
Value: []byte("<?"),
Position: &position.Position{
StartLine: 1,
@ -36288,7 +36288,7 @@ func TestExprClosure_Use(t *testing.T) {
},
},
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@ -36300,7 +36300,7 @@ func TestExprClosure_Use(t *testing.T) {
},
},
OpenParenthesisTkn: &token.Token{
ID: token.ID(40),
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
@ -36332,7 +36332,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 14,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$a"),
Position: &position.Position{
StartLine: 1,
@ -36367,7 +36367,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 18,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$b"),
Position: &position.Position{
StartLine: 1,
@ -36377,7 +36377,7 @@ func TestExprClosure_Use(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@ -36395,7 +36395,7 @@ func TestExprClosure_Use(t *testing.T) {
},
SeparatorTkns: []*token.Token{
{
ID: token.ID(44),
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
@ -36406,7 +36406,7 @@ func TestExprClosure_Use(t *testing.T) {
},
},
CloseParenthesisTkn: &token.Token{
ID: token.ID(41),
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
@ -36415,59 +36415,59 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 19,
},
},
ClosureUse: &ast.ExprClosureUse{
UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 20,
EndPos: 33,
EndPos: 23,
},
UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 20,
EndPos: 23,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
},
},
OpenParenthesisTkn: &token.Token{
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 24,
EndPos: 25,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 23,
EndPos: 24,
},
},
UseOpenParenthesisTkn: &token.Token{
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 24,
EndPos: 25,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 23,
EndPos: 24,
},
},
},
Uses: []ast.Vertex{
&ast.ExprVariable{
},
Use: []ast.Vertex{
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 27,
},
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
@ -36482,7 +36482,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 27,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$c"),
Position: &position.Position{
StartLine: 1,
@ -36494,89 +36494,89 @@ func TestExprClosure_Use(t *testing.T) {
Value: []byte("$c"),
},
},
&ast.ExprReference{
},
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 29,
EndPos: 32,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 29,
EndPos: 32,
EndPos: 30,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 29,
EndPos: 30,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 28,
EndPos: 29,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 28,
EndPos: 29,
},
},
},
Var: &ast.ExprVariable{
},
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
VarName: &ast.Identifier{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
VarName: &ast.Identifier{
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$d"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$d"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
},
Value: []byte("$d"),
},
Value: []byte("$d"),
},
},
},
SeparatorTkns: []*token.Token{
{
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 27,
EndPos: 28,
},
},
},
CloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 32,
EndPos: 33,
},
},
},
UseSeparatorTkns: []*token.Token{
{
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 27,
EndPos: 28,
},
},
},
UseCloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 32,
EndPos: 33,
},
},
OpenCurlyBracketTkn: &token.Token{
ID: token.ID(123),
ID: token.ID(123),
Value: []byte("{"),
Position: &position.Position{
StartLine: 1,
@ -36586,7 +36586,7 @@ func TestExprClosure_Use(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@ -36599,7 +36599,7 @@ func TestExprClosure_Use(t *testing.T) {
},
Stmts: []ast.Vertex{},
CloseCurlyBracketTkn: &token.Token{
ID: token.ID(125),
ID: token.ID(125),
Value: []byte("}"),
Position: &position.Position{
StartLine: 1,
@ -36610,7 +36610,7 @@ func TestExprClosure_Use(t *testing.T) {
},
},
SemiColonTkn: &token.Token{
ID: token.ID(59),
ID: token.ID(59),
Value: []byte(";"),
Position: &position.Position{
StartLine: 1,
@ -36621,7 +36621,8 @@ func TestExprClosure_Use(t *testing.T) {
},
},
},
EndTkn: &token.Token{},
EndTkn: &token.Token{
},
}
lexer := scanner.NewLexer([]byte(src), "7.4", nil)
@ -36657,7 +36658,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 36,
},
FunctionTkn: &token.Token{
ID: token.T_FUNCTION,
ID: token.T_FUNCTION,
Value: []byte("function"),
Position: &position.Position{
StartLine: 1,
@ -36667,7 +36668,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_OPEN_TAG,
ID: token.T_OPEN_TAG,
Value: []byte("<?"),
Position: &position.Position{
StartLine: 1,
@ -36677,7 +36678,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@ -36689,7 +36690,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
OpenParenthesisTkn: &token.Token{
ID: token.ID(40),
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
@ -36721,7 +36722,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 14,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$a"),
Position: &position.Position{
StartLine: 1,
@ -36756,7 +36757,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 18,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$b"),
Position: &position.Position{
StartLine: 1,
@ -36766,7 +36767,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@ -36784,7 +36785,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
SeparatorTkns: []*token.Token{
{
ID: token.ID(44),
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
@ -36795,7 +36796,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
CloseParenthesisTkn: &token.Token{
ID: token.ID(41),
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
@ -36804,104 +36805,104 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 19,
},
},
ClosureUse: &ast.ExprClosureUse{
UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 20,
EndPos: 33,
EndPos: 23,
},
UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 20,
EndPos: 23,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
},
},
OpenParenthesisTkn: &token.Token{
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 24,
EndPos: 25,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 23,
EndPos: 24,
},
},
UseOpenParenthesisTkn: &token.Token{
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 24,
EndPos: 25,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 23,
EndPos: 24,
},
},
},
Uses: []ast.Vertex{
&ast.ExprReference{
},
Use: []ast.Vertex{
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 28,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 26,
},
},
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 26,
},
},
Var: &ast.ExprVariable{
VarName: &ast.Identifier{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28,
},
VarName: &ast.Identifier{
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$c"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$c"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28,
},
},
Value: []byte("$c"),
},
Value: []byte("$c"),
},
},
&ast.ExprVariable{
},
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
@ -36916,7 +36917,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 32,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$d"),
Position: &position.Position{
StartLine: 1,
@ -36926,7 +36927,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@ -36941,31 +36942,31 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
},
SeparatorTkns: []*token.Token{
{
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 28,
EndPos: 29,
},
},
},
CloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
},
UseSeparatorTkns: []*token.Token{
{
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 32,
EndPos: 33,
StartPos: 28,
EndPos: 29,
},
},
},
UseCloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 32,
EndPos: 33,
},
},
OpenCurlyBracketTkn: &token.Token{
ID: token.ID(123),
ID: token.ID(123),
Value: []byte("{"),
Position: &position.Position{
StartLine: 1,
@ -36975,7 +36976,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@ -36988,7 +36989,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
Stmts: []ast.Vertex{},
CloseCurlyBracketTkn: &token.Token{
ID: token.ID(125),
ID: token.ID(125),
Value: []byte("}"),
Position: &position.Position{
StartLine: 1,
@ -36999,7 +37000,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
SemiColonTkn: &token.Token{
ID: token.ID(59),
ID: token.ID(59),
Value: []byte(";"),
Position: &position.Position{
StartLine: 1,
@ -37010,7 +37011,8 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
},
EndTkn: &token.Token{},
EndTkn: &token.Token{
},
}
lexer := scanner.NewLexer([]byte(src), "7.4", nil)

BIN
internal/php7/php7.go generated

Binary file not shown.

View File

@ -3178,21 +3178,22 @@ expr_without_variable:
inline_function:
T_FUNCTION returns_ref backup_doc_comment '(' parameter_list ')' lexical_vars return_type '{' inner_statement_list '}'
{
$$ = &ast.ExprClosure{
Position: yylex.(*Parser).builder.NewTokensPosition($1, $11),
FunctionTkn: $1,
AmpersandTkn: $2,
OpenParenthesisTkn: $4,
Params: $5.(*ast.ParserSeparatedList).Items,
SeparatorTkns: $5.(*ast.ParserSeparatedList).SeparatorTkns,
CloseParenthesisTkn: $6,
ClosureUse: $7,
ColonTkn: $8.(*ast.ReturnType).ColonTkn,
ReturnType: $8.(*ast.ReturnType).Type,
OpenCurlyBracketTkn: $9,
Stmts: $10,
CloseCurlyBracketTkn: $11,
}
closure := $7.(*ast.ExprClosure)
closure.Position = yylex.(*Parser).builder.NewTokensPosition($1, $11)
closure.FunctionTkn = $1
closure.AmpersandTkn = $2
closure.OpenParenthesisTkn = $4
closure.Params = $5.(*ast.ParserSeparatedList).Items
closure.SeparatorTkns = $5.(*ast.ParserSeparatedList).SeparatorTkns
closure.CloseParenthesisTkn = $6
closure.ColonTkn = $8.(*ast.ReturnType).ColonTkn
closure.ReturnType = $8.(*ast.ReturnType).Type
closure.OpenCurlyBracketTkn = $9
closure.Stmts = $10
closure.CloseCurlyBracketTkn = $11
$$ = closure
}
| T_FN returns_ref '(' parameter_list ')' return_type backup_doc_comment T_DOUBLE_ARROW expr
{
@ -3230,17 +3231,16 @@ returns_ref:
lexical_vars:
/* empty */
{
$$ = nil
$$ = &ast.ExprClosure{}
}
| T_USE '(' lexical_var_list ')'
{
$$ = &ast.ExprClosureUse{
Position: yylex.(*Parser).builder.NewTokensPosition($1, $4),
UseTkn: $1,
OpenParenthesisTkn: $2,
Uses: $3.(*ast.ParserSeparatedList).Items,
SeparatorTkns: $3.(*ast.ParserSeparatedList).SeparatorTkns,
CloseParenthesisTkn: $4,
$$ = &ast.ExprClosure{
UseTkn: $1,
UseOpenParenthesisTkn: $2,
Use: $3.(*ast.ParserSeparatedList).Items,
UseSeparatorTkns: $3.(*ast.ParserSeparatedList).SeparatorTkns,
UseCloseParenthesisTkn: $4,
}
}
;
@ -3264,18 +3264,21 @@ lexical_var_list:
lexical_var:
T_VARIABLE
{
$$ = &ast.ExprVariable{
$$ = &ast.ExprClosureUse{
Position: yylex.(*Parser).builder.NewTokenPosition($1),
VarName: &ast.Identifier{
Var: &ast.ExprVariable{
Position: yylex.(*Parser).builder.NewTokenPosition($1),
IdentifierTkn: $1,
Value: $1.Value,
VarName: &ast.Identifier{
Position: yylex.(*Parser).builder.NewTokenPosition($1),
IdentifierTkn: $1,
Value: $1.Value,
},
},
}
}
| '&' T_VARIABLE
{
$$ = &ast.ExprReference{
$$ = &ast.ExprClosureUse{
Position: yylex.(*Parser).builder.NewTokensPosition($1, $2),
AmpersandTkn: $1,
Var: &ast.ExprVariable{

View File

@ -1274,20 +1274,24 @@ func (n *ExprClone) GetPosition() *position.Position {
// ExprClosure node
type ExprClosure struct {
Position *position.Position
StaticTkn *token.Token
FunctionTkn *token.Token
AmpersandTkn *token.Token
OpenParenthesisTkn *token.Token
Params []Vertex
SeparatorTkns []*token.Token
CloseParenthesisTkn *token.Token
ClosureUse Vertex
ColonTkn *token.Token
ReturnType Vertex
OpenCurlyBracketTkn *token.Token
Stmts []Vertex
CloseCurlyBracketTkn *token.Token
Position *position.Position
StaticTkn *token.Token
FunctionTkn *token.Token
AmpersandTkn *token.Token
OpenParenthesisTkn *token.Token
Params []Vertex
SeparatorTkns []*token.Token
CloseParenthesisTkn *token.Token
UseTkn *token.Token
UseOpenParenthesisTkn *token.Token
Use []Vertex
UseSeparatorTkns []*token.Token
UseCloseParenthesisTkn *token.Token
ColonTkn *token.Token
ReturnType Vertex
OpenCurlyBracketTkn *token.Token
Stmts []Vertex
CloseCurlyBracketTkn *token.Token
}
func (n *ExprClosure) Accept(v NodeVisitor) {
@ -1300,12 +1304,9 @@ func (n *ExprClosure) GetPosition() *position.Position {
// ExprClosureUse node
type ExprClosureUse struct {
Position *position.Position
UseTkn *token.Token
OpenParenthesisTkn *token.Token
Uses []Vertex
SeparatorTkns []*token.Token
CloseParenthesisTkn *token.Token
Position *position.Position
AmpersandTkn *token.Token
Var Vertex
}
func (n *ExprClosureUse) Accept(v NodeVisitor) {

View File

@ -1152,10 +1152,12 @@ func (t *DFS) Traverse(n ast.Vertex) {
}
t.visitor.Leave("Params", false)
}
if nn.ClosureUse != nil {
t.visitor.Enter("ClosureUse", true)
t.Traverse(nn.ClosureUse)
t.visitor.Leave("ClosureUse", true)
if nn.Use != nil {
t.visitor.Enter("Use", false)
for _, c := range nn.Use {
t.Traverse(c)
}
t.visitor.Leave("Use", false)
}
if nn.ReturnType != nil {
t.visitor.Enter("ReturnType", true)
@ -1176,12 +1178,10 @@ func (t *DFS) Traverse(n ast.Vertex) {
if !t.visitor.EnterNode(nn) {
return
}
if nn.Uses != nil {
t.visitor.Enter("Uses", false)
for _, c := range nn.Uses {
t.Traverse(c)
}
t.visitor.Leave("Uses", false)
if nn.Var != nil {
t.visitor.Enter("Var", true)
t.Traverse(nn.Var)
t.visitor.Leave("Var", true)
}
case *ast.ExprConstFetch:
if nn == nil {

View File

@ -1143,7 +1143,11 @@ func (v *Dumper) ExprClosure(n *ast.ExprClosure) {
v.dumpVertexList("Params", n.Params)
v.dumpTokenList("SeparatorTkns", n.SeparatorTkns)
v.dumpToken("CloseParenthesisTkn", n.CloseParenthesisTkn)
v.dumpVertex("ClosureUse", n.ClosureUse)
v.dumpToken("UseTkn", n.UseTkn)
v.dumpToken("UseOpenParenthesisTkn", n.UseOpenParenthesisTkn)
v.dumpVertexList("Use", n.Use)
v.dumpTokenList("UseSeparatorTkns", n.UseSeparatorTkns)
v.dumpToken("UseCloseParenthesisTkn", n.UseCloseParenthesisTkn)
v.dumpToken("ColonTkn", n.ColonTkn)
v.dumpVertex("ReturnType", n.ReturnType)
v.dumpToken("OpenCurlyBracketTkn", n.OpenCurlyBracketTkn)
@ -1159,11 +1163,8 @@ func (v *Dumper) ExprClosureUse(n *ast.ExprClosureUse) {
v.indent++
v.dumpPosition(n.Position)
v.dumpToken("UseTkn", n.UseTkn)
v.dumpToken("OpenParenthesisTkn", n.OpenParenthesisTkn)
v.dumpVertexList("Uses", n.Uses)
v.dumpTokenList("SeparatorTkns", n.SeparatorTkns)
v.dumpToken("CloseParenthesisTkn", n.CloseParenthesisTkn)
v.dumpToken("AmpersandTkn", n.AmpersandTkn)
v.dumpVertex("Var", n.Var)
v.indent--
v.print(v.indent, "},\n")

View File

@ -1162,9 +1162,16 @@ func (f *formatter) ExprClosure(n *ast.ExprClosure) {
}
n.CloseParenthesisTkn = f.newToken(')', []byte(")"))
if n.ClosureUse != nil {
n.UseTkn = nil
n.UseOpenParenthesisTkn = nil
n.UseCloseParenthesisTkn = nil
n.UseSeparatorTkns = nil
if len(n.Use) > 0 {
f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
n.ClosureUse.Accept(f)
n.UseTkn = f.newToken(token.T_USE, []byte("use"))
n.OpenParenthesisTkn = f.newToken('(', []byte("("))
n.SeparatorTkns = f.formatList(n.Use, ',')
n.CloseParenthesisTkn = f.newToken(')', []byte(")"))
}
n.ColonTkn = nil
@ -1189,10 +1196,11 @@ func (f *formatter) ExprClosure(n *ast.ExprClosure) {
}
func (f *formatter) ExprClosureUse(n *ast.ExprClosureUse) {
n.UseTkn = f.newToken(token.T_USE, []byte("use"))
n.OpenParenthesisTkn = f.newToken('(', []byte("("))
n.SeparatorTkns = f.formatList(n.Uses, ',')
n.CloseParenthesisTkn = f.newToken(')', []byte(")"))
if n.AmpersandTkn != nil {
n.AmpersandTkn = f.newToken('&', []byte("&"))
}
n.Var.Accept(f)
}
func (f *formatter) ExprConstFetch(n *ast.ExprConstFetch) {

View File

@ -3714,9 +3714,9 @@ func TestFormatter_ExprClosure_Use(t *testing.T) {
o := bytes.NewBufferString("")
n := &ast.ExprClosure{
ClosureUse: &ast.ExprClosureUse{
Uses: []ast.Vertex{
&ast.ExprVariable{
Use: []ast.Vertex{
&ast.ExprClosureUse{
Var: &ast.ExprVariable{
VarName: &ast.Identifier{
Value: []byte("$foo"),
},
@ -3748,16 +3748,9 @@ func TestFormatter_ExprClosureUse(t *testing.T) {
o := bytes.NewBufferString("")
n := &ast.ExprClosureUse{
Uses: []ast.Vertex{
&ast.ExprVariable{
VarName: &ast.Identifier{
Value: []byte("$a"),
},
},
&ast.ExprVariable{
VarName: &ast.Identifier{
Value: []byte("$b"),
},
Var: &ast.ExprVariable{
VarName: &ast.Identifier{
Value: []byte("$a"),
},
},
}
@ -3768,7 +3761,33 @@ func TestFormatter_ExprClosureUse(t *testing.T) {
p := visitor.NewPrinter(o).WithState(visitor.PrinterStatePHP)
n.Accept(p)
expected := `use($a, $b)`
expected := `$a`
actual := o.String()
if expected != actual {
t.Errorf("\nexpected: %s\ngot: %s\n", expected, actual)
}
}
func TestFormatter_ExprClosureUse_Reference(t *testing.T) {
o := bytes.NewBufferString("")
n := &ast.ExprClosureUse{
AmpersandTkn: &token.Token{},
Var: &ast.ExprVariable{
VarName: &ast.Identifier{
Value: []byte("$a"),
},
},
}
f := visitor.NewFormatter().WithState(visitor.FormatterStatePHP).WithIndent(1)
n.Accept(f)
p := visitor.NewPrinter(o).WithState(visitor.PrinterStatePHP)
n.Accept(p)
expected := `&$a`
actual := o.String()
if expected != actual {

View File

@ -562,7 +562,6 @@ func TestResolveClosureName(t *testing.T) {
Var: &ast.ExprVariable{VarName: &ast.Identifier{Value: []byte("foo")}},
},
},
ClosureUse: nil,
ReturnType: &ast.Nullable{Expr: nameBC},
Stmts: []ast.Vertex{},
}

View File

@ -684,7 +684,10 @@ func (p *printer) ExprClosure(n *ast.ExprClosure) {
p.printToken(n.OpenParenthesisTkn, []byte("("))
p.printSeparatedList(n.Params, n.SeparatorTkns, []byte(","))
p.printToken(n.CloseParenthesisTkn, []byte(")"))
p.printNode(n.ClosureUse)
p.printToken(n.UseTkn, p.ifNodeList(n.Use, []byte("use")))
p.printToken(n.UseOpenParenthesisTkn, p.ifNodeList(n.Use, []byte("(")))
p.printSeparatedList(n.Use, n.UseSeparatorTkns, []byte(","))
p.printToken(n.UseCloseParenthesisTkn, p.ifNodeList(n.Use, []byte(")")))
p.printToken(n.ColonTkn, p.ifNode(n.ReturnType, []byte(":")))
p.printNode(n.ReturnType)
p.printToken(n.OpenCurlyBracketTkn, []byte("{"))
@ -693,10 +696,8 @@ func (p *printer) ExprClosure(n *ast.ExprClosure) {
}
func (p *printer) ExprClosureUse(n *ast.ExprClosureUse) {
p.printToken(n.UseTkn, []byte("use"))
p.printToken(n.OpenParenthesisTkn, []byte("("))
p.printSeparatedList(n.Uses, n.SeparatorTkns, []byte(","))
p.printToken(n.CloseParenthesisTkn, []byte(")"))
p.printToken(n.AmpersandTkn, nil)
p.printNode(n.Var)
}
func (p *printer) ExprConstFetch(n *ast.ExprConstFetch) {

View File

@ -1717,18 +1717,35 @@ func TestPrinterPrintExprClosureUse(t *testing.T) {
p := visitor.NewPrinter(o).WithState(visitor.PrinterStatePHP)
n := &ast.ExprClosureUse{
Uses: []ast.Vertex{
&ast.ExprReference{Var: &ast.ExprVariable{
VarName: &ast.Identifier{Value: []byte("$foo")},
}},
&ast.ExprVariable{
VarName: &ast.Identifier{Value: []byte("$bar")},
},
Var: &ast.ExprVariable{
VarName: &ast.Identifier{Value: []byte("$foo")},
},
}
n.Accept(p)
expected := `use(&$foo,$bar)`
expected := `$foo`
actual := o.String()
if expected != actual {
t.Errorf("\nexpected: %s\ngot: %s\n", expected, actual)
}
}
func TestPrinterPrintExprClosureUse_Reference(t *testing.T) {
o := bytes.NewBufferString("")
p := visitor.NewPrinter(o).WithState(visitor.PrinterStatePHP)
n := &ast.ExprClosureUse{
AmpersandTkn: &token.Token{
Value: []byte("&"),
},
Var: &ast.ExprVariable{
VarName: &ast.Identifier{Value: []byte("$foo")},
},
}
n.Accept(p)
expected := `&$foo`
actual := o.String()
if expected != actual {
@ -1754,12 +1771,17 @@ func TestPrinterPrintExprClosure(t *testing.T) {
},
},
},
ClosureUse: &ast.ExprClosureUse{
Uses: []ast.Vertex{
&ast.ExprReference{Var: &ast.ExprVariable{
Use: []ast.Vertex{
&ast.ExprClosureUse{
AmpersandTkn: &token.Token{
Value: []byte("&"),
},
Var: &ast.ExprVariable{
VarName: &ast.Identifier{Value: []byte("$a")},
}},
&ast.ExprVariable{
},
},
&ast.ExprClosureUse{
Var: &ast.ExprVariable{
VarName: &ast.Identifier{Value: []byte("$b")},
},
},