refactoring: update ast structure of "Closure" and "ClosureUse" nodes
commit b85bae2ec1
parent 03c7979ccd
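Summary of the structural change, as a rough sketch assembled from the ast changes in this diff (unchanged fields and package details are elided, so treat it as illustration rather than the full definitions): the use clause now lives directly on the ExprClosure node, while ExprClosureUse shrinks to a single captured variable.

// Sketch only; remaining fields of ExprClosure are unchanged and omitted here.
type ExprClosure struct {
	// ...
	CloseParenthesisTkn    *token.Token
	UseTkn                 *token.Token   // "use" keyword token; nil when there is no use clause
	UseOpenParenthesisTkn  *token.Token
	Use                    []Vertex       // one ExprClosureUse entry per captured variable
	UseSeparatorTkns       []*token.Token
	UseCloseParenthesisTkn *token.Token
	// ...
}

// ExprClosureUse now describes one entry of the use list.
type ExprClosureUse struct {
	Position     *position.Position
	AmpersandTkn *token.Token // set for by-reference captures, e.g. use (&$a)
	Var          Vertex       // the captured ExprVariable
}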
@@ -31917,13 +31917,6 @@ func TestExprClosure_Use(t *testing.T) {
 EndPos: 19,
 },
 },
-ClosureUse: &ast.ExprClosureUse{
-Position: &position.Position{
-StartLine: 1,
-EndLine: 1,
-StartPos: 20,
-EndPos: 33,
-},
 UseTkn: &token.Token{
 ID: token.T_USE,
 Value: []byte("use"),
@@ -31946,7 +31939,7 @@ func TestExprClosure_Use(t *testing.T) {
 },
 },
 },
-OpenParenthesisTkn: &token.Token{
+UseOpenParenthesisTkn: &token.Token{
 ID: token.ID(40),
 Value: []byte("("),
 Position: &position.Position{
@@ -31968,8 +31961,15 @@ func TestExprClosure_Use(t *testing.T) {
 },
 },
 },
-Uses: []ast.Vertex{
-&ast.ExprVariable{
+Use: []ast.Vertex{
+&ast.ExprClosureUse{
+Position: &position.Position{
+StartLine: 1,
+EndLine: 1,
+StartPos: 25,
+EndPos: 27,
+},
+Var: &ast.ExprVariable{
 Position: &position.Position{
 StartLine: 1,
 EndLine: 1,
@@ -31996,7 +31996,8 @@ func TestExprClosure_Use(t *testing.T) {
 Value: []byte("$c"),
 },
 },
-&ast.ExprReference{
+},
+&ast.ExprClosureUse{
 Position: &position.Position{
 StartLine: 1,
 EndLine: 1,
@@ -32054,7 +32055,7 @@ func TestExprClosure_Use(t *testing.T) {
 },
 },
 },
-SeparatorTkns: []*token.Token{
+UseSeparatorTkns: []*token.Token{
 {
 ID: token.ID(44),
 Value: []byte(","),
@@ -32066,7 +32067,7 @@ func TestExprClosure_Use(t *testing.T) {
 },
 },
 },
-CloseParenthesisTkn: &token.Token{
+UseCloseParenthesisTkn: &token.Token{
 ID: token.ID(41),
 Value: []byte(")"),
 Position: &position.Position{
@@ -32076,7 +32077,6 @@ func TestExprClosure_Use(t *testing.T) {
 EndPos: 33,
 },
 },
-},
 OpenCurlyBracketTkn: &token.Token{
 ID: token.ID(123),
 Value: []byte("{"),
@@ -32123,7 +32123,8 @@ func TestExprClosure_Use(t *testing.T) {
 },
 },
 },
-EndTkn: &token.Token{},
+EndTkn: &token.Token{
+},
 }
 
 lexer := scanner.NewLexer([]byte(src), "5.6", nil)
@@ -32306,13 +32307,6 @@ func TestExprClosure_Use2(t *testing.T) {
 EndPos: 19,
 },
 },
-ClosureUse: &ast.ExprClosureUse{
-Position: &position.Position{
-StartLine: 1,
-EndLine: 1,
-StartPos: 20,
-EndPos: 33,
-},
 UseTkn: &token.Token{
 ID: token.T_USE,
 Value: []byte("use"),
@@ -32335,7 +32329,7 @@ func TestExprClosure_Use2(t *testing.T) {
 },
 },
 },
-OpenParenthesisTkn: &token.Token{
+UseOpenParenthesisTkn: &token.Token{
 ID: token.ID(40),
 Value: []byte("("),
 Position: &position.Position{
@@ -32357,8 +32351,8 @@ func TestExprClosure_Use2(t *testing.T) {
 },
 },
 },
-Uses: []ast.Vertex{
-&ast.ExprReference{
+Use: []ast.Vertex{
+&ast.ExprClosureUse{
 Position: &position.Position{
 StartLine: 1,
 EndLine: 1,
@@ -32403,7 +32397,14 @@ func TestExprClosure_Use2(t *testing.T) {
 },
 },
 },
-&ast.ExprVariable{
+&ast.ExprClosureUse{
+Position: &position.Position{
+StartLine: 1,
+EndLine: 1,
+StartPos: 30,
+EndPos: 32,
+},
+Var: &ast.ExprVariable{
 Position: &position.Position{
 StartLine: 1,
 EndLine: 1,
@@ -32443,7 +32444,8 @@ func TestExprClosure_Use2(t *testing.T) {
 },
 },
 },
-SeparatorTkns: []*token.Token{
+},
+UseSeparatorTkns: []*token.Token{
 {
 ID: token.ID(44),
 Value: []byte(","),
@@ -32455,7 +32457,7 @@ func TestExprClosure_Use2(t *testing.T) {
 },
 },
 },
-CloseParenthesisTkn: &token.Token{
+UseCloseParenthesisTkn: &token.Token{
 ID: token.ID(41),
 Value: []byte(")"),
 Position: &position.Position{
@@ -32465,7 +32467,6 @@ func TestExprClosure_Use2(t *testing.T) {
 EndPos: 33,
 },
 },
-},
 OpenCurlyBracketTkn: &token.Token{
 ID: token.ID(123),
 Value: []byte("{"),
@@ -32512,7 +32513,8 @@ func TestExprClosure_Use2(t *testing.T) {
 },
 },
 },
-EndTkn: &token.Token{},
+EndTkn: &token.Token{
+},
 }
 
 lexer := scanner.NewLexer([]byte(src), "5.6", nil)
BIN internal/php5/php5.go (generated)
Binary file not shown.
@@ -3367,36 +3367,38 @@ expr_without_variable:
 }
 | function is_reference '(' parameter_list ')' lexical_vars '{' inner_statement_list '}'
 {
-$$ = &ast.ExprClosure{
-Position: yylex.(*Parser).builder.NewTokensPosition($1, $9),
-FunctionTkn: $1,
-AmpersandTkn: $2,
-OpenParenthesisTkn: $3,
-Params: $4.(*ast.ParserSeparatedList).Items,
-SeparatorTkns: $4.(*ast.ParserSeparatedList).SeparatorTkns,
-CloseParenthesisTkn: $5,
-ClosureUse: $6,
-OpenCurlyBracketTkn: $7,
-Stmts: $8,
-CloseCurlyBracketTkn: $9,
-}
+closure := $6.(*ast.ExprClosure)
+
+closure.Position = yylex.(*Parser).builder.NewTokensPosition($1, $9)
+closure.FunctionTkn = $1
+closure.AmpersandTkn = $2
+closure.OpenParenthesisTkn = $3
+closure.Params = $4.(*ast.ParserSeparatedList).Items
+closure.SeparatorTkns = $4.(*ast.ParserSeparatedList).SeparatorTkns
+closure.CloseParenthesisTkn = $5
+closure.OpenCurlyBracketTkn = $7
+closure.Stmts = $8
+closure.CloseCurlyBracketTkn = $9
+
+$$ = closure
 }
 | T_STATIC function is_reference '(' parameter_list ')' lexical_vars '{' inner_statement_list '}'
 {
-$$ = &ast.ExprClosure{
-Position: yylex.(*Parser).builder.NewTokensPosition($1, $10),
-StaticTkn: $1,
-FunctionTkn: $2,
-AmpersandTkn: $3,
-OpenParenthesisTkn: $4,
-Params: $5.(*ast.ParserSeparatedList).Items,
-SeparatorTkns: $5.(*ast.ParserSeparatedList).SeparatorTkns,
-CloseParenthesisTkn: $6,
-ClosureUse: $7,
-OpenCurlyBracketTkn: $8,
-Stmts: $9,
-CloseCurlyBracketTkn: $10,
-}
+closure := $7.(*ast.ExprClosure)
+
+closure.Position = yylex.(*Parser).builder.NewTokensPosition($1, $10)
+closure.StaticTkn = $1
+closure.FunctionTkn = $2
+closure.AmpersandTkn = $3
+closure.OpenParenthesisTkn = $4
+closure.Params = $5.(*ast.ParserSeparatedList).Items
+closure.SeparatorTkns = $5.(*ast.ParserSeparatedList).SeparatorTkns
+closure.CloseParenthesisTkn = $6
+closure.OpenCurlyBracketTkn = $8
+closure.Stmts = $9
+closure.CloseCurlyBracketTkn = $10
+
+$$ = closure
 }
 ;
 
@@ -3520,17 +3522,16 @@ function:
 lexical_vars:
 /* empty */
 {
-$$ = nil
+$$ = &ast.ExprClosure{}
 }
 | T_USE '(' lexical_var_list ')'
 {
-$$ = &ast.ExprClosureUse{
-Position: yylex.(*Parser).builder.NewTokensPosition($1, $4),
+$$ = &ast.ExprClosure{
 UseTkn: $1,
-OpenParenthesisTkn: $2,
-Uses: $3.(*ast.ParserSeparatedList).Items,
-SeparatorTkns: $3.(*ast.ParserSeparatedList).SeparatorTkns,
-CloseParenthesisTkn: $4,
+UseOpenParenthesisTkn: $2,
+Use: $3.(*ast.ParserSeparatedList).Items,
+UseSeparatorTkns: $3.(*ast.ParserSeparatedList).SeparatorTkns,
+UseCloseParenthesisTkn: $4,
 }
 }
 ;
@@ -3538,13 +3539,16 @@ lexical_vars:
 lexical_var_list:
 lexical_var_list ',' T_VARIABLE
 {
-variable := &ast.ExprVariable{
+variable := &ast.ExprClosureUse{
+Position: yylex.(*Parser).builder.NewTokenPosition($3),
+Var: &ast.ExprVariable{
 Position: yylex.(*Parser).builder.NewTokenPosition($3),
 VarName: &ast.Identifier{
 Position: yylex.(*Parser).builder.NewTokenPosition($3),
 IdentifierTkn: $3,
 Value: $3.Value,
 },
+},
 }
 
 $1.(*ast.ParserSeparatedList).SeparatorTkns = append($1.(*ast.ParserSeparatedList).SeparatorTkns, $2)
@@ -3554,7 +3558,7 @@ lexical_var_list:
 }
 | lexical_var_list ',' '&' T_VARIABLE
 {
-reference := &ast.ExprReference{
+variable := &ast.ExprClosureUse{
 Position: yylex.(*Parser).builder.NewTokensPosition($3, $4),
 AmpersandTkn: $3,
 Var: &ast.ExprVariable{
@@ -3568,15 +3572,15 @@ lexical_var_list:
 }
 
 $1.(*ast.ParserSeparatedList).SeparatorTkns = append($1.(*ast.ParserSeparatedList).SeparatorTkns, $2)
-$1.(*ast.ParserSeparatedList).Items = append($1.(*ast.ParserSeparatedList).Items, reference)
+$1.(*ast.ParserSeparatedList).Items = append($1.(*ast.ParserSeparatedList).Items, variable)
 
 $$ = $1
 }
 | T_VARIABLE
 {
-$$ = &ast.ParserSeparatedList{
-Items: []ast.Vertex{
-&ast.ExprVariable{
+variable := &ast.ExprClosureUse{
+Position: yylex.(*Parser).builder.NewTokenPosition($1),
+Var: &ast.ExprVariable{
 Position: yylex.(*Parser).builder.NewTokenPosition($1),
 VarName: &ast.Identifier{
 Position: yylex.(*Parser).builder.NewTokenPosition($1),
@@ -3584,14 +3588,15 @@ lexical_var_list:
 Value: $1.Value,
 },
 },
-},
+}
+
+$$ = &ast.ParserSeparatedList{
+Items: []ast.Vertex{ variable },
 }
 }
 | '&' T_VARIABLE
 {
-$$ = &ast.ParserSeparatedList{
-Items: []ast.Vertex{
-&ast.ExprReference{
+variable := &ast.ExprClosureUse{
 Position: yylex.(*Parser).builder.NewTokensPosition($1, $2),
 AmpersandTkn: $1,
 Var: &ast.ExprVariable{
@@ -3602,8 +3607,10 @@ lexical_var_list:
 Value: $2.Value,
 },
 },
-},
-},
+}
+
+$$ = &ast.ParserSeparatedList{
+Items: []ast.Vertex{ variable },
 }
 }
 ;
@@ -36415,13 +36415,6 @@ func TestExprClosure_Use(t *testing.T) {
 EndPos: 19,
 },
 },
-ClosureUse: &ast.ExprClosureUse{
-Position: &position.Position{
-StartLine: 1,
-EndLine: 1,
-StartPos: 20,
-EndPos: 33,
-},
 UseTkn: &token.Token{
 ID: token.T_USE,
 Value: []byte("use"),
@@ -36444,7 +36437,7 @@ func TestExprClosure_Use(t *testing.T) {
 },
 },
 },
-OpenParenthesisTkn: &token.Token{
+UseOpenParenthesisTkn: &token.Token{
 ID: token.ID(40),
 Value: []byte("("),
 Position: &position.Position{
@@ -36466,8 +36459,15 @@ func TestExprClosure_Use(t *testing.T) {
 },
 },
 },
-Uses: []ast.Vertex{
-&ast.ExprVariable{
+Use: []ast.Vertex{
+&ast.ExprClosureUse{
+Position: &position.Position{
+StartLine: 1,
+EndLine: 1,
+StartPos: 25,
+EndPos: 27,
+},
+Var: &ast.ExprVariable{
 Position: &position.Position{
 StartLine: 1,
 EndLine: 1,
@@ -36494,7 +36494,8 @@ func TestExprClosure_Use(t *testing.T) {
 Value: []byte("$c"),
 },
 },
-&ast.ExprReference{
+},
+&ast.ExprClosureUse{
 Position: &position.Position{
 StartLine: 1,
 EndLine: 1,
@@ -36552,7 +36553,7 @@ func TestExprClosure_Use(t *testing.T) {
 },
 },
 },
-SeparatorTkns: []*token.Token{
+UseSeparatorTkns: []*token.Token{
 {
 ID: token.ID(44),
 Value: []byte(","),
@@ -36564,7 +36565,7 @@ func TestExprClosure_Use(t *testing.T) {
 },
 },
 },
-CloseParenthesisTkn: &token.Token{
+UseCloseParenthesisTkn: &token.Token{
 ID: token.ID(41),
 Value: []byte(")"),
 Position: &position.Position{
@@ -36574,7 +36575,6 @@ func TestExprClosure_Use(t *testing.T) {
 EndPos: 33,
 },
 },
-},
 OpenCurlyBracketTkn: &token.Token{
 ID: token.ID(123),
 Value: []byte("{"),
@@ -36621,7 +36621,8 @@ func TestExprClosure_Use(t *testing.T) {
 },
 },
 },
-EndTkn: &token.Token{},
+EndTkn: &token.Token{
+},
 }
 
 lexer := scanner.NewLexer([]byte(src), "7.4", nil)
@@ -36804,13 +36805,6 @@ func TestExprClosure_Use2(t *testing.T) {
 EndPos: 19,
 },
 },
-ClosureUse: &ast.ExprClosureUse{
-Position: &position.Position{
-StartLine: 1,
-EndLine: 1,
-StartPos: 20,
-EndPos: 33,
-},
 UseTkn: &token.Token{
 ID: token.T_USE,
 Value: []byte("use"),
@@ -36833,7 +36827,7 @@ func TestExprClosure_Use2(t *testing.T) {
 },
 },
 },
-OpenParenthesisTkn: &token.Token{
+UseOpenParenthesisTkn: &token.Token{
 ID: token.ID(40),
 Value: []byte("("),
 Position: &position.Position{
@@ -36855,8 +36849,8 @@ func TestExprClosure_Use2(t *testing.T) {
 },
 },
 },
-Uses: []ast.Vertex{
-&ast.ExprReference{
+Use: []ast.Vertex{
+&ast.ExprClosureUse{
 Position: &position.Position{
 StartLine: 1,
 EndLine: 1,
@@ -36901,7 +36895,14 @@ func TestExprClosure_Use2(t *testing.T) {
 },
 },
 },
-&ast.ExprVariable{
+&ast.ExprClosureUse{
+Position: &position.Position{
+StartLine: 1,
+EndLine: 1,
+StartPos: 30,
+EndPos: 32,
+},
+Var: &ast.ExprVariable{
 Position: &position.Position{
 StartLine: 1,
 EndLine: 1,
@@ -36941,7 +36942,8 @@ func TestExprClosure_Use2(t *testing.T) {
 },
 },
 },
-SeparatorTkns: []*token.Token{
+},
+UseSeparatorTkns: []*token.Token{
 {
 ID: token.ID(44),
 Value: []byte(","),
@@ -36953,7 +36955,7 @@ func TestExprClosure_Use2(t *testing.T) {
 },
 },
 },
-CloseParenthesisTkn: &token.Token{
+UseCloseParenthesisTkn: &token.Token{
 ID: token.ID(41),
 Value: []byte(")"),
 Position: &position.Position{
@@ -36963,7 +36965,6 @@ func TestExprClosure_Use2(t *testing.T) {
 EndPos: 33,
 },
 },
-},
 OpenCurlyBracketTkn: &token.Token{
 ID: token.ID(123),
 Value: []byte("{"),
@@ -37010,7 +37011,8 @@ func TestExprClosure_Use2(t *testing.T) {
 },
 },
 },
-EndTkn: &token.Token{},
+EndTkn: &token.Token{
+},
 }
 
 lexer := scanner.NewLexer([]byte(src), "7.4", nil)
BIN internal/php7/php7.go (generated)
Binary file not shown.
@@ -3178,21 +3178,22 @@ expr_without_variable:
 inline_function:
 T_FUNCTION returns_ref backup_doc_comment '(' parameter_list ')' lexical_vars return_type '{' inner_statement_list '}'
 {
-$$ = &ast.ExprClosure{
-Position: yylex.(*Parser).builder.NewTokensPosition($1, $11),
-FunctionTkn: $1,
-AmpersandTkn: $2,
-OpenParenthesisTkn: $4,
-Params: $5.(*ast.ParserSeparatedList).Items,
-SeparatorTkns: $5.(*ast.ParserSeparatedList).SeparatorTkns,
-CloseParenthesisTkn: $6,
-ClosureUse: $7,
-ColonTkn: $8.(*ast.ReturnType).ColonTkn,
-ReturnType: $8.(*ast.ReturnType).Type,
-OpenCurlyBracketTkn: $9,
-Stmts: $10,
-CloseCurlyBracketTkn: $11,
-}
+closure := $7.(*ast.ExprClosure)
+
+closure.Position = yylex.(*Parser).builder.NewTokensPosition($1, $11)
+closure.FunctionTkn = $1
+closure.AmpersandTkn = $2
+closure.OpenParenthesisTkn = $4
+closure.Params = $5.(*ast.ParserSeparatedList).Items
+closure.SeparatorTkns = $5.(*ast.ParserSeparatedList).SeparatorTkns
+closure.CloseParenthesisTkn = $6
+closure.ColonTkn = $8.(*ast.ReturnType).ColonTkn
+closure.ReturnType = $8.(*ast.ReturnType).Type
+closure.OpenCurlyBracketTkn = $9
+closure.Stmts = $10
+closure.CloseCurlyBracketTkn = $11
+
+$$ = closure
 }
 | T_FN returns_ref '(' parameter_list ')' return_type backup_doc_comment T_DOUBLE_ARROW expr
 {
@@ -3230,17 +3231,16 @@ returns_ref:
 lexical_vars:
 /* empty */
 {
-$$ = nil
+$$ = &ast.ExprClosure{}
 }
 | T_USE '(' lexical_var_list ')'
 {
-$$ = &ast.ExprClosureUse{
-Position: yylex.(*Parser).builder.NewTokensPosition($1, $4),
+$$ = &ast.ExprClosure{
 UseTkn: $1,
-OpenParenthesisTkn: $2,
-Uses: $3.(*ast.ParserSeparatedList).Items,
-SeparatorTkns: $3.(*ast.ParserSeparatedList).SeparatorTkns,
-CloseParenthesisTkn: $4,
+UseOpenParenthesisTkn: $2,
+Use: $3.(*ast.ParserSeparatedList).Items,
+UseSeparatorTkns: $3.(*ast.ParserSeparatedList).SeparatorTkns,
+UseCloseParenthesisTkn: $4,
 }
 }
 ;
@@ -3264,18 +3264,21 @@ lexical_var_list:
 lexical_var:
 T_VARIABLE
 {
-$$ = &ast.ExprVariable{
+$$ = &ast.ExprClosureUse{
+Position: yylex.(*Parser).builder.NewTokenPosition($1),
+Var: &ast.ExprVariable{
 Position: yylex.(*Parser).builder.NewTokenPosition($1),
 VarName: &ast.Identifier{
 Position: yylex.(*Parser).builder.NewTokenPosition($1),
 IdentifierTkn: $1,
 Value: $1.Value,
 },
+},
 }
 }
 | '&' T_VARIABLE
 {
-$$ = &ast.ExprReference{
+$$ = &ast.ExprClosureUse{
 Position: yylex.(*Parser).builder.NewTokensPosition($1, $2),
 AmpersandTkn: $1,
 Var: &ast.ExprVariable{
@@ -1282,7 +1282,11 @@ type ExprClosure struct {
 Params []Vertex
 SeparatorTkns []*token.Token
 CloseParenthesisTkn *token.Token
-ClosureUse Vertex
+UseTkn *token.Token
+UseOpenParenthesisTkn *token.Token
+Use []Vertex
+UseSeparatorTkns []*token.Token
+UseCloseParenthesisTkn *token.Token
 ColonTkn *token.Token
 ReturnType Vertex
 OpenCurlyBracketTkn *token.Token
@@ -1301,11 +1305,8 @@ func (n *ExprClosure) GetPosition() *position.Position {
 // ExprClosureUse node
 type ExprClosureUse struct {
 Position *position.Position
-UseTkn *token.Token
-OpenParenthesisTkn *token.Token
-Uses []Vertex
-SeparatorTkns []*token.Token
-CloseParenthesisTkn *token.Token
+AmpersandTkn *token.Token
+Var Vertex
 }
 
 func (n *ExprClosureUse) Accept(v NodeVisitor) {
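For orientation, a use clause such as use (&$a, $b) is now a flat list of ExprClosureUse entries on the closure node itself. A minimal sketch of the new shape, mirroring the printer test further down in this diff (fragment only; positions and most tokens omitted, and it assumes the repository's ast and token packages are imported):

// Sketch: the new representation of `use (&$a, $b)` on an ExprClosure.
n := &ast.ExprClosure{
	Use: []ast.Vertex{
		&ast.ExprClosureUse{
			AmpersandTkn: &token.Token{Value: []byte("&")}, // by-reference capture
			Var: &ast.ExprVariable{
				VarName: &ast.Identifier{Value: []byte("$a")},
			},
		},
		&ast.ExprClosureUse{
			Var: &ast.ExprVariable{
				VarName: &ast.Identifier{Value: []byte("$b")},
			},
		},
	},
}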
|
@ -1152,10 +1152,12 @@ func (t *DFS) Traverse(n ast.Vertex) {
|
|||||||
}
|
}
|
||||||
t.visitor.Leave("Params", false)
|
t.visitor.Leave("Params", false)
|
||||||
}
|
}
|
||||||
if nn.ClosureUse != nil {
|
if nn.Use != nil {
|
||||||
t.visitor.Enter("ClosureUse", true)
|
t.visitor.Enter("Use", false)
|
||||||
t.Traverse(nn.ClosureUse)
|
for _, c := range nn.Use {
|
||||||
t.visitor.Leave("ClosureUse", true)
|
t.Traverse(c)
|
||||||
|
}
|
||||||
|
t.visitor.Leave("Use", false)
|
||||||
}
|
}
|
||||||
if nn.ReturnType != nil {
|
if nn.ReturnType != nil {
|
||||||
t.visitor.Enter("ReturnType", true)
|
t.visitor.Enter("ReturnType", true)
|
||||||
@@ -1176,12 +1178,10 @@ func (t *DFS) Traverse(n ast.Vertex) {
 if !t.visitor.EnterNode(nn) {
 return
 }
-if nn.Uses != nil {
-t.visitor.Enter("Uses", false)
-for _, c := range nn.Uses {
-t.Traverse(c)
-}
-t.visitor.Leave("Uses", false)
+if nn.Var != nil {
+t.visitor.Enter("Var", true)
+t.Traverse(nn.Var)
+t.visitor.Leave("Var", true)
 }
 case *ast.ExprConstFetch:
 if nn == nil {
@@ -1143,7 +1143,11 @@ func (v *Dumper) ExprClosure(n *ast.ExprClosure) {
 v.dumpVertexList("Params", n.Params)
 v.dumpTokenList("SeparatorTkns", n.SeparatorTkns)
 v.dumpToken("CloseParenthesisTkn", n.CloseParenthesisTkn)
-v.dumpVertex("ClosureUse", n.ClosureUse)
+v.dumpToken("UseTkn", n.UseTkn)
+v.dumpToken("UseOpenParenthesisTkn", n.UseOpenParenthesisTkn)
+v.dumpVertexList("Use", n.Use)
+v.dumpTokenList("UseSeparatorTkns", n.UseSeparatorTkns)
+v.dumpToken("UseCloseParenthesisTkn", n.UseCloseParenthesisTkn)
 v.dumpToken("ColonTkn", n.ColonTkn)
 v.dumpVertex("ReturnType", n.ReturnType)
 v.dumpToken("OpenCurlyBracketTkn", n.OpenCurlyBracketTkn)
@@ -1159,11 +1163,8 @@ func (v *Dumper) ExprClosureUse(n *ast.ExprClosureUse) {
 v.indent++
 
 v.dumpPosition(n.Position)
-v.dumpToken("UseTkn", n.UseTkn)
-v.dumpToken("OpenParenthesisTkn", n.OpenParenthesisTkn)
-v.dumpVertexList("Uses", n.Uses)
-v.dumpTokenList("SeparatorTkns", n.SeparatorTkns)
-v.dumpToken("CloseParenthesisTkn", n.CloseParenthesisTkn)
+v.dumpToken("AmpersandTkn", n.AmpersandTkn)
+v.dumpVertex("Var", n.Var)
 
 v.indent--
 v.print(v.indent, "},\n")
@@ -1162,9 +1162,16 @@ func (f *formatter) ExprClosure(n *ast.ExprClosure) {
 }
 n.CloseParenthesisTkn = f.newToken(')', []byte(")"))
 
-if n.ClosureUse != nil {
+n.UseTkn = nil
+n.UseOpenParenthesisTkn = nil
+n.UseCloseParenthesisTkn = nil
+n.UseSeparatorTkns = nil
+if len(n.Use) > 0 {
 f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
-n.ClosureUse.Accept(f)
+n.UseTkn = f.newToken(token.T_USE, []byte("use"))
+n.OpenParenthesisTkn = f.newToken('(', []byte("("))
+n.SeparatorTkns = f.formatList(n.Use, ',')
+n.CloseParenthesisTkn = f.newToken(')', []byte(")"))
 }
 
 n.ColonTkn = nil
@@ -1189,10 +1196,11 @@ func (f *formatter) ExprClosure(n *ast.ExprClosure) {
 }
 
 func (f *formatter) ExprClosureUse(n *ast.ExprClosureUse) {
-n.UseTkn = f.newToken(token.T_USE, []byte("use"))
-n.OpenParenthesisTkn = f.newToken('(', []byte("("))
-n.SeparatorTkns = f.formatList(n.Uses, ',')
-n.CloseParenthesisTkn = f.newToken(')', []byte(")"))
+if n.AmpersandTkn != nil {
+n.AmpersandTkn = f.newToken('&', []byte("&"))
+}
+
+n.Var.Accept(f)
 }
 
 func (f *formatter) ExprConstFetch(n *ast.ExprConstFetch) {
@@ -3714,9 +3714,9 @@ func TestFormatter_ExprClosure_Use(t *testing.T) {
 o := bytes.NewBufferString("")
 
 n := &ast.ExprClosure{
-ClosureUse: &ast.ExprClosureUse{
-Uses: []ast.Vertex{
-&ast.ExprVariable{
+Use: []ast.Vertex{
+&ast.ExprClosureUse{
+Var: &ast.ExprVariable{
 VarName: &ast.Identifier{
 Value: []byte("$foo"),
 },
@@ -3748,18 +3748,11 @@ func TestFormatter_ExprClosureUse(t *testing.T) {
 o := bytes.NewBufferString("")
 
 n := &ast.ExprClosureUse{
-Uses: []ast.Vertex{
-&ast.ExprVariable{
+Var: &ast.ExprVariable{
 VarName: &ast.Identifier{
 Value: []byte("$a"),
 },
 },
-&ast.ExprVariable{
-VarName: &ast.Identifier{
-Value: []byte("$b"),
-},
-},
-},
 }
 
 f := visitor.NewFormatter().WithState(visitor.FormatterStatePHP).WithIndent(1)
@@ -3768,7 +3761,33 @@ func TestFormatter_ExprClosureUse(t *testing.T) {
 p := visitor.NewPrinter(o).WithState(visitor.PrinterStatePHP)
 n.Accept(p)
 
-expected := `use($a, $b)`
+expected := `$a`
+actual := o.String()
+
+if expected != actual {
+t.Errorf("\nexpected: %s\ngot: %s\n", expected, actual)
+}
+}
+
+func TestFormatter_ExprClosureUse_Reference(t *testing.T) {
+o := bytes.NewBufferString("")
+
+n := &ast.ExprClosureUse{
+AmpersandTkn: &token.Token{},
+Var: &ast.ExprVariable{
+VarName: &ast.Identifier{
+Value: []byte("$a"),
+},
+},
+}
+
+f := visitor.NewFormatter().WithState(visitor.FormatterStatePHP).WithIndent(1)
+n.Accept(f)
+
+p := visitor.NewPrinter(o).WithState(visitor.PrinterStatePHP)
+n.Accept(p)
+
+expected := `&$a`
 actual := o.String()
 
 if expected != actual {
@@ -562,7 +562,6 @@ func TestResolveClosureName(t *testing.T) {
 Var: &ast.ExprVariable{VarName: &ast.Identifier{Value: []byte("foo")}},
 },
 },
-ClosureUse: nil,
 ReturnType: &ast.Nullable{Expr: nameBC},
 Stmts: []ast.Vertex{},
 }
@@ -684,7 +684,10 @@ func (p *printer) ExprClosure(n *ast.ExprClosure) {
 p.printToken(n.OpenParenthesisTkn, []byte("("))
 p.printSeparatedList(n.Params, n.SeparatorTkns, []byte(","))
 p.printToken(n.CloseParenthesisTkn, []byte(")"))
-p.printNode(n.ClosureUse)
+p.printToken(n.UseTkn, p.ifNodeList(n.Use, []byte("use")))
+p.printToken(n.UseOpenParenthesisTkn, p.ifNodeList(n.Use, []byte("(")))
+p.printSeparatedList(n.Use, n.UseSeparatorTkns, []byte(","))
+p.printToken(n.UseCloseParenthesisTkn, p.ifNodeList(n.Use, []byte(")")))
 p.printToken(n.ColonTkn, p.ifNode(n.ReturnType, []byte(":")))
 p.printNode(n.ReturnType)
 p.printToken(n.OpenCurlyBracketTkn, []byte("{"))
@@ -693,10 +696,8 @@ func (p *printer) ExprClosure(n *ast.ExprClosure) {
 }
 
 func (p *printer) ExprClosureUse(n *ast.ExprClosureUse) {
-p.printToken(n.UseTkn, []byte("use"))
-p.printToken(n.OpenParenthesisTkn, []byte("("))
-p.printSeparatedList(n.Uses, n.SeparatorTkns, []byte(","))
-p.printToken(n.CloseParenthesisTkn, []byte(")"))
+p.printToken(n.AmpersandTkn, nil)
+p.printNode(n.Var)
 }
 
 func (p *printer) ExprConstFetch(n *ast.ExprConstFetch) {
@@ -1717,18 +1717,35 @@ func TestPrinterPrintExprClosureUse(t *testing.T) {
 
 p := visitor.NewPrinter(o).WithState(visitor.PrinterStatePHP)
 n := &ast.ExprClosureUse{
-Uses: []ast.Vertex{
-&ast.ExprReference{Var: &ast.ExprVariable{
+Var: &ast.ExprVariable{
 VarName: &ast.Identifier{Value: []byte("$foo")},
-}},
-&ast.ExprVariable{
-VarName: &ast.Identifier{Value: []byte("$bar")},
-},
-},
 },
 }
 n.Accept(p)
 
-expected := `use(&$foo,$bar)`
+expected := `$foo`
+actual := o.String()
+
+if expected != actual {
+t.Errorf("\nexpected: %s\ngot: %s\n", expected, actual)
+}
+}
+
+func TestPrinterPrintExprClosureUse_Reference(t *testing.T) {
+o := bytes.NewBufferString("")
+
+p := visitor.NewPrinter(o).WithState(visitor.PrinterStatePHP)
+n := &ast.ExprClosureUse{
+AmpersandTkn: &token.Token{
+Value: []byte("&"),
+},
+Var: &ast.ExprVariable{
+VarName: &ast.Identifier{Value: []byte("$foo")},
+},
+}
+n.Accept(p)
+
+expected := `&$foo`
 actual := o.String()
 
 if expected != actual {
@@ -1754,12 +1771,17 @@ func TestPrinterPrintExprClosure(t *testing.T) {
 },
 },
 },
-ClosureUse: &ast.ExprClosureUse{
-Uses: []ast.Vertex{
-&ast.ExprReference{Var: &ast.ExprVariable{
+Use: []ast.Vertex{
+&ast.ExprClosureUse{
+AmpersandTkn: &token.Token{
+Value: []byte("&"),
+},
+Var: &ast.ExprVariable{
 VarName: &ast.Identifier{Value: []byte("$a")},
-}},
-&ast.ExprVariable{
+},
+},
+&ast.ExprClosureUse{
+Var: &ast.ExprVariable{
 VarName: &ast.Identifier{Value: []byte("$b")},
 },
 },