refactoring: update AST structure of "Closure" and "ClosureUse" nodes

This commit is contained in:
Vadym Slizov
2020-12-26 18:20:10 +02:00
parent 03c7979ccd
commit b85bae2ec1
14 changed files with 1092 additions and 1017 deletions

View File

@@ -31770,7 +31770,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 36,
},
FunctionTkn: &token.Token{
ID: token.T_FUNCTION,
ID: token.T_FUNCTION,
Value: []byte("function"),
Position: &position.Position{
StartLine: 1,
@@ -31780,7 +31780,7 @@ func TestExprClosure_Use(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_OPEN_TAG,
ID: token.T_OPEN_TAG,
Value: []byte("<?"),
Position: &position.Position{
StartLine: 1,
@@ -31790,7 +31790,7 @@ func TestExprClosure_Use(t *testing.T) {
},
},
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@@ -31802,7 +31802,7 @@ func TestExprClosure_Use(t *testing.T) {
},
},
OpenParenthesisTkn: &token.Token{
ID: token.ID(40),
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
@@ -31834,7 +31834,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 14,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$a"),
Position: &position.Position{
StartLine: 1,
@@ -31869,7 +31869,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 18,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$b"),
Position: &position.Position{
StartLine: 1,
@@ -31879,7 +31879,7 @@ func TestExprClosure_Use(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@@ -31897,7 +31897,7 @@ func TestExprClosure_Use(t *testing.T) {
},
SeparatorTkns: []*token.Token{
{
ID: token.ID(44),
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
@@ -31908,7 +31908,7 @@ func TestExprClosure_Use(t *testing.T) {
},
},
CloseParenthesisTkn: &token.Token{
ID: token.ID(41),
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
@@ -31917,59 +31917,59 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 19,
},
},
ClosureUse: &ast.ExprClosureUse{
UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 20,
EndPos: 33,
EndPos: 23,
},
UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 20,
EndPos: 23,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
},
},
OpenParenthesisTkn: &token.Token{
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 24,
EndPos: 25,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 23,
EndPos: 24,
},
},
UseOpenParenthesisTkn: &token.Token{
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 24,
EndPos: 25,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 23,
EndPos: 24,
},
},
},
Uses: []ast.Vertex{
&ast.ExprVariable{
},
Use: []ast.Vertex{
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 27,
},
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
@@ -31984,7 +31984,7 @@ func TestExprClosure_Use(t *testing.T) {
EndPos: 27,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$c"),
Position: &position.Position{
StartLine: 1,
@@ -31996,89 +31996,89 @@ func TestExprClosure_Use(t *testing.T) {
Value: []byte("$c"),
},
},
&ast.ExprReference{
},
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 29,
EndPos: 32,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 29,
EndPos: 32,
EndPos: 30,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 29,
EndPos: 30,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 28,
EndPos: 29,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 28,
EndPos: 29,
},
},
},
Var: &ast.ExprVariable{
},
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
VarName: &ast.Identifier{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
VarName: &ast.Identifier{
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$d"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$d"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
},
Value: []byte("$d"),
},
Value: []byte("$d"),
},
},
},
SeparatorTkns: []*token.Token{
{
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 27,
EndPos: 28,
},
},
},
CloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 32,
EndPos: 33,
},
},
},
UseSeparatorTkns: []*token.Token{
{
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 27,
EndPos: 28,
},
},
},
UseCloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 32,
EndPos: 33,
},
},
OpenCurlyBracketTkn: &token.Token{
ID: token.ID(123),
ID: token.ID(123),
Value: []byte("{"),
Position: &position.Position{
StartLine: 1,
@@ -32088,7 +32088,7 @@ func TestExprClosure_Use(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@@ -32101,7 +32101,7 @@ func TestExprClosure_Use(t *testing.T) {
},
Stmts: []ast.Vertex{},
CloseCurlyBracketTkn: &token.Token{
ID: token.ID(125),
ID: token.ID(125),
Value: []byte("}"),
Position: &position.Position{
StartLine: 1,
@@ -32112,7 +32112,7 @@ func TestExprClosure_Use(t *testing.T) {
},
},
SemiColonTkn: &token.Token{
ID: token.ID(59),
ID: token.ID(59),
Value: []byte(";"),
Position: &position.Position{
StartLine: 1,
@@ -32123,7 +32123,8 @@ func TestExprClosure_Use(t *testing.T) {
},
},
},
EndTkn: &token.Token{},
EndTkn: &token.Token{
},
}
lexer := scanner.NewLexer([]byte(src), "5.6", nil)
@@ -32159,7 +32160,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 36,
},
FunctionTkn: &token.Token{
ID: token.T_FUNCTION,
ID: token.T_FUNCTION,
Value: []byte("function"),
Position: &position.Position{
StartLine: 1,
@@ -32169,7 +32170,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_OPEN_TAG,
ID: token.T_OPEN_TAG,
Value: []byte("<?"),
Position: &position.Position{
StartLine: 1,
@@ -32179,7 +32180,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@@ -32191,7 +32192,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
OpenParenthesisTkn: &token.Token{
ID: token.ID(40),
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
@@ -32223,7 +32224,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 14,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$a"),
Position: &position.Position{
StartLine: 1,
@@ -32258,7 +32259,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 18,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$b"),
Position: &position.Position{
StartLine: 1,
@@ -32268,7 +32269,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@@ -32286,7 +32287,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
SeparatorTkns: []*token.Token{
{
ID: token.ID(44),
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
@@ -32297,7 +32298,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
CloseParenthesisTkn: &token.Token{
ID: token.ID(41),
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
@@ -32306,104 +32307,104 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 19,
},
},
ClosureUse: &ast.ExprClosureUse{
UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 20,
EndPos: 33,
EndPos: 23,
},
UseTkn: &token.Token{
ID: token.T_USE,
Value: []byte("use"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 20,
EndPos: 23,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 19,
EndPos: 20,
},
},
},
OpenParenthesisTkn: &token.Token{
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 24,
EndPos: 25,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 23,
EndPos: 24,
},
},
UseOpenParenthesisTkn: &token.Token{
ID: token.ID(40),
Value: []byte("("),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 24,
EndPos: 25,
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 23,
EndPos: 24,
},
},
},
Uses: []ast.Vertex{
&ast.ExprReference{
},
Use: []ast.Vertex{
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 28,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 26,
},
},
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28,
},
AmpersandTkn: &token.Token{
ID: token.ID(38),
Value: []byte("&"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 25,
EndPos: 26,
},
},
Var: &ast.ExprVariable{
VarName: &ast.Identifier{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28,
},
VarName: &ast.Identifier{
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$c"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
Value: []byte("$c"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 26,
EndPos: 28,
},
},
Value: []byte("$c"),
},
Value: []byte("$c"),
},
},
&ast.ExprVariable{
},
&ast.ExprClosureUse{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 30,
EndPos: 32,
},
Var: &ast.ExprVariable{
Position: &position.Position{
StartLine: 1,
EndLine: 1,
@@ -32418,7 +32419,7 @@ func TestExprClosure_Use2(t *testing.T) {
EndPos: 32,
},
IdentifierTkn: &token.Token{
ID: token.T_VARIABLE,
ID: token.T_VARIABLE,
Value: []byte("$d"),
Position: &position.Position{
StartLine: 1,
@@ -32428,7 +32429,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@@ -32443,31 +32444,31 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
},
SeparatorTkns: []*token.Token{
{
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 28,
EndPos: 29,
},
},
},
CloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
},
UseSeparatorTkns: []*token.Token{
{
ID: token.ID(44),
Value: []byte(","),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 32,
EndPos: 33,
StartPos: 28,
EndPos: 29,
},
},
},
UseCloseParenthesisTkn: &token.Token{
ID: token.ID(41),
Value: []byte(")"),
Position: &position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 32,
EndPos: 33,
},
},
OpenCurlyBracketTkn: &token.Token{
ID: token.ID(123),
ID: token.ID(123),
Value: []byte("{"),
Position: &position.Position{
StartLine: 1,
@@ -32477,7 +32478,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
FreeFloating: []*token.Token{
{
ID: token.T_WHITESPACE,
ID: token.T_WHITESPACE,
Value: []byte(" "),
Position: &position.Position{
StartLine: 1,
@@ -32490,7 +32491,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
Stmts: []ast.Vertex{},
CloseCurlyBracketTkn: &token.Token{
ID: token.ID(125),
ID: token.ID(125),
Value: []byte("}"),
Position: &position.Position{
StartLine: 1,
@@ -32501,7 +32502,7 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
SemiColonTkn: &token.Token{
ID: token.ID(59),
ID: token.ID(59),
Value: []byte(";"),
Position: &position.Position{
StartLine: 1,
@@ -32512,7 +32513,8 @@ func TestExprClosure_Use2(t *testing.T) {
},
},
},
EndTkn: &token.Token{},
EndTkn: &token.Token{
},
}
lexer := scanner.NewLexer([]byte(src), "5.6", nil)

569
internal/php5/php5.go generated

File diff suppressed because it is too large. Load Diff

View File

@@ -3367,36 +3367,38 @@ expr_without_variable:
}
| function is_reference '(' parameter_list ')' lexical_vars '{' inner_statement_list '}'
{
$$ = &ast.ExprClosure{
Position: yylex.(*Parser).builder.NewTokensPosition($1, $9),
FunctionTkn: $1,
AmpersandTkn: $2,
OpenParenthesisTkn: $3,
Params: $4.(*ast.ParserSeparatedList).Items,
SeparatorTkns: $4.(*ast.ParserSeparatedList).SeparatorTkns,
CloseParenthesisTkn: $5,
ClosureUse: $6,
OpenCurlyBracketTkn: $7,
Stmts: $8,
CloseCurlyBracketTkn: $9,
}
closure := $6.(*ast.ExprClosure)
closure.Position = yylex.(*Parser).builder.NewTokensPosition($1, $9)
closure.FunctionTkn = $1
closure.AmpersandTkn = $2
closure.OpenParenthesisTkn = $3
closure.Params = $4.(*ast.ParserSeparatedList).Items
closure.SeparatorTkns = $4.(*ast.ParserSeparatedList).SeparatorTkns
closure.CloseParenthesisTkn = $5
closure.OpenCurlyBracketTkn = $7
closure.Stmts = $8
closure.CloseCurlyBracketTkn = $9
$$ = closure
}
| T_STATIC function is_reference '(' parameter_list ')' lexical_vars '{' inner_statement_list '}'
{
$$ = &ast.ExprClosure{
Position: yylex.(*Parser).builder.NewTokensPosition($1, $10),
StaticTkn: $1,
FunctionTkn: $2,
AmpersandTkn: $3,
OpenParenthesisTkn: $4,
Params: $5.(*ast.ParserSeparatedList).Items,
SeparatorTkns: $5.(*ast.ParserSeparatedList).SeparatorTkns,
CloseParenthesisTkn: $6,
ClosureUse: $7,
OpenCurlyBracketTkn: $8,
Stmts: $9,
CloseCurlyBracketTkn: $10,
}
closure := $7.(*ast.ExprClosure)
closure.Position = yylex.(*Parser).builder.NewTokensPosition($1, $10)
closure.StaticTkn = $1
closure.FunctionTkn = $2
closure.AmpersandTkn = $3
closure.OpenParenthesisTkn = $4
closure.Params = $5.(*ast.ParserSeparatedList).Items
closure.SeparatorTkns = $5.(*ast.ParserSeparatedList).SeparatorTkns
closure.CloseParenthesisTkn = $6
closure.OpenCurlyBracketTkn = $8
closure.Stmts = $9
closure.CloseCurlyBracketTkn = $10
$$ = closure
}
;
@@ -3520,17 +3522,16 @@ function:
lexical_vars:
/* empty */
{
$$ = nil
$$ = &ast.ExprClosure{}
}
| T_USE '(' lexical_var_list ')'
{
$$ = &ast.ExprClosureUse{
Position: yylex.(*Parser).builder.NewTokensPosition($1, $4),
UseTkn: $1,
OpenParenthesisTkn: $2,
Uses: $3.(*ast.ParserSeparatedList).Items,
SeparatorTkns: $3.(*ast.ParserSeparatedList).SeparatorTkns,
CloseParenthesisTkn: $4,
$$ = &ast.ExprClosure{
UseTkn: $1,
UseOpenParenthesisTkn: $2,
Use: $3.(*ast.ParserSeparatedList).Items,
UseSeparatorTkns: $3.(*ast.ParserSeparatedList).SeparatorTkns,
UseCloseParenthesisTkn: $4,
}
}
;
@@ -3538,12 +3539,15 @@ lexical_vars:
lexical_var_list:
lexical_var_list ',' T_VARIABLE
{
variable := &ast.ExprVariable{
variable := &ast.ExprClosureUse{
Position: yylex.(*Parser).builder.NewTokenPosition($3),
VarName: &ast.Identifier{
Var: &ast.ExprVariable{
Position: yylex.(*Parser).builder.NewTokenPosition($3),
IdentifierTkn: $3,
Value: $3.Value,
VarName: &ast.Identifier{
Position: yylex.(*Parser).builder.NewTokenPosition($3),
IdentifierTkn: $3,
Value: $3.Value,
},
},
}
@@ -3554,7 +3558,7 @@ lexical_var_list:
}
| lexical_var_list ',' '&' T_VARIABLE
{
reference := &ast.ExprReference{
variable := &ast.ExprClosureUse{
Position: yylex.(*Parser).builder.NewTokensPosition($3, $4),
AmpersandTkn: $3,
Var: &ast.ExprVariable{
@@ -3568,43 +3572,46 @@ lexical_var_list:
}
$1.(*ast.ParserSeparatedList).SeparatorTkns = append($1.(*ast.ParserSeparatedList).SeparatorTkns, $2)
$1.(*ast.ParserSeparatedList).Items = append($1.(*ast.ParserSeparatedList).Items, reference)
$1.(*ast.ParserSeparatedList).Items = append($1.(*ast.ParserSeparatedList).Items, variable)
$$ = $1
}
| T_VARIABLE
{
$$ = &ast.ParserSeparatedList{
Items: []ast.Vertex{
&ast.ExprVariable{
variable := &ast.ExprClosureUse{
Position: yylex.(*Parser).builder.NewTokenPosition($1),
Var: &ast.ExprVariable{
Position: yylex.(*Parser).builder.NewTokenPosition($1),
VarName: &ast.Identifier{
Position: yylex.(*Parser).builder.NewTokenPosition($1),
VarName: &ast.Identifier{
Position: yylex.(*Parser).builder.NewTokenPosition($1),
IdentifierTkn: $1,
Value: $1.Value,
},
IdentifierTkn: $1,
Value: $1.Value,
},
},
}
$$ = &ast.ParserSeparatedList{
Items: []ast.Vertex{ variable },
}
}
| '&' T_VARIABLE
{
$$ = &ast.ParserSeparatedList{
Items: []ast.Vertex{
&ast.ExprReference{
Position: yylex.(*Parser).builder.NewTokensPosition($1, $2),
AmpersandTkn: $1,
Var: &ast.ExprVariable{
Position: yylex.(*Parser).builder.NewTokenPosition($2),
VarName: &ast.Identifier{
Position: yylex.(*Parser).builder.NewTokenPosition($2),
IdentifierTkn: $2,
Value: $2.Value,
},
},
variable := &ast.ExprClosureUse{
Position: yylex.(*Parser).builder.NewTokensPosition($1, $2),
AmpersandTkn: $1,
Var: &ast.ExprVariable{
Position: yylex.(*Parser).builder.NewTokenPosition($2),
VarName: &ast.Identifier{
Position: yylex.(*Parser).builder.NewTokenPosition($2),
IdentifierTkn: $2,
Value: $2.Value,
},
},
}
$$ = &ast.ParserSeparatedList{
Items: []ast.Vertex{ variable },
}
}
;