[refactoring] update ast structure of "ClosureUse", "ConstFetch", "Empty" and "ErrorSuppress" nodes
parent b90400d993
commit 2d240e9475
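The same pattern is applied in every touched grammar action: the old code built the node with a positional struct literal and then attached the position and the skipped tokens afterwards, while the new code builds the node with named fields and stores the relevant tokens on the node itself. A simplified before/after sketch of the '@' expr action (illustrative only, condensed from the diff below):

    // before: positional literal, position and comments attached afterwards
    $$ = &ast.ExprErrorSuppress{ast.Node{}, $2}
    $$.GetNode().Position = position.NewTokenNodePosition($1, $2)
    yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens)

    // after: named fields, the '@' token travels with the node
    $$ = &ast.ExprErrorSuppress{
        Node: ast.Node{Position: position.NewTokenNodePosition($1, $2)},
        AtTkn: $1,
        Expr:  $2,
    }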
BIN  internal/php5/php5.go (generated): Binary file not shown.

internal/php5/php5.y:
@@ -239,7 +239,7 @@ import (
 %type <node> static_scalar_value static_operation static_var_list global_var_list
 %type <node> ctor_arguments function_call_parameter_list echo_expr_list class_variable_declaration
 %type <node> trait_adaptations unset_variables declare_list non_empty_array_pair_list array_pair_list
-%type <node> switch_case_list non_empty_function_call_parameter_list assignment_list
+%type <node> switch_case_list non_empty_function_call_parameter_list assignment_list lexical_var_list
 %type <node> method_body trait_reference_list static_array_pair_list non_empty_static_array_pair_list
 %type <node> foreach_statement for_statement while_statement
 %type <node> foreach_variable foreach_optional_arg
@@ -250,7 +250,7 @@ import (
 
 %type <list> top_statement_list namespace_name use_declarations use_function_declarations use_const_declarations
 %type <list> inner_statement_list encaps_list isset_variables
-%type <list> lexical_var_list elseif_list new_elseif_list non_empty_for_expr
+%type <list> elseif_list new_elseif_list non_empty_for_expr
 %type <list> for_expr case_list catch_statement additional_catches
 %type <list> non_empty_additional_catches parameter_list non_empty_parameter_list class_statement_list
 %type <list> class_statement_list variable_modifiers method_modifiers
@@ -3890,13 +3890,13 @@ expr_without_variable:
             }
     |   '@' expr
             {
-                $$ = &ast.ExprErrorSuppress{ast.Node{}, $2}
-
-                // save position
-                $$.GetNode().Position = position.NewTokenNodePosition($1, $2)
-
-                // save comments
-                yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens)
+                $$ = &ast.ExprErrorSuppress{
+                    Node: ast.Node{
+                        Position: position.NewTokenNodePosition($1, $2),
+                    },
+                    AtTkn: $1,
+                    Expr: $2,
+                }
             }
     |   scalar
             {
@@ -4121,98 +4121,107 @@ lexical_vars:
             }
     |   T_USE '(' lexical_var_list ')'
             {
-                $$ = &ast.ExprClosureUse{ast.Node{}, $3}
-
-                // save position
-                $$.GetNode().Position = position.NewTokensPosition($1, $4)
-
-                // save comments
-                yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens)
-                yylex.(*Parser).setFreeFloating($$, token.Use, $2.SkippedTokens)
-                yylex.(*Parser).setFreeFloating($$, token.LexicalVarList, $4.SkippedTokens)
+                $$ = &ast.ExprClosureUse{
+                    Node: ast.Node{
+                        Position: position.NewTokensPosition($1, $4),
+                    },
+                    UseTkn: $1,
+                    OpenParenthesisTkn: $2,
+                    Uses: $3.(*ast.ParserSeparatedList).Items,
+                    SeparatorTkns: $3.(*ast.ParserSeparatedList).SeparatorTkns,
+                    CloseParenthesisTkn: $4,
+                }
             }
     ;
 
 lexical_var_list:
         lexical_var_list ',' T_VARIABLE
             {
-                identifier := &ast.Identifier{
+                variable := &ast.ExprVariable{
                     Node: ast.Node{
                         Position: position.NewTokenPosition($3),
                     },
-                    IdentifierTkn: $3,
-                    Value: $3.Value,
+                    VarName: &ast.Identifier{
+                        Node: ast.Node{
+                            Position: position.NewTokenPosition($3),
+                        },
+                        IdentifierTkn: $3,
+                        Value: $3.Value,
+                    },
                 }
-                variable := &ast.ExprVariable{ast.Node{}, identifier}
-                $$ = append($1, variable)
 
-                // save position
-                variable.GetNode().Position = position.NewTokenPosition($3)
+                $1.(*ast.ParserSeparatedList).SeparatorTkns = append($1.(*ast.ParserSeparatedList).SeparatorTkns, $2)
+                $1.(*ast.ParserSeparatedList).Items = append($1.(*ast.ParserSeparatedList).Items, variable)
 
-                // save comments
-                yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens)
-                yylex.(*Parser).setFreeFloating(variable, token.Start, $3.SkippedTokens)
+                $$ = $1
             }
    |   lexical_var_list ',' '&' T_VARIABLE
             {
-                identifier := &ast.Identifier{
+                reference := &ast.ExprReference{
                     Node: ast.Node{
-                        Position: position.NewTokenPosition($4),
+                        Position: position.NewTokensPosition($3, $4),
+                    },
+                    Var: &ast.ExprVariable{
+                        Node: ast.Node{
+                            Position: position.NewTokenPosition($4),
+                        },
+                        VarName: &ast.Identifier{
+                            Node: ast.Node{
+                                Position: position.NewTokenPosition($4),
+                            },
+                            IdentifierTkn: $4,
+                            Value: $4.Value,
+                        },
                     },
-                    IdentifierTkn: $4,
-                    Value: $4.Value,
                 }
-                variable := &ast.ExprVariable{ast.Node{}, identifier}
-                reference := &ast.ExprReference{ast.Node{}, variable}
-                $$ = append($1, reference)
 
-                // save position
-                variable.GetNode().Position = position.NewTokenPosition($4)
-                reference.GetNode().Position = position.NewTokensPosition($3, $4)
+                $1.(*ast.ParserSeparatedList).SeparatorTkns = append($1.(*ast.ParserSeparatedList).SeparatorTkns, $2)
+                $1.(*ast.ParserSeparatedList).Items = append($1.(*ast.ParserSeparatedList).Items, reference)
 
-                // save comments
-                yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens)
-                yylex.(*Parser).setFreeFloating(reference, token.Start, $3.SkippedTokens)
-                yylex.(*Parser).setFreeFloating(variable, token.Start, $4.SkippedTokens)
+                $$ = $1
             }
    |   T_VARIABLE
             {
-                identifier := &ast.Identifier{
-                    Node: ast.Node{
-                        Position: position.NewTokenPosition($1),
+                $$ = &ast.ParserSeparatedList{
+                    Items: []ast.Vertex{
+                        &ast.ExprVariable{
+                            Node: ast.Node{
+                                Position: position.NewTokenPosition($1),
+                            },
+                            VarName: &ast.Identifier{
+                                Node: ast.Node{
+                                    Position: position.NewTokenPosition($1),
+                                },
+                                IdentifierTkn: $1,
+                                Value: $1.Value,
+                            },
+                        },
                     },
-                    IdentifierTkn: $1,
-                    Value: $1.Value,
                 }
-                variable := &ast.ExprVariable{ast.Node{}, identifier}
-                $$ = []ast.Vertex{variable}
-
-                // save position
-                variable.GetNode().Position = position.NewTokenPosition($1)
-
-                // save comments
-                yylex.(*Parser).setFreeFloating(variable, token.Start, $1.SkippedTokens)
             }
    |   '&' T_VARIABLE
             {
-                identifier := &ast.Identifier{
-                    Node: ast.Node{
-                        Position: position.NewTokenPosition($2),
+                $$ = &ast.ParserSeparatedList{
+                    Items: []ast.Vertex{
+                        &ast.ExprReference{
+                            Node: ast.Node{
+                                Position: position.NewTokensPosition($1, $2),
+                            },
+                            Var: &ast.ExprVariable{
+                                Node: ast.Node{
+                                    Position: position.NewTokenPosition($2),
+                                },
+                                VarName: &ast.Identifier{
+                                    Node: ast.Node{
+                                        Position: position.NewTokenPosition($2),
+                                    },
+                                    IdentifierTkn: $2,
+                                    Value: $2.Value,
+                                },
+                            },
+                        },
                     },
-                    IdentifierTkn: $2,
-                    Value: $2.Value,
                 }
-                variable := &ast.ExprVariable{ast.Node{}, identifier}
-                reference := &ast.ExprReference{ast.Node{}, variable}
-                $$ = []ast.Vertex{reference}
-
-                // save position
-                variable.GetNode().Position = position.NewTokenPosition($2)
-                reference.GetNode().Position = position.NewTokensPosition($1, $2)
-
-                // save comments
-                yylex.(*Parser).setFreeFloating(reference, token.Start, $1.SkippedTokens)
-                yylex.(*Parser).setFreeFloating(variable, token.Start, $2.SkippedTokens)
             }
     ;
 
@@ -4700,45 +4709,48 @@ static_scalar_value:
             }
     |   namespace_name
             {
-                name := &ast.NameName{
+                $$ = &ast.ExprConstFetch{
                     Node: ast.Node{
                         Position: position.NewNodeListPosition($1),
                     },
-                    Parts: $1,
+                    Const: &ast.NameName{
+                        Node: ast.Node{
+                            Position: position.NewNodeListPosition($1),
+                        },
+                        Parts: $1,
+                    },
                 }
-                $$ = &ast.ExprConstFetch{ast.Node{}, name}
-
-                // save position
-                $$.GetNode().Position = position.NewNodePosition(name)
             }
     |   T_NAMESPACE T_NS_SEPARATOR namespace_name
             {
-                name := &ast.NameRelative{
+                $$ = &ast.ExprConstFetch{
                     Node: ast.Node{
                         Position: position.NewTokenNodeListPosition($1, $3),
                     },
-                    NsTkn: $1,
-                    NsSeparatorTkn: $2,
-                    Parts: $3,
+                    Const: &ast.NameRelative{
+                        Node: ast.Node{
+                            Position: position.NewTokenNodeListPosition($1, $3),
+                        },
+                        NsTkn: $1,
+                        NsSeparatorTkn: $2,
+                        Parts: $3,
+                    },
                 }
-                $$ = &ast.ExprConstFetch{ast.Node{}, name}
-
-                // save position
-                $$.GetNode().Position = position.NewTokenNodeListPosition($1, $3)
             }
     |   T_NS_SEPARATOR namespace_name
             {
-                name := &ast.NameFullyQualified{
+                $$ = &ast.ExprConstFetch{
                     Node: ast.Node{
                         Position: position.NewTokenNodeListPosition($1, $2),
                     },
-                    NsSeparatorTkn: $1,
-                    Parts: $2,
+                    Const: &ast.NameFullyQualified{
+                        Node: ast.Node{
+                            Position: position.NewTokenNodeListPosition($1, $2),
+                        },
+                        NsSeparatorTkn: $1,
+                        Parts: $2,
+                    },
                 }
-                $$ = &ast.ExprConstFetch{ast.Node{}, name}
-
-                // save position
-                $$.GetNode().Position = position.NewTokenNodeListPosition($1, $2)
             }
     |   T_ARRAY '(' static_array_pair_list ')'
             {
@@ -5155,45 +5167,48 @@ general_constant:
             }
     |   namespace_name
             {
-                name := &ast.NameName{
+                $$ = &ast.ExprConstFetch{
                     Node: ast.Node{
                         Position: position.NewNodeListPosition($1),
                     },
-                    Parts: $1,
+                    Const: &ast.NameName{
+                        Node: ast.Node{
+                            Position: position.NewNodeListPosition($1),
+                        },
+                        Parts: $1,
+                    },
                 }
-                $$ = &ast.ExprConstFetch{ast.Node{}, name}
-
-                // save position
-                $$.GetNode().Position = position.NewNodePosition(name)
             }
     |   T_NAMESPACE T_NS_SEPARATOR namespace_name
             {
-                name := &ast.NameRelative{
+                $$ = &ast.ExprConstFetch{
                     Node: ast.Node{
                         Position: position.NewTokenNodeListPosition($1, $3),
                     },
-                    NsTkn: $1,
-                    NsSeparatorTkn: $2,
-                    Parts: $3,
+                    Const: &ast.NameRelative{
+                        Node: ast.Node{
+                            Position: position.NewTokenNodeListPosition($1, $3),
+                        },
+                        NsTkn: $1,
+                        NsSeparatorTkn: $2,
+                        Parts: $3,
+                    },
                 }
-                $$ = &ast.ExprConstFetch{ast.Node{}, name}
-
-                // save position
-                $$.GetNode().Position = position.NewNodePosition(name)
             }
     |   T_NS_SEPARATOR namespace_name
             {
-                name := &ast.NameFullyQualified{
+                $$ = &ast.ExprConstFetch{
                     Node: ast.Node{
                         Position: position.NewTokenNodeListPosition($1, $2),
                     },
-                    NsSeparatorTkn: $1,
-                    Parts: $2,
+                    Const: &ast.NameFullyQualified{
+                        Node: ast.Node{
+                            Position: position.NewTokenNodeListPosition($1, $2),
+                        },
+                        NsSeparatorTkn: $1,
+                        Parts: $2,
+                    },
                 }
-                $$ = &ast.ExprConstFetch{ast.Node{}, name}
-
-                // save position
-                $$.GetNode().Position = position.NewNodePosition(name)
             }
     ;
 
@@ -6316,43 +6331,27 @@ internal_functions_in_yacc:
             }
     |   T_EMPTY '(' variable ')'
             {
-                exprBrackets := &ast.ParserBrackets{
+                $$ = &ast.ExprEmpty{
                     Node: ast.Node{
-                        Position: position.NewTokensPosition($2, $4),
+                        Position: position.NewTokensPosition($1, $4),
                     },
-                    OpenBracketTkn: $2,
-                    Child: $3,
-                    CloseBracketTkn: $4,
+                    EmptyTkn: $1,
+                    OpenParenthesisTkn: $2,
+                    Expr: $3,
+                    CloseParenthesisTkn: $4,
                 }
-                $$ = &ast.ExprEmpty{ast.Node{}, exprBrackets}
-
-                // save position
-                $$.GetNode().Position = position.NewTokensPosition($1, $4)
-
-                // save comments
-                yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens)
-                yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens)
-                yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens)
             }
     |   T_EMPTY '(' expr ')'
             {
-                exprBrackets := &ast.ParserBrackets{
+                $$ = &ast.ExprEmpty{
                     Node: ast.Node{
-                        Position: position.NewTokensPosition($2, $4),
+                        Position: position.NewTokensPosition($1, $4),
                     },
-                    OpenBracketTkn: $2,
-                    Child: $3,
-                    CloseBracketTkn: $4,
+                    EmptyTkn: $1,
+                    OpenParenthesisTkn: $2,
+                    Expr: $3,
+                    CloseParenthesisTkn: $4,
                 }
-                $$ = &ast.ExprEmpty{ast.Node{}, exprBrackets}
-
-                // save position
-                $$.GetNode().Position = position.NewTokensPosition($1, $4)
-
-                // save comments
-                yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens)
-                yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens)
-                yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens)
             }
     |   T_INCLUDE expr
             {
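In both grammars, lexical_var_list now reduces to a single *ast.ParserSeparatedList node (hence the move from %type <list> to %type <node>), which carries the variables in Items and the ',' tokens in a parallel SeparatorTkns slice. A minimal sketch of how a consumer might walk that pairing, for example to reprint a closure use clause; printNode and printToken are hypothetical helpers, not part of the package:

    // n is an *ast.ExprClosureUse produced by the T_USE rule above
    for i, use := range n.Uses {
        printNode(use) // the lexical variable (or &$var reference)
        if i < len(n.SeparatorTkns) {
            printToken(n.SeparatorTkns[i]) // the ',' that followed it in the source
        }
    }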
BIN  internal/php7/php7.go (generated): Binary file not shown.

internal/php7/php7.y:
@@ -248,7 +248,7 @@ import (
 %type <node> callable_expr callable_variable static_member new_variable
 %type <node> encaps_var encaps_var_offset echo_expr_list catch_name_list name_list
 %type <node> if_stmt const_list non_empty_argument_list property_list
-%type <node> alt_if_stmt
+%type <node> alt_if_stmt lexical_var_list
 %type <node> if_stmt_without_else
 %type <node> class_const_decl
 %type <node> alt_if_stmt_without_else
@@ -276,7 +276,7 @@ import (
 %type <list> for_exprs non_empty_for_exprs
 %type <list> unprefixed_use_declarations inline_use_declarations
 %type <list> case_list trait_adaptation_list
-%type <list> use_declarations lexical_var_list isset_variables
+%type <list> use_declarations isset_variables
 %type <list> top_statement_list
 %type <list> inner_statement_list parameter_list non_empty_parameter_list class_statement_list
 %type <list> method_modifiers variable_modifiers
@@ -3551,13 +3551,13 @@ expr_without_variable:
             }
     |   '@' expr
             {
-                $$ = &ast.ExprErrorSuppress{ast.Node{}, $2}
-
-                // save position
-                $$.GetNode().Position = position.NewTokenNodePosition($1, $2)
-
-                // save comments
-                yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens)
+                $$ = &ast.ExprErrorSuppress{
+                    Node: ast.Node{
+                        Position: position.NewTokenNodePosition($1, $2),
+                    },
+                    AtTkn: $1,
+                    Expr: $2,
+                }
             }
     |   scalar
             {
@@ -3704,29 +3704,32 @@ lexical_vars:
             }
     |   T_USE '(' lexical_var_list ')'
             {
-                $$ = &ast.ExprClosureUse{ast.Node{}, $3}
-
-                // save position
-                $$.GetNode().Position = position.NewTokensPosition($1, $4)
-
-                // save comments
-                yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens)
-                yylex.(*Parser).setFreeFloating($$, token.Use, $2.SkippedTokens)
-                yylex.(*Parser).setFreeFloating($$, token.LexicalVarList, $4.SkippedTokens)
+                $$ = &ast.ExprClosureUse{
+                    Node: ast.Node{
+                        Position: position.NewTokensPosition($1, $4),
+                    },
+                    UseTkn: $1,
+                    OpenParenthesisTkn: $2,
+                    Uses: $3.(*ast.ParserSeparatedList).Items,
+                    SeparatorTkns: $3.(*ast.ParserSeparatedList).SeparatorTkns,
+                    CloseParenthesisTkn: $4,
+                }
             }
     ;
 
 lexical_var_list:
         lexical_var_list ',' lexical_var
             {
-                $$ = append($1, $3)
+                $1.(*ast.ParserSeparatedList).SeparatorTkns = append($1.(*ast.ParserSeparatedList).SeparatorTkns, $2)
+                $1.(*ast.ParserSeparatedList).Items = append($1.(*ast.ParserSeparatedList).Items, $3)
 
-                // save comments
-                yylex.(*Parser).setFreeFloating(lastNode($1), token.End, $2.SkippedTokens)
+                $$ = $1
             }
     |   lexical_var
             {
-                $$ = []ast.Vertex{$1}
+                $$ = &ast.ParserSeparatedList{
+                    Items: []ast.Vertex{$1},
+                }
             }
     ;
 
@@ -4102,13 +4105,12 @@ scalar:
 constant:
         name
             {
-                $$ = &ast.ExprConstFetch{ast.Node{}, $1}
-
-                // save position
-                $$.GetNode().Position = position.NewNodePosition($1)
-
-                // save comments
-                yylex.(*Parser).MoveFreeFloating($1, $$)
+                $$ = &ast.ExprConstFetch{
+                    Node: ast.Node{
+                        Position: position.NewNodePosition($1),
+                    },
+                    Const: $1,
+                }
             }
     |   class_name T_PAAMAYIM_NEKUDOTAYIM identifier
             {
@@ -4898,23 +4900,15 @@ internal_functions_in_yacc:
             }
     |   T_EMPTY '(' expr ')'
             {
-                exprBrackets := &ast.ParserBrackets{
+                $$ = &ast.ExprEmpty{
                     Node: ast.Node{
-                        Position: position.NewTokensPosition($2, $4),
+                        Position: position.NewTokensPosition($1, $4),
                     },
-                    OpenBracketTkn: $2,
-                    Child: $3,
-                    CloseBracketTkn: $4,
+                    EmptyTkn: $1,
+                    OpenParenthesisTkn: $2,
+                    Expr: $3,
+                    CloseParenthesisTkn: $4,
                 }
-                $$ = &ast.ExprEmpty{ast.Node{}, exprBrackets}
-
-                // save position
-                $$.GetNode().Position = position.NewTokensPosition($1, $4)
-
-                // save comments
-                yylex.(*Parser).setFreeFloating($$, token.Start, $1.SkippedTokens)
-                yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.Start, $2.SkippedTokens)
-                yylex.(*Parser).setFreeFloatingTokens(exprBrackets, token.End, $4.SkippedTokens)
             }
     |   T_INCLUDE expr
             {
@@ -1066,7 +1066,11 @@ func (n *ExprClosure) Accept(v NodeVisitor) {
 // ExprClosureUse node
 type ExprClosureUse struct {
     Node
-    Uses []Vertex
+    UseTkn              *token.Token
+    OpenParenthesisTkn  *token.Token
+    Uses                []Vertex
+    SeparatorTkns       []*token.Token
+    CloseParenthesisTkn *token.Token
 }
 
 func (n *ExprClosureUse) Accept(v NodeVisitor) {
@@ -1086,7 +1090,10 @@ func (n *ExprConstFetch) Accept(v NodeVisitor) {
 // ExprEmpty node
 type ExprEmpty struct {
     Node
-    Expr Vertex
+    EmptyTkn            *token.Token
+    OpenParenthesisTkn  *token.Token
+    Expr                Vertex
+    CloseParenthesisTkn *token.Token
 }
 
 func (n *ExprEmpty) Accept(v NodeVisitor) {
@@ -1096,7 +1103,8 @@ func (n *ExprEmpty) Accept(v NodeVisitor) {
 // ExprErrorSuppress node
 type ExprErrorSuppress struct {
     Node
-    Expr Vertex
+    AtTkn *token.Token
+    Expr  Vertex
 }
 
 func (n *ExprErrorSuppress) Accept(v NodeVisitor) {
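With the struct changes above, tokens that previously had to be recovered from the free-floating comment data now live in named fields on each node. A minimal sketch of a consumer reading the new fields (the inspect helper is hypothetical, used only for illustration):

    switch n := node.(type) {
    case *ast.ExprErrorSuppress:
        _ = n.AtTkn              // the '@' token is stored on the node
        inspect(n.Expr)
    case *ast.ExprEmpty:
        _ = n.EmptyTkn           // 'empty'
        _ = n.OpenParenthesisTkn // '('
        _ = n.CloseParenthesisTkn
        inspect(n.Expr)
    case *ast.ExprClosureUse:
        _ = n.UseTkn             // 'use'
        for _, u := range n.Uses {
            inspect(u)
        }
    }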