constant nodes

This commit is contained in:
z7zmey 2017-12-05 00:02:24 +02:00
parent e7ba0ca435
commit 347cb09386
12 changed files with 691 additions and 596 deletions

View File

@ -1 +1,3 @@
<? "test"; <? $a = 'test
$a test
test';

View File

@ -1,27 +0,0 @@
package node
import (
"fmt"
"github.com/z7zmey/php-parser/token"
"io"
)
// NodeScalarEncapsedStringPart represents one literal text fragment of an
// encapsed (interpolated double-quoted/heredoc) PHP string.
// NOTE(review): this is the pre-refactor node deleted by this commit; it is
// superseded by scalar.EncapsedStringPart in node/scalar.
type NodeScalarEncapsedStringPart struct {
	*SimpleNode
	token token.Token // lexer token carrying the fragment text and line span
}

// NewNodeScalarEncapsedStringPart wraps token t in a new encapsed-string-part node.
func NewNodeScalarEncapsedStringPart(t token.Token) Node {
	return NodeScalarEncapsedStringPart{
		&SimpleNode{Name: "NodeScalarEncapsedStringPart", Attributes: make(map[string]string)},
		t,
	}
}

// Print writes the node name, start/end lines and quoted token value to out,
// then recursively prints children with one extra level of indentation.
func (n NodeScalarEncapsedStringPart) Print(out io.Writer, indent string) {
	fmt.Fprintf(out, "\n%v%v [%d %d] %q", indent, n.Name, n.token.StartLine, n.token.EndLine, n.token.Value)
	for _, nn := range n.Children {
		nn.Print(out, indent+" ")
	}
}

View File

@ -1,28 +0,0 @@
package node
import (
"fmt"
"github.com/z7zmey/php-parser/token"
"io"
)
// NodeScalarString represents a constant PHP string scalar.
// NOTE(review): this is the pre-refactor node deleted by this commit; it is
// superseded by scalar.String in node/scalar.
type NodeScalarString struct {
	*SimpleNode
	token token.Token // lexer token carrying the string text and line span
}

// NewNodeScalarString wraps token t in a new string scalar node.
func NewNodeScalarString(t token.Token) Node {
	return NodeScalarString{
		&SimpleNode{Name: "NodeScalarString", Attributes: make(map[string]string)},
		t,
	}
}

// Print writes the node name, start/end lines and quoted token value to out,
// then recursively prints children with one extra level of indentation.
func (n NodeScalarString) Print(out io.Writer, indent string) {
	fmt.Fprintf(out, "\n%v%v [%d %d] %q", indent, n.Name, n.token.StartLine, n.token.EndLine, n.token.Value)
	for _, nn := range n.Children {
		nn.Print(out, indent+" ")
	}
}

29
node/scalar/dnumber.go Normal file
View File

@ -0,0 +1,29 @@
package scalar
import (
"fmt"
"github.com/z7zmey/php-parser/token"
"github.com/z7zmey/php-parser/node"
"io"
)
// Dnumber represents a floating point (T_DNUMBER) literal scalar.
type Dnumber struct {
	node.SimpleNode
	token token.Token // lexer token carrying the literal text and line span
}

// NewDnumber builds a Dnumber node around the given token.
func NewDnumber(t token.Token) node.Node {
	base := node.SimpleNode{Name: "Dnumber", Attributes: make(map[string]string)}
	return Dnumber{SimpleNode: base, token: t}
}

// Print writes the node name, start/end lines and quoted token value to out,
// then recursively prints any children with one extra indent level.
func (n Dnumber) Print(out io.Writer, indent string) {
	fmt.Fprintf(out, "\n%v%v [%d %d] %q", indent, n.Name, n.token.StartLine, n.token.EndLine, n.token.Value)
	for _, child := range n.Children {
		child.Print(out, indent+" ")
	}
}

View File

@ -1,30 +1,31 @@
package node package scalar
import ( import (
"fmt" "fmt"
"github.com/z7zmey/php-parser/token" "github.com/z7zmey/php-parser/token"
"github.com/z7zmey/php-parser/node"
"io" "io"
) )
type NodeScalarEncapsed struct { type Encapsed struct {
*SimpleNode node.SimpleNode
startToken token.Token startToken token.Token
endToken token.Token endToken token.Token
parts []Node parts []node.Node
} }
func NewNodeScalarEncapsed(startToken token.Token, parts []Node, endToken token.Token) Node { func NewEncapsed(startToken token.Token, parts []node.Node, endToken token.Token) node.Node {
return NodeScalarEncapsed{ return Encapsed{
&SimpleNode{Name: "NodeScalarEncapsed", Attributes: make(map[string]string)}, node.SimpleNode{Name: "Encapsed", Attributes: make(map[string]string)},
startToken, startToken,
endToken, endToken,
parts, parts,
} }
} }
func (n NodeScalarEncapsed) Print(out io.Writer, indent string) { func (n Encapsed) Print(out io.Writer, indent string) {
fmt.Fprintf(out, "\n%v%v [%d %d]", indent, n.Name, n.startToken.StartLine, n.endToken.EndLine) fmt.Fprintf(out, "\n%v%v [%d %d]", indent, n.Name, n.startToken.StartLine, n.endToken.EndLine)
fmt.Fprintf(out, "\n%vparts:", indent+" ",) fmt.Fprintf(out, "\n%vparts:", indent+" ",)
for _, nn := range n.parts { for _, nn := range n.parts {

View File

@ -0,0 +1,28 @@
package scalar
import (
"fmt"
"github.com/z7zmey/php-parser/token"
"github.com/z7zmey/php-parser/node"
"io"
)
// EncapsedStringPart represents one literal text fragment inside an
// encapsed (interpolated) string.
type EncapsedStringPart struct {
	node.SimpleNode
	token token.Token // lexer token carrying the fragment text and line span
}

// NewEncapsedStringPart builds an EncapsedStringPart node around the given token.
func NewEncapsedStringPart(t token.Token) node.Node {
	base := node.SimpleNode{Name: "EncapsedStringPart", Attributes: make(map[string]string)}
	return EncapsedStringPart{SimpleNode: base, token: t}
}

// Print writes the node name, start/end lines and quoted token value to out,
// then recursively prints any children with one extra indent level.
func (n EncapsedStringPart) Print(out io.Writer, indent string) {
	fmt.Fprintf(out, "\n%v%v [%d %d] %q", indent, n.Name, n.token.StartLine, n.token.EndLine, n.token.Value)
	for _, child := range n.Children {
		child.Print(out, indent+" ")
	}
}

29
node/scalar/lnumber.go Normal file
View File

@ -0,0 +1,29 @@
package scalar
import (
"fmt"
"github.com/z7zmey/php-parser/token"
"github.com/z7zmey/php-parser/node"
"io"
)
// Lnumber represents an integer (T_LNUMBER) literal scalar.
type Lnumber struct {
	node.SimpleNode
	token token.Token // lexer token carrying the literal text and line span
}

// NewLnumber builds an Lnumber node around the given token.
func NewLnumber(t token.Token) node.Node {
	base := node.SimpleNode{Name: "Lnumber", Attributes: make(map[string]string)}
	return Lnumber{SimpleNode: base, token: t}
}

// Print writes the node name, start/end lines and quoted token value to out,
// then recursively prints any children with one extra indent level.
func (n Lnumber) Print(out io.Writer, indent string) {
	fmt.Fprintf(out, "\n%v%v [%d %d] %q", indent, n.Name, n.token.StartLine, n.token.EndLine, n.token.Value)
	for _, child := range n.Children {
		child.Print(out, indent+" ")
	}
}

View File

@ -0,0 +1,29 @@
package scalar
import (
"fmt"
"github.com/z7zmey/php-parser/token"
"github.com/z7zmey/php-parser/node"
"io"
)
// MagicConstant represents a PHP magic constant scalar
// (__LINE__, __FILE__, __DIR__, __CLASS__, etc., per the grammar rules
// that call NewMagicConstant).
type MagicConstant struct {
	node.SimpleNode
	token token.Token // lexer token carrying the constant text and line span
}

// NewMagicConstant builds a MagicConstant node around the given token.
func NewMagicConstant(t token.Token) node.Node {
	// Fix: previously this constructed and returned a String value, so the
	// node's dynamic type was scalar.String even though its Name said
	// "MagicConstant" — any type assertion/switch on MagicConstant failed.
	return MagicConstant{
		node.SimpleNode{Name: "MagicConstant", Attributes: make(map[string]string)},
		t,
	}
}

// Print writes the node name, start/end lines and quoted token value to out,
// then recursively prints any children with one extra indent level.
func (n MagicConstant) Print(out io.Writer, indent string) {
	fmt.Fprintf(out, "\n%v%v [%d %d] %q", indent, n.Name, n.token.StartLine, n.token.EndLine, n.token.Value)
	for _, nn := range n.Children {
		nn.Print(out, indent+" ")
	}
}

29
node/scalar/string.go Normal file
View File

@ -0,0 +1,29 @@
package scalar
import (
"fmt"
"github.com/z7zmey/php-parser/token"
"github.com/z7zmey/php-parser/node"
"io"
)
// String represents a constant (non-interpolated) PHP string scalar.
type String struct {
	node.SimpleNode
	token token.Token // lexer token carrying the string text and line span
}

// NewString builds a String node around the given token.
func NewString(t token.Token) node.Node {
	base := node.SimpleNode{Name: "String", Attributes: make(map[string]string)}
	return String{SimpleNode: base, token: t}
}

// Print writes the node name, start/end lines and quoted token value to out,
// then recursively prints any children with one extra indent level.
func (n String) Print(out io.Writer, indent string) {
	fmt.Fprintf(out, "\n%v%v [%d %d] %q", indent, n.Name, n.token.StartLine, n.token.EndLine, n.token.Value)
	for _, child := range n.Children {
		child.Print(out, indent+" ")
	}
}

File diff suppressed because it is too large Load Diff

View File

@ -5,6 +5,7 @@ import (
"io" "io"
"github.com/z7zmey/php-parser/token" "github.com/z7zmey/php-parser/token"
"github.com/z7zmey/php-parser/node" "github.com/z7zmey/php-parser/node"
"github.com/z7zmey/php-parser/node/scalar"
) )
var rootnode = node.NewSimpleNode("Root") var rootnode = node.NewSimpleNode("Root")
@ -979,7 +980,7 @@ exit_expr:
backticks_expr: backticks_expr:
/* empty */ { $$ = []node.Node{} } /* empty */ { $$ = []node.Node{} }
| T_ENCAPSED_AND_WHITESPACE { $$ = []node.Node{node.NewNodeScalarEncapsedStringPart($1)} } | T_ENCAPSED_AND_WHITESPACE { $$ = []node.Node{scalar.NewEncapsedStringPart($1)} }
| encaps_list { $$ = $1; } | encaps_list { $$ = $1; }
; ;
@ -991,26 +992,26 @@ ctor_arguments:
dereferencable_scalar: dereferencable_scalar:
T_ARRAY '(' array_pair_list ')' { $$ = $3; } T_ARRAY '(' array_pair_list ')' { $$ = $3; }
| '[' array_pair_list ']' { $$ = $2; } | '[' array_pair_list ']' { $$ = $2; }
| T_CONSTANT_ENCAPSED_STRING { $$ = node.NewNodeScalarString($1) } | T_CONSTANT_ENCAPSED_STRING { $$ = scalar.NewString($1) }
; ;
scalar: scalar:
T_LNUMBER { $$ = node.TokenNode("Lnumber", $1) } T_LNUMBER { $$ = scalar.NewLnumber($1) }
| T_DNUMBER { $$ = node.TokenNode("Dnumber", $1) } | T_DNUMBER { $$ = scalar.NewDnumber($1) }
| T_LINE { $$ = node.TokenNode("MagicConst", $1) } | T_LINE { $$ = scalar.NewMagicConstant($1) }
| T_FILE { $$ = node.TokenNode("MagicConst", $1) } | T_FILE { $$ = scalar.NewMagicConstant($1) }
| T_DIR { $$ = node.TokenNode("MagicConst", $1) } | T_DIR { $$ = scalar.NewMagicConstant($1) }
| T_TRAIT_C { $$ = node.TokenNode("MagicConst", $1) } | T_TRAIT_C { $$ = scalar.NewMagicConstant($1) }
| T_METHOD_C { $$ = node.TokenNode("MagicConst", $1) } | T_METHOD_C { $$ = scalar.NewMagicConstant($1) }
| T_FUNC_C { $$ = node.TokenNode("MagicConst", $1) } | T_FUNC_C { $$ = scalar.NewMagicConstant($1) }
| T_NS_C { $$ = node.TokenNode("MagicConst", $1) } | T_NS_C { $$ = scalar.NewMagicConstant($1) }
| T_CLASS_C { $$ = node.TokenNode("MagicConst", $1) } | T_CLASS_C { $$ = scalar.NewMagicConstant($1) }
| T_START_HEREDOC T_ENCAPSED_AND_WHITESPACE T_END_HEREDOC | T_START_HEREDOC T_ENCAPSED_AND_WHITESPACE T_END_HEREDOC
{ $$ = node.NewNodeScalarString($2) /* TODO: mark as Heredoc*/ } { $$ = scalar.NewString($2) /* TODO: mark as Heredoc*/ }
| T_START_HEREDOC T_END_HEREDOC | T_START_HEREDOC T_END_HEREDOC
{ $$ = node.NewSimpleNode("Scalar").Append(node.TokenNode("Heredoc", $1)).Append(node.TokenNode("HeredocEnd", $2)) } { $$ = node.NewSimpleNode("Scalar").Append(node.TokenNode("Heredoc", $1)).Append(node.TokenNode("HeredocEnd", $2)) }
| '"' encaps_list '"' { $$ = node.NewNodeScalarEncapsed($1, $2, $3) } | '"' encaps_list '"' { $$ = scalar.NewEncapsed($1, $2, $3) }
| T_START_HEREDOC encaps_list T_END_HEREDOC { $$ = node.NewNodeScalarEncapsed($1, $2, $3) } | T_START_HEREDOC encaps_list T_END_HEREDOC { $$ = scalar.NewEncapsed($1, $2, $3) }
| dereferencable_scalar { $$ = $1; } | dereferencable_scalar { $$ = $1; }
| constant { $$ = $1; } | constant { $$ = $1; }
; ;
@ -1135,9 +1136,9 @@ array_pair:
encaps_list: encaps_list:
encaps_list encaps_var { $$ = append($1, $2) } encaps_list encaps_var { $$ = append($1, $2) }
| encaps_list T_ENCAPSED_AND_WHITESPACE { $$ = append($1, node.NewNodeScalarEncapsedStringPart($2)) } | encaps_list T_ENCAPSED_AND_WHITESPACE { $$ = append($1, scalar.NewEncapsedStringPart($2)) }
| encaps_var { $$ = []node.Node{$1} } | encaps_var { $$ = []node.Node{$1} }
| T_ENCAPSED_AND_WHITESPACE encaps_var { $$ = []node.Node{node.NewNodeScalarEncapsedStringPart($1), $2} } | T_ENCAPSED_AND_WHITESPACE encaps_var { $$ = []node.Node{scalar.NewEncapsedStringPart($1), $2} }
; ;
encaps_var: encaps_var:

View File

@ -8,14 +8,15 @@ import (
"github.com/z7zmey/php-parser/node" "github.com/z7zmey/php-parser/node"
"github.com/z7zmey/php-parser/parser" "github.com/z7zmey/php-parser/parser"
"github.com/z7zmey/php-parser/token" "github.com/z7zmey/php-parser/token"
"github.com/z7zmey/php-parser/node/scalar"
) )
func TestDoubleQuotedScalarString(t *testing.T) { func TestDoubleQuotedScalarString(t *testing.T) {
src := `<? "test";` src := `<? "test";`
strToken := token.NewToken([]byte("\"test\""), 1, 1) strToken := token.NewToken([]byte("\"test\""), 1, 1)
strNode := node.NewNodeScalarString(strToken) strNode := scalar.NewString(strToken)
expected := node.SimpleNode("Statements").Append(strNode) expected := node.NewSimpleNode("Statements").Append(strNode)
node := parser.Parse(bytes.NewBufferString(src), "test.php") node := parser.Parse(bytes.NewBufferString(src), "test.php")
@ -30,8 +31,8 @@ func TestMultilineDoubleQuotedScalarString(t *testing.T) {
";` ";`
strToken := token.NewToken([]byte("\"\n\ttest\n\t\""), 1, 3) strToken := token.NewToken([]byte("\"\n\ttest\n\t\""), 1, 3)
strNode := node.NewNodeScalarString(strToken) strNode := scalar.NewString(strToken)
expected := node.SimpleNode("Statements").Append(strNode) expected := node.NewSimpleNode("Statements").Append(strNode)
node := parser.Parse(bytes.NewBufferString(src), "test.php") node := parser.Parse(bytes.NewBufferString(src), "test.php")
@ -44,8 +45,8 @@ func TestSingleQuotedScalarString(t *testing.T) {
src := `<? '$test';` src := `<? '$test';`
strToken := token.NewToken([]byte("'$test'"), 1, 1) strToken := token.NewToken([]byte("'$test'"), 1, 1)
strNode := node.NewNodeScalarString(strToken) strNode := scalar.NewString(strToken)
expected := node.SimpleNode("Statements").Append(strNode) expected := node.NewSimpleNode("Statements").Append(strNode)
node := parser.Parse(bytes.NewBufferString(src), "test.php") node := parser.Parse(bytes.NewBufferString(src), "test.php")
@ -60,8 +61,8 @@ func TestMultilineSingleQuotedScalarString(t *testing.T) {
';` ';`
strToken := token.NewToken([]byte("'\n\t$test\n\t'"), 1, 3) strToken := token.NewToken([]byte("'\n\t$test\n\t'"), 1, 3)
strNode := node.NewNodeScalarString(strToken) strNode := scalar.NewString(strToken)
expected := node.SimpleNode("Statements").Append(strNode) expected := node.NewSimpleNode("Statements").Append(strNode)
node := parser.Parse(bytes.NewBufferString(src), "test.php") node := parser.Parse(bytes.NewBufferString(src), "test.php")
@ -77,8 +78,8 @@ CAD;
` `
strToken := token.NewToken([]byte("\thello\n"), 2, 3) strToken := token.NewToken([]byte("\thello\n"), 2, 3)
strNode := node.NewNodeScalarString(strToken) strNode := scalar.NewString(strToken)
expected := node.SimpleNode("Statements").Append(strNode) expected := node.NewSimpleNode("Statements").Append(strNode)
node := parser.Parse(bytes.NewBufferString(src), "test.php") node := parser.Parse(bytes.NewBufferString(src), "test.php")
@ -94,8 +95,8 @@ CAD;
` `
strToken := token.NewToken([]byte("\thello\n"), 2, 3) strToken := token.NewToken([]byte("\thello\n"), 2, 3)
strNode := node.NewNodeScalarString(strToken) strNode := scalar.NewString(strToken)
expected := node.SimpleNode("Statements").Append(strNode) expected := node.NewSimpleNode("Statements").Append(strNode)
node := parser.Parse(bytes.NewBufferString(src), "test.php") node := parser.Parse(bytes.NewBufferString(src), "test.php")
@ -111,8 +112,8 @@ CAD;
` `
strToken := token.NewToken([]byte("\thello $world\n"), 2, 3) strToken := token.NewToken([]byte("\thello $world\n"), 2, 3)
strNode := node.NewNodeScalarString(strToken) strNode := scalar.NewString(strToken)
expected := node.SimpleNode("Statements").Append(strNode) expected := node.NewSimpleNode("Statements").Append(strNode)
node := parser.Parse(bytes.NewBufferString(src), "test.php") node := parser.Parse(bytes.NewBufferString(src), "test.php")