refactor php7

This commit is contained in:
Vadym Slizov 2020-05-13 00:16:36 +03:00
parent aab9da03f0
commit 6a84d58ee6
54 changed files with 26702 additions and 26481 deletions

View File

@ -22,21 +22,21 @@ bench:
go test -benchmem -bench=. ./php5 go test -benchmem -bench=. ./php5
go test -benchmem -bench=. ./php7 go test -benchmem -bench=. ./php7
compile: ./php5/php5.go ./php7/php7.go ./scanner/scanner.go fmt compile: ./internal/php5/php5.go ./internal/php7/php7.go ./internal/scanner/scanner.go fmt
sed -i '' -e 's/yyErrorVerbose = false/yyErrorVerbose = true/g' ./php7/php7.go sed -i '' -e 's/yyErrorVerbose = false/yyErrorVerbose = true/g' ./internal/php7/php7.go
sed -i '' -e 's/yyErrorVerbose = false/yyErrorVerbose = true/g' ./php5/php5.go sed -i '' -e 's/yyErrorVerbose = false/yyErrorVerbose = true/g' ./internal/php5/php5.go
sed -i '' -e 's/\/\/line/\/\/ line/g' ./php5/php5.go sed -i '' -e 's/\/\/line/\/\/ line/g' ./internal/php5/php5.go
sed -i '' -e 's/\/\/line/\/\/ line/g' ./php7/php7.go sed -i '' -e 's/\/\/line/\/\/ line/g' ./internal/php7/php7.go
sed -i '' -e 's/\/\/line/\/\/ line/g' ./scanner/scanner.go sed -i '' -e 's/\/\/line/\/\/ line/g' ./internal/scanner/scanner.go
rm -f y.output rm -f y.output
./scanner/scanner.go: ./scanner/scanner.rl ./internal/scanner/scanner.go: ./internal/scanner/scanner.rl
ragel -Z -G2 -o $@ $< ragel -Z -G2 -o $@ $<
./php5/php5.go: ./php5/php5.y ./internal/php5/php5.go: ./internal/php5/php5.y
goyacc -o $@ $< goyacc -o $@ $<
./php7/php7.go: ./php7/php7.y ./internal/php7/php7.go: ./internal/php7/php7.y
goyacc -o $@ $< goyacc -o $@ $<
cpu_pprof: cpu_pprof:

View File

@ -1,13 +1,14 @@
package php5 package php5
import ( import (
"strings"
"github.com/z7zmey/php-parser/errors"
"github.com/z7zmey/php-parser/freefloating" "github.com/z7zmey/php-parser/freefloating"
"github.com/z7zmey/php-parser/node" "github.com/z7zmey/php-parser/node"
"github.com/z7zmey/php-parser/position" "strings"
"github.com/z7zmey/php-parser/positionbuilder"
"github.com/z7zmey/php-parser/internal/positionbuilder"
"github.com/z7zmey/php-parser/pkg/ast"
"github.com/z7zmey/php-parser/pkg/errors"
"github.com/z7zmey/php-parser/pkg/position"
"github.com/z7zmey/php-parser/scanner" "github.com/z7zmey/php-parser/scanner"
) )
@ -20,7 +21,7 @@ type Parser struct {
Lexer scanner.Scanner Lexer scanner.Scanner
currentToken *scanner.Token currentToken *scanner.Token
positionBuilder *positionbuilder.PositionBuilder positionBuilder *positionbuilder.PositionBuilder
rootNode node.Node rootNode ast.Vertex
} }
// NewParser creates and returns new Parser // NewParser creates and returns new Parser
@ -54,7 +55,7 @@ func (l *Parser) Error(msg string) {
l.Lexer.AddError(errors.NewError(msg, pos)) l.Lexer.AddError(errors.NewError(msg, pos))
} }
func (l *Parser) WithFreeFloating() { func (l *Parser) WithTokens() {
l.Lexer.SetWithFreeFloating(true) l.Lexer.SetWithFreeFloating(true)
} }
@ -71,7 +72,7 @@ func (l *Parser) Parse() int {
} }
// GetRootNode returns root node // GetRootNode returns root node
func (l *Parser) GetRootNode() node.Node { func (l *Parser) GetRootNode() ast.Vertex {
return l.rootNode return l.rootNode
} }

190
internal/php7/parser.go Normal file
View File

@ -0,0 +1,190 @@
package php7
import (
"bytes"
"github.com/z7zmey/php-parser/internal/positionbuilder"
"github.com/z7zmey/php-parser/internal/scanner"
"github.com/z7zmey/php-parser/pkg/ast"
"github.com/z7zmey/php-parser/pkg/errors"
"github.com/z7zmey/php-parser/pkg/position"
"github.com/z7zmey/php-parser/pkg/token"
)
// Token stores the given scanner token in the symbol value so the
// generated parser (and the helpers below) can access it.
func (lval *yySymType) Token(t *scanner.Token) {
	lval.token = t
}
// Parser is the php7 syntax analyzer. It wraps a scanner.Scanner and
// exposes the AST built by Parse through GetRootNode.
type Parser struct {
	Lexer           scanner.Scanner                  // token source
	currentToken    *scanner.Token                   // last token returned by Lex; used for error positions
	positionBuilder *positionbuilder.PositionBuilder // builds node positions during parsing
	rootNode        ast.Vertex                       // AST root set by the grammar actions
}
// NewParser creates and returns new Parser configured for the given
// source bytes and PHP version string.
func NewParser(src []byte, v string) *Parser {
	lexer := scanner.NewLexer(src)
	lexer.PHPVersion = v

	// remaining fields start at their zero value and are initialized
	// by Parse
	return &Parser{
		Lexer: lexer,
	}
}
// Lex fetches the next token from the lexer, remembers it for error
// reporting, and returns its ID to the generated parser.
func (l *Parser) Lex(lval *yySymType) int {
	tokenID := l.Lexer.Lex(lval)
	l.currentToken = lval.token
	return tokenID
}
// Error records a parse error located at the current token's position.
func (l *Parser) Error(msg string) {
	tkn := l.currentToken
	pos := &position.Position{
		StartLine: tkn.StartLine,
		EndLine:   tkn.EndLine,
		StartPos:  tkn.StartPos,
		EndPos:    tkn.EndPos,
	}
	l.Lexer.AddError(errors.NewError(msg, pos))
}
// WithTokens enables collection of free-floating tokens (whitespace,
// comments, open/close tags) on the lexer.
func (l *Parser) WithTokens() {
	l.Lexer.SetWithTokens(true)
}
// Parse the php7 Parser entrypoint. It resets state left over from a
// previous run (the parser is reusable) and returns the yacc result
// code: 0 on success, non-zero on failure.
func (l *Parser) Parse() int {
	// reset state from any previous Parse call
	l.Lexer.SetErrors(nil)
	l.rootNode = nil
	l.positionBuilder = &positionbuilder.PositionBuilder{}

	// run the generated yacc parser
	return yyParse(l)
}
// GetRootNode returns root node of the AST built by Parse; it is nil
// before Parse has run (or after a failed parse that produced no root).
func (l *Parser) GetRootNode() ast.Vertex {
	return l.rootNode
}
// GetErrors returns errors list collected by the lexer during
// scanning and parsing.
func (l *Parser) GetErrors() []*errors.Error {
	return l.Lexer.GetErrors()
}
// helpers

// lastNode returns the final element of nn, or nil when nn is empty.
func lastNode(nn []ast.Vertex) ast.Vertex {
	if n := len(nn); n > 0 {
		return nn[n-1]
	}
	return nil
}
// firstNode returns the first element of nn, or nil when nn is empty.
// The empty-slice guard keeps it consistent with lastNode and avoids
// an index-out-of-range panic on an empty node list.
func firstNode(nn []ast.Vertex) ast.Vertex {
	if len(nn) == 0 {
		return nil
	}
	return nn[0]
}
// isDollar reports whether r is the PHP variable sigil '$'.
func isDollar(r rune) bool {
	const dollar = '$'
	return r == dollar
}
// MoveFreeFloating transfers the leading (token.Start) free-floating
// tokens from src to dst. It is a no-op when token collection is
// disabled or src carries no tokens.
func (l *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) {
	if !l.Lexer.GetWithFreeFloating() {
		return
	}

	srcTokens := src.GetNode().Tokens
	if srcTokens == nil {
		return
	}

	l.setFreeFloating(dst, token.Start, srcTokens[token.Start])
	delete(srcTokens, token.Start)
}
// setFreeFloating attaches tokens to dst at position p, lazily
// allocating dst's token collection. It is a no-op when token
// collection is disabled or tokens is empty. The parameter was renamed
// from "strings" to avoid shadowing the standard-library package name.
func (l *Parser) setFreeFloating(dst ast.Vertex, p token.Position, tokens []token.Token) {
	if !l.Lexer.GetWithFreeFloating() {
		return
	}

	if len(tokens) == 0 {
		return
	}

	dstCollection := &dst.GetNode().Tokens
	if *dstCollection == nil {
		*dstCollection = make(token.Collection)
	}

	(*dstCollection)[p] = tokens
}
// GetFreeFloatingToken returns a copy of the free-floating tokens
// attached to t, so callers can keep them after t is returned to the
// token pool. It returns an empty (non-nil) slice when token
// collection is disabled.
func (l *Parser) GetFreeFloatingToken(t *scanner.Token) []token.Token {
	if !l.Lexer.GetWithFreeFloating() {
		return []token.Token{}
	}

	tokens := make([]token.Token, len(t.Tokens))
	copy(tokens, t.Tokens)
	return tokens
}
// splitSemiColonAndPhpCloseTag splits a combined ";?>" token recorded
// on prevNode into a plain ';' (kept on prevNode) plus optional
// whitespace and a close tag, which are prepended to htmlNode's
// leading tokens. No-op when token collection is disabled or prevNode
// carries no semicolon tokens.
func (l *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.Vertex) {
	if l.Lexer.GetWithFreeFloating() == false {
		return
	}
	semiColon := prevNode.GetNode().Tokens[token.SemiColon]
	delete(prevNode.GetNode().Tokens, token.SemiColon)
	if len(semiColon) == 0 {
		return
	}
	// keep only the literal ';' byte on the previous node
	if semiColon[0].Value[0] == ';' {
		l.setFreeFloating(prevNode, token.SemiColon, []token.Token{
			{
				ID:    token.ID(';'),
				Value: semiColon[0].Value[0:1],
			},
		})
	}
	// tlen is the close-tag length: 2 for "?>", 3 when it includes a
	// trailing newline ("?>\n")
	vlen := len(semiColon[0].Value)
	tlen := 2
	if bytes.HasSuffix(semiColon[0].Value, []byte("?>\n")) {
		tlen = 3
	}
	// whitespace between the ';' and the close tag, if any
	// NOTE(review): the [1:...] slice assumes the value starts with a
	// ';' byte even when the branch above was not taken — confirm the
	// grammar guarantees that.
	phpCloseTag := []token.Token{}
	if vlen-tlen > 1 {
		phpCloseTag = append(phpCloseTag, token.Token{
			ID:    token.T_WHITESPACE,
			Value: semiColon[0].Value[1 : vlen-tlen],
		})
	}
	phpCloseTag = append(phpCloseTag, token.Token{
		ID:    T_CLOSE_TAG,
		Value: semiColon[0].Value[vlen-tlen:],
	})
	// prepend the close tag (and preceding whitespace) to htmlNode's
	// leading tokens
	l.setFreeFloating(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...))
}
// returnTokenToPool hands every token referenced by the rule's RHS
// symbols back to the lexer's pool and clears the references so a
// recycled token cannot be reached afterwards. Index 0 is skipped by
// yacc convention (it is the LHS placeholder).
func (p *Parser) returnTokenToPool(yyDollar []yySymType, yyVAL *yySymType) {
	for i := 1; i < len(yyDollar); i++ {
		tkn := yyDollar[i].token
		if tkn != nil {
			p.Lexer.ReturnTokenToPool(tkn)
		}
		yyDollar[i].token = nil
	}

	yyVAL.token = nil
}

5655
internal/php7/php7.y Normal file

File diff suppressed because it is too large Load Diff

View File

@ -3,7 +3,7 @@ package php7_test
import ( import (
"testing" "testing"
"github.com/z7zmey/php-parser/php7" "github.com/z7zmey/php-parser/internal/php7"
) )
func BenchmarkPhp7(b *testing.B) { func BenchmarkPhp7(b *testing.B) {

20023
internal/php7/php7_test.go Normal file

File diff suppressed because it is too large Load Diff

View File

@ -1,9 +1,9 @@
package positionbuilder package positionbuilder
import ( import (
"github.com/z7zmey/php-parser/node" "github.com/z7zmey/php-parser/internal/scanner"
"github.com/z7zmey/php-parser/position" "github.com/z7zmey/php-parser/pkg/ast"
"github.com/z7zmey/php-parser/scanner" "github.com/z7zmey/php-parser/pkg/position"
) )
// PositionBuilder provides functions to construct positions // PositionBuilder provides functions to construct positions
@ -19,7 +19,7 @@ type endPos struct {
endPos int endPos int
} }
func (b *PositionBuilder) getListStartPos(l []node.Node) startPos { func (b *PositionBuilder) getListStartPos(l []ast.Vertex) startPos {
if l == nil { if l == nil {
return startPos{-1, -1} return startPos{-1, -1}
} }
@ -31,7 +31,7 @@ func (b *PositionBuilder) getListStartPos(l []node.Node) startPos {
return b.getNodeStartPos(l[0]) return b.getNodeStartPos(l[0])
} }
func (b *PositionBuilder) getNodeStartPos(n node.Node) startPos { func (b *PositionBuilder) getNodeStartPos(n ast.Vertex) startPos {
sl := -1 sl := -1
sp := -1 sp := -1
@ -39,7 +39,7 @@ func (b *PositionBuilder) getNodeStartPos(n node.Node) startPos {
return startPos{-1, -1} return startPos{-1, -1}
} }
p := n.GetPosition() p := n.GetNode().Position
if p != nil { if p != nil {
sl = p.StartLine sl = p.StartLine
sp = p.StartPos sp = p.StartPos
@ -48,7 +48,7 @@ func (b *PositionBuilder) getNodeStartPos(n node.Node) startPos {
return startPos{sl, sp} return startPos{sl, sp}
} }
func (b *PositionBuilder) getListEndPos(l []node.Node) endPos { func (b *PositionBuilder) getListEndPos(l []ast.Vertex) endPos {
if l == nil { if l == nil {
return endPos{-1, -1} return endPos{-1, -1}
} }
@ -60,7 +60,7 @@ func (b *PositionBuilder) getListEndPos(l []node.Node) endPos {
return b.getNodeEndPos(l[len(l)-1]) return b.getNodeEndPos(l[len(l)-1])
} }
func (b *PositionBuilder) getNodeEndPos(n node.Node) endPos { func (b *PositionBuilder) getNodeEndPos(n ast.Vertex) endPos {
el := -1 el := -1
ep := -1 ep := -1
@ -68,7 +68,7 @@ func (b *PositionBuilder) getNodeEndPos(n node.Node) endPos {
return endPos{-1, -1} return endPos{-1, -1}
} }
p := n.GetPosition() p := n.GetNode().Position
if p != nil { if p != nil {
el = p.EndLine el = p.EndLine
ep = p.EndPos ep = p.EndPos
@ -78,7 +78,7 @@ func (b *PositionBuilder) getNodeEndPos(n node.Node) endPos {
} }
// NewNodeListPosition returns new Position // NewNodeListPosition returns new Position
func (b *PositionBuilder) NewNodeListPosition(list []node.Node) *position.Position { func (b *PositionBuilder) NewNodeListPosition(list []ast.Vertex) *position.Position {
return &position.Position{ return &position.Position{
StartLine: b.getListStartPos(list).startLine, StartLine: b.getListStartPos(list).startLine,
EndLine: b.getListEndPos(list).endLine, EndLine: b.getListEndPos(list).endLine,
@ -88,7 +88,7 @@ func (b *PositionBuilder) NewNodeListPosition(list []node.Node) *position.Positi
} }
// NewNodePosition returns new Position // NewNodePosition returns new Position
func (b *PositionBuilder) NewNodePosition(n node.Node) *position.Position { func (b *PositionBuilder) NewNodePosition(n ast.Vertex) *position.Position {
return &position.Position{ return &position.Position{
StartLine: b.getNodeStartPos(n).startLine, StartLine: b.getNodeStartPos(n).startLine,
EndLine: b.getNodeEndPos(n).endLine, EndLine: b.getNodeEndPos(n).endLine,
@ -118,7 +118,7 @@ func (b *PositionBuilder) NewTokensPosition(startToken *scanner.Token, endToken
} }
// NewTokenNodePosition returns new Position // NewTokenNodePosition returns new Position
func (b *PositionBuilder) NewTokenNodePosition(t *scanner.Token, n node.Node) *position.Position { func (b *PositionBuilder) NewTokenNodePosition(t *scanner.Token, n ast.Vertex) *position.Position {
return &position.Position{ return &position.Position{
StartLine: t.StartLine, StartLine: t.StartLine,
EndLine: b.getNodeEndPos(n).endLine, EndLine: b.getNodeEndPos(n).endLine,
@ -128,7 +128,7 @@ func (b *PositionBuilder) NewTokenNodePosition(t *scanner.Token, n node.Node) *p
} }
// NewNodeTokenPosition returns new Position // NewNodeTokenPosition returns new Position
func (b *PositionBuilder) NewNodeTokenPosition(n node.Node, t *scanner.Token) *position.Position { func (b *PositionBuilder) NewNodeTokenPosition(n ast.Vertex, t *scanner.Token) *position.Position {
return &position.Position{ return &position.Position{
StartLine: b.getNodeStartPos(n).startLine, StartLine: b.getNodeStartPos(n).startLine,
EndLine: t.EndLine, EndLine: t.EndLine,
@ -138,7 +138,7 @@ func (b *PositionBuilder) NewNodeTokenPosition(n node.Node, t *scanner.Token) *p
} }
// NewNodesPosition returns new Position // NewNodesPosition returns new Position
func (b *PositionBuilder) NewNodesPosition(startNode node.Node, endNode node.Node) *position.Position { func (b *PositionBuilder) NewNodesPosition(startNode ast.Vertex, endNode ast.Vertex) *position.Position {
return &position.Position{ return &position.Position{
StartLine: b.getNodeStartPos(startNode).startLine, StartLine: b.getNodeStartPos(startNode).startLine,
EndLine: b.getNodeEndPos(endNode).endLine, EndLine: b.getNodeEndPos(endNode).endLine,
@ -148,7 +148,7 @@ func (b *PositionBuilder) NewNodesPosition(startNode node.Node, endNode node.Nod
} }
// NewNodeListTokenPosition returns new Position // NewNodeListTokenPosition returns new Position
func (b *PositionBuilder) NewNodeListTokenPosition(list []node.Node, t *scanner.Token) *position.Position { func (b *PositionBuilder) NewNodeListTokenPosition(list []ast.Vertex, t *scanner.Token) *position.Position {
return &position.Position{ return &position.Position{
StartLine: b.getListStartPos(list).startLine, StartLine: b.getListStartPos(list).startLine,
EndLine: t.EndLine, EndLine: t.EndLine,
@ -158,7 +158,7 @@ func (b *PositionBuilder) NewNodeListTokenPosition(list []node.Node, t *scanner.
} }
// NewTokenNodeListPosition returns new Position // NewTokenNodeListPosition returns new Position
func (b *PositionBuilder) NewTokenNodeListPosition(t *scanner.Token, list []node.Node) *position.Position { func (b *PositionBuilder) NewTokenNodeListPosition(t *scanner.Token, list []ast.Vertex) *position.Position {
return &position.Position{ return &position.Position{
StartLine: t.StartLine, StartLine: t.StartLine,
EndLine: b.getListEndPos(list).endLine, EndLine: b.getListEndPos(list).endLine,
@ -168,7 +168,7 @@ func (b *PositionBuilder) NewTokenNodeListPosition(t *scanner.Token, list []node
} }
// NewNodeNodeListPosition returns new Position // NewNodeNodeListPosition returns new Position
func (b *PositionBuilder) NewNodeNodeListPosition(n node.Node, list []node.Node) *position.Position { func (b *PositionBuilder) NewNodeNodeListPosition(n ast.Vertex, list []ast.Vertex) *position.Position {
return &position.Position{ return &position.Position{
StartLine: b.getNodeStartPos(n).startLine, StartLine: b.getNodeStartPos(n).startLine,
EndLine: b.getListEndPos(list).endLine, EndLine: b.getListEndPos(list).endLine,
@ -178,7 +178,7 @@ func (b *PositionBuilder) NewNodeNodeListPosition(n node.Node, list []node.Node)
} }
// NewNodeListNodePosition returns new Position // NewNodeListNodePosition returns new Position
func (b *PositionBuilder) NewNodeListNodePosition(list []node.Node, n node.Node) *position.Position { func (b *PositionBuilder) NewNodeListNodePosition(list []ast.Vertex, n ast.Vertex) *position.Position {
return &position.Position{ return &position.Position{
StartLine: b.getListStartPos(list).startLine, StartLine: b.getListStartPos(list).startLine,
EndLine: b.getNodeEndPos(n).endLine, EndLine: b.getNodeEndPos(n).endLine,
@ -188,7 +188,7 @@ func (b *PositionBuilder) NewNodeListNodePosition(list []node.Node, n node.Node)
} }
// NewOptionalListTokensPosition returns new Position // NewOptionalListTokensPosition returns new Position
func (b *PositionBuilder) NewOptionalListTokensPosition(list []node.Node, t *scanner.Token, endToken *scanner.Token) *position.Position { func (b *PositionBuilder) NewOptionalListTokensPosition(list []ast.Vertex, t *scanner.Token, endToken *scanner.Token) *position.Position {
if list == nil { if list == nil {
return &position.Position{ return &position.Position{
StartLine: t.StartLine, StartLine: t.StartLine,

View File

@ -0,0 +1,485 @@
package positionbuilder_test
import (
"gotest.tools/assert"
"testing"
"github.com/z7zmey/php-parser/internal/positionbuilder"
"github.com/z7zmey/php-parser/internal/scanner"
"github.com/z7zmey/php-parser/pkg/ast"
"github.com/z7zmey/php-parser/pkg/position"
)
// TestNewTokenPosition verifies that a position built from a single
// token copies the token's line/offset boundaries. The original had
// the same assertion duplicated on two consecutive lines; the copy is
// removed.
func TestNewTokenPosition(t *testing.T) {
	builder := positionbuilder.PositionBuilder{}
	tkn := &scanner.Token{
		Value:     []byte(`foo`),
		StartLine: 1,
		EndLine:   1,
		StartPos:  0,
		EndPos:    3,
	}
	pos := builder.NewTokenPosition(tkn)
	assert.DeepEqual(t, &position.Position{1, 1, 0, 3}, pos)
}
// TestNewTokensPosition verifies the position spanning two tokens.
func TestNewTokensPosition(t *testing.T) {
	builder := positionbuilder.PositionBuilder{}
	token1 := &scanner.Token{
		Value:     []byte(`foo`),
		StartLine: 1,
		EndLine:   1,
		StartPos:  0,
		EndPos:    3,
	}
	token2 := &scanner.Token{
		Value:     []byte(`foo`),
		StartLine: 2,
		EndLine:   2,
		StartPos:  4,
		EndPos:    6,
	}
	pos := builder.NewTokensPosition(token1, token2)
	assert.DeepEqual(t, &position.Position{1, 2, 0, 6}, pos)
}
// TestNewNodePosition verifies the position built from a single node.
func TestNewNodePosition(t *testing.T) {
	n := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 1,
				EndLine:   1,
				StartPos:  0,
				EndPos:    3,
			},
		},
	}
	builder := positionbuilder.PositionBuilder{}
	pos := builder.NewNodePosition(n)
	assert.DeepEqual(t, &position.Position{1, 1, 0, 3}, pos)
}
// TestNewTokenNodePosition verifies the span from a token to a node.
func TestNewTokenNodePosition(t *testing.T) {
	tkn := &scanner.Token{
		Value:     []byte(`foo`),
		StartLine: 1,
		EndLine:   1,
		StartPos:  0,
		EndPos:    3,
	}
	n := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 2,
				EndLine:   2,
				StartPos:  4,
				EndPos:    12,
			},
		},
	}
	builder := positionbuilder.PositionBuilder{}
	pos := builder.NewTokenNodePosition(tkn, n)
	assert.DeepEqual(t, &position.Position{1, 2, 0, 12}, pos)
}
// TestNewNodeTokenPosition verifies the span from a node to a token.
func TestNewNodeTokenPosition(t *testing.T) {
	n := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 1,
				EndLine:   1,
				StartPos:  0,
				EndPos:    9,
			},
		},
	}
	tkn := &scanner.Token{
		Value:     []byte(`foo`),
		StartLine: 2,
		EndLine:   2,
		StartPos:  10,
		EndPos:    12,
	}
	builder := positionbuilder.PositionBuilder{}
	pos := builder.NewNodeTokenPosition(n, tkn)
	assert.DeepEqual(t, &position.Position{1, 2, 0, 12}, pos)
}
// TestNewNodeListPosition verifies the span covering a list of nodes.
func TestNewNodeListPosition(t *testing.T) {
	n1 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 1,
				EndLine:   1,
				StartPos:  0,
				EndPos:    9,
			},
		},
	}
	n2 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 2,
				EndLine:   2,
				StartPos:  10,
				EndPos:    19,
			},
		},
	}
	builder := positionbuilder.PositionBuilder{}
	pos := builder.NewNodeListPosition([]ast.Vertex{n1, n2})
	assert.DeepEqual(t, &position.Position{1, 2, 0, 19}, pos)
}
// TestNewNodesPosition verifies the span between two nodes.
func TestNewNodesPosition(t *testing.T) {
	n1 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 1,
				EndLine:   1,
				StartPos:  0,
				EndPos:    9,
			},
		},
	}
	n2 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 2,
				EndLine:   2,
				StartPos:  10,
				EndPos:    19,
			},
		},
	}
	builder := positionbuilder.PositionBuilder{}
	pos := builder.NewNodesPosition(n1, n2)
	assert.DeepEqual(t, &position.Position{1, 2, 0, 19}, pos)
}
// TestNewNodeListTokenPosition verifies the span from a node list to a token.
func TestNewNodeListTokenPosition(t *testing.T) {
	n1 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 1,
				EndLine:   1,
				StartPos:  0,
				EndPos:    9,
			},
		},
	}
	n2 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 2,
				EndLine:   2,
				StartPos:  10,
				EndPos:    19,
			},
		},
	}
	tkn := &scanner.Token{
		Value:     []byte(`foo`),
		StartLine: 3,
		EndLine:   3,
		StartPos:  20,
		EndPos:    22,
	}
	builder := positionbuilder.PositionBuilder{}
	pos := builder.NewNodeListTokenPosition([]ast.Vertex{n1, n2}, tkn)
	assert.DeepEqual(t, &position.Position{1, 3, 0, 22}, pos)
}
// TestNewTokenNodeListPosition verifies the span from a token to a node list.
func TestNewTokenNodeListPosition(t *testing.T) {
	tkn := &scanner.Token{
		Value:     []byte(`foo`),
		StartLine: 1,
		EndLine:   1,
		StartPos:  0,
		EndPos:    2,
	}
	n1 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 2,
				EndLine:   2,
				StartPos:  3,
				EndPos:    10,
			},
		},
	}
	n2 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 3,
				EndLine:   3,
				StartPos:  11,
				EndPos:    20,
			},
		},
	}
	builder := positionbuilder.PositionBuilder{}
	pos := builder.NewTokenNodeListPosition(tkn, []ast.Vertex{n1, n2})
	assert.DeepEqual(t, &position.Position{1, 3, 0, 20}, pos)
}
// TestNewNodeNodeListPosition verifies the span from a node to a node list.
func TestNewNodeNodeListPosition(t *testing.T) {
	n1 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 1,
				EndLine:   1,
				StartPos:  0,
				EndPos:    8,
			},
		},
	}
	n2 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 2,
				EndLine:   2,
				StartPos:  9,
				EndPos:    17,
			},
		},
	}
	n3 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 3,
				EndLine:   3,
				StartPos:  18,
				EndPos:    26,
			},
		},
	}
	builder := positionbuilder.PositionBuilder{}
	pos := builder.NewNodeNodeListPosition(n1, []ast.Vertex{n2, n3})
	assert.DeepEqual(t, &position.Position{1, 3, 0, 26}, pos)
}
// TestNewNodeListNodePosition verifies the span from a node list to a node.
func TestNewNodeListNodePosition(t *testing.T) {
	n1 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 1,
				EndLine:   1,
				StartPos:  0,
				EndPos:    8,
			},
		},
	}
	n2 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 2,
				EndLine:   2,
				StartPos:  9,
				EndPos:    17,
			},
		},
	}
	n3 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 3,
				EndLine:   3,
				StartPos:  18,
				EndPos:    26,
			},
		},
	}
	builder := positionbuilder.PositionBuilder{}
	pos := builder.NewNodeListNodePosition([]ast.Vertex{n1, n2}, n3)
	assert.DeepEqual(t, &position.Position{1, 3, 0, 26}, pos)
}
// TestNewOptionalListTokensPosition verifies the nil-list fallback:
// with no list the span is taken from the two tokens alone.
func TestNewOptionalListTokensPosition(t *testing.T) {
	builder := positionbuilder.PositionBuilder{}
	token1 := &scanner.Token{
		Value:     []byte(`foo`),
		StartLine: 1,
		EndLine:   1,
		StartPos:  0,
		EndPos:    3,
	}
	token2 := &scanner.Token{
		Value:     []byte(`foo`),
		StartLine: 2,
		EndLine:   2,
		StartPos:  4,
		EndPos:    6,
	}
	pos := builder.NewOptionalListTokensPosition(nil, token1, token2)
	assert.DeepEqual(t, &position.Position{1, 2, 0, 6}, pos)
}
// TestNewOptionalListTokensPosition2 verifies that with a non-nil list
// the span starts at the list and ends at the last token.
func TestNewOptionalListTokensPosition2(t *testing.T) {
	n2 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 2,
				EndLine:   2,
				StartPos:  9,
				EndPos:    17,
			},
		},
	}
	n3 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 3,
				EndLine:   3,
				StartPos:  18,
				EndPos:    26,
			},
		},
	}
	builder := positionbuilder.PositionBuilder{}
	token1 := &scanner.Token{
		Value:     []byte(`foo`),
		StartLine: 4,
		EndLine:   4,
		StartPos:  27,
		EndPos:    29,
	}
	token2 := &scanner.Token{
		Value:     []byte(`foo`),
		StartLine: 5,
		EndLine:   5,
		StartPos:  30,
		EndPos:    32,
	}
	pos := builder.NewOptionalListTokensPosition([]ast.Vertex{n2, n3}, token1, token2)
	assert.DeepEqual(t, &position.Position{2, 5, 9, 32}, pos)
}
// TestNilNodePos verifies the sentinel -1 position for nil nodes.
func TestNilNodePos(t *testing.T) {
	builder := positionbuilder.PositionBuilder{}
	pos := builder.NewNodesPosition(nil, nil)
	assert.DeepEqual(t, &position.Position{-1, -1, -1, -1}, pos)
}
// TestNilNodeListPos verifies the -1 end sentinel when the list is nil.
func TestNilNodeListPos(t *testing.T) {
	n1 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 1,
				EndLine:   1,
				StartPos:  0,
				EndPos:    8,
			},
		},
	}
	builder := positionbuilder.PositionBuilder{}
	pos := builder.NewNodeNodeListPosition(n1, nil)
	assert.DeepEqual(t, &position.Position{1, -1, 0, -1}, pos)
}
// TestNilNodeListTokenPos verifies the -1 start sentinel when the list is nil.
func TestNilNodeListTokenPos(t *testing.T) {
	token := &scanner.Token{
		Value:     []byte(`foo`),
		StartLine: 1,
		EndLine:   1,
		StartPos:  0,
		EndPos:    3,
	}
	builder := positionbuilder.PositionBuilder{}
	pos := builder.NewNodeListTokenPosition(nil, token)
	assert.DeepEqual(t, &position.Position{-1, 1, -1, 3}, pos)
}
// TestEmptyNodeListPos verifies an empty (non-nil) list behaves like nil.
func TestEmptyNodeListPos(t *testing.T) {
	n1 := &ast.Identifier{
		Node: ast.Node{
			Position: &position.Position{
				StartLine: 1,
				EndLine:   1,
				StartPos:  0,
				EndPos:    8,
			},
		},
	}
	builder := positionbuilder.PositionBuilder{}
	pos := builder.NewNodeNodeListPosition(n1, []ast.Vertex{})
	assert.DeepEqual(t, &position.Position{1, -1, 0, -1}, pos)
}
// TestEmptyNodeListTokenPos verifies an empty list with a token yields
// the -1 start sentinel.
func TestEmptyNodeListTokenPos(t *testing.T) {
	token := &scanner.Token{
		Value:     []byte(`foo`),
		StartLine: 1,
		EndLine:   1,
		StartPos:  0,
		EndPos:    3,
	}
	builder := positionbuilder.PositionBuilder{}
	pos := builder.NewNodeListTokenPosition([]ast.Vertex{}, token)
	assert.DeepEqual(t, &position.Position{-1, 1, -1, 3}, pos)
}

View File

@ -4,10 +4,10 @@ import (
"bytes" "bytes"
"strings" "strings"
"github.com/z7zmey/php-parser/errors" "github.com/z7zmey/php-parser/internal/version"
"github.com/z7zmey/php-parser/freefloating" "github.com/z7zmey/php-parser/pkg/errors"
"github.com/z7zmey/php-parser/position" "github.com/z7zmey/php-parser/pkg/position"
"github.com/z7zmey/php-parser/version" "github.com/z7zmey/php-parser/pkg/token"
) )
type Scanner interface { type Scanner interface {
@ -17,7 +17,7 @@ type Scanner interface {
SetPhpDocComment(string) SetPhpDocComment(string)
GetErrors() []*errors.Error GetErrors() []*errors.Error
GetWithFreeFloating() bool GetWithFreeFloating() bool
SetWithFreeFloating(bool) SetWithTokens(bool)
AddError(e *errors.Error) AddError(e *errors.Error)
SetErrors(e []*errors.Error) SetErrors(e []*errors.Error)
} }
@ -36,8 +36,8 @@ type Lexer struct {
heredocLabel []byte heredocLabel []byte
TokenPool *TokenPool TokenPool *TokenPool
FreeFloating []freefloating.String Tokens []token.Token
WithFreeFloating bool WithTokens bool
PhpDocComment string PhpDocComment string
lastToken *Token lastToken *Token
Errors []*errors.Error Errors []*errors.Error
@ -62,11 +62,11 @@ func (l *Lexer) GetErrors() []*errors.Error {
} }
func (l *Lexer) GetWithFreeFloating() bool { func (l *Lexer) GetWithFreeFloating() bool {
return l.WithFreeFloating return l.WithTokens
} }
func (l *Lexer) SetWithFreeFloating(b bool) { func (l *Lexer) SetWithTokens(b bool) {
l.WithFreeFloating = b l.WithTokens = b
} }
func (l *Lexer) AddError(e *errors.Error) { func (l *Lexer) AddError(e *errors.Error) {
@ -84,22 +84,14 @@ func (lex *Lexer) setTokenPosition(token *Token) {
token.EndPos = lex.te token.EndPos = lex.te
} }
func (lex *Lexer) addFreeFloating(t freefloating.StringType, ps, pe int) { func (lex *Lexer) addToken(id TokenID, ps, pe int) {
if !lex.WithFreeFloating { if !lex.WithTokens {
return return
} }
pos := position.NewPosition( lex.Tokens = append(lex.Tokens, token.Token{
lex.NewLines.GetLine(lex.ts), ID: token.ID(id),
lex.NewLines.GetLine(lex.te-1), Value: lex.data[ps:pe],
lex.ts,
lex.te,
)
lex.FreeFloating = append(lex.FreeFloating, freefloating.String{
StringType: t,
Value: string(lex.data[ps:pe]),
Position: pos,
}) })
} }

View File

@ -4,8 +4,6 @@ import (
"fmt" "fmt"
"strconv" "strconv"
"strings" "strings"
"github.com/z7zmey/php-parser/freefloating"
) )
%%{ %%{
@ -30,13 +28,13 @@ func NewLexer(data []byte) *Lexer {
} }
func (lex *Lexer) Lex(lval Lval) int { func (lex *Lexer) Lex(lval Lval) int {
lex.FreeFloating = nil lex.Tokens = nil
eof := lex.pe eof := lex.pe
var tok TokenID var tok TokenID
token := lex.TokenPool.Get() token := lex.TokenPool.Get()
token.FreeFloating = lex.FreeFloating token.Tokens = lex.Tokens
token.Value = string(lex.data[0:0]) token.Value = lex.data[0:0]
lblStart := 0 lblStart := 0
lblEnd := 0 lblEnd := 0
@ -136,7 +134,7 @@ func (lex *Lexer) Lex(lval Lval) int {
main := |* main := |*
"#!" any* :>> newline => { "#!" any* :>> newline => {
lex.addFreeFloating(freefloating.CommentType, lex.ts, lex.te) lex.addToken(T_COMMENT, lex.ts, lex.te)
}; };
any => { any => {
fnext html; fnext html;
@ -152,12 +150,12 @@ func (lex *Lexer) Lex(lval Lval) int {
fbreak; fbreak;
}; };
'<?' => { '<?' => {
lex.addFreeFloating(freefloating.TokenType, lex.ts, lex.te) lex.addToken(T_OPEN_TAG, lex.ts, lex.te)
fnext php; fnext php;
}; };
'<?php'i ( [ \t] | newline ) => { '<?php'i ( [ \t] | newline ) => {
lex.ungetCnt(lex.te - lex.ts - 5) lex.ungetCnt(lex.te - lex.ts - 5)
lex.addFreeFloating(freefloating.TokenType, lex.ts, lex.ts+5) lex.addToken(T_OPEN_TAG, lex.ts, lex.ts+5)
fnext php; fnext php;
}; };
'<?='i => { '<?='i => {
@ -169,7 +167,7 @@ func (lex *Lexer) Lex(lval Lval) int {
*|; *|;
php := |* php := |*
whitespace_line* => {lex.addFreeFloating(freefloating.WhiteSpaceType, lex.ts, lex.te)}; whitespace_line* => {lex.addToken(T_WHITESPACE, lex.ts, lex.te)};
'?>' newline? => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext html; fbreak;}; '?>' newline? => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext html; fbreak;};
';' whitespace_line* '?>' newline? => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext html; fbreak;}; ';' whitespace_line* '?>' newline? => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext html; fbreak;};
@ -329,17 +327,19 @@ func (lex *Lexer) Lex(lval Lval) int {
('#' | '//') any_line* when is_not_comment_end => { ('#' | '//') any_line* when is_not_comment_end => {
lex.ungetStr("?>") lex.ungetStr("?>")
lex.addFreeFloating(freefloating.CommentType, lex.ts, lex.te) lex.addToken(T_COMMENT, lex.ts, lex.te)
}; };
'/*' any_line* :>> '*/' { '/*' any_line* :>> '*/' {
isDocComment := false; isDocComment := false;
if lex.te - lex.ts > 4 && string(lex.data[lex.ts:lex.ts+3]) == "/**" { if lex.te - lex.ts > 4 && string(lex.data[lex.ts:lex.ts+3]) == "/**" {
isDocComment = true; isDocComment = true;
} }
lex.addFreeFloating(freefloating.CommentType, lex.ts, lex.te)
if isDocComment { if isDocComment {
lex.PhpDocComment = string(lex.data[lex.ts:lex.te]) lex.PhpDocComment = string(lex.data[lex.ts:lex.te])
lex.addToken(T_DOC_COMMENT, lex.ts, lex.te)
} else {
lex.addToken(T_COMMENT, lex.ts, lex.te)
} }
}; };
@ -388,7 +388,7 @@ func (lex *Lexer) Lex(lval Lval) int {
*|; *|;
property := |* property := |*
whitespace_line* => {lex.addFreeFloating(freefloating.WhiteSpaceType, lex.ts, lex.te)}; whitespace_line* => {lex.addToken(T_WHITESPACE, lex.ts, lex.te)};
"->" => {lex.setTokenPosition(token); tok = T_OBJECT_OPERATOR; fbreak;}; "->" => {lex.setTokenPosition(token); tok = T_OBJECT_OPERATOR; fbreak;};
varname => {lex.setTokenPosition(token); tok = T_STRING; fnext php; fbreak;}; varname => {lex.setTokenPosition(token); tok = T_STRING; fnext php; fbreak;};
any => {lex.ungetCnt(1); fgoto php;}; any => {lex.ungetCnt(1); fgoto php;};
@ -484,32 +484,32 @@ func (lex *Lexer) Lex(lval Lval) int {
*|; *|;
halt_compiller_open_parenthesis := |* halt_compiller_open_parenthesis := |*
whitespace_line* => {lex.addFreeFloating(freefloating.WhiteSpaceType, lex.ts, lex.te)}; whitespace_line* => {lex.addToken(T_WHITESPACE, lex.ts, lex.te)};
"(" => {lex.setTokenPosition(token); tok = TokenID(int('(')); fnext halt_compiller_close_parenthesis; fbreak;}; "(" => {lex.setTokenPosition(token); tok = TokenID(int('(')); fnext halt_compiller_close_parenthesis; fbreak;};
any => {lex.ungetCnt(1); fnext php;}; any => {lex.ungetCnt(1); fnext php;};
*|; *|;
halt_compiller_close_parenthesis := |* halt_compiller_close_parenthesis := |*
whitespace_line* => {lex.addFreeFloating(freefloating.WhiteSpaceType, lex.ts, lex.te)}; whitespace_line* => {lex.addToken(T_WHITESPACE, lex.ts, lex.te)};
")" => {lex.setTokenPosition(token); tok = TokenID(int(')')); fnext halt_compiller_close_semicolon; fbreak;}; ")" => {lex.setTokenPosition(token); tok = TokenID(int(')')); fnext halt_compiller_close_semicolon; fbreak;};
any => {lex.ungetCnt(1); fnext php;}; any => {lex.ungetCnt(1); fnext php;};
*|; *|;
halt_compiller_close_semicolon := |* halt_compiller_close_semicolon := |*
whitespace_line* => {lex.addFreeFloating(freefloating.WhiteSpaceType, lex.ts, lex.te)}; whitespace_line* => {lex.addToken(T_WHITESPACE, lex.ts, lex.te)};
";" => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext halt_compiller_end; fbreak;}; ";" => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext halt_compiller_end; fbreak;};
any => {lex.ungetCnt(1); fnext php;}; any => {lex.ungetCnt(1); fnext php;};
*|; *|;
halt_compiller_end := |* halt_compiller_end := |*
any_line* => { lex.addFreeFloating(freefloating.TokenType, lex.ts, lex.te); }; any_line* => { lex.addToken(T_HALT_COMPILER, lex.ts, lex.te); };
*|; *|;
write exec; write exec;
}%% }%%
token.FreeFloating = lex.FreeFloating token.Tokens = lex.Tokens
token.Value = string(lex.data[lex.ts:lex.te]) token.Value = lex.data[lex.ts:lex.te]
lval.Token(token) lval.Token(token)

View File

@ -3,8 +3,7 @@ package scanner
import ( import (
"testing" "testing"
"github.com/z7zmey/php-parser/freefloating" "github.com/z7zmey/php-parser/pkg/token"
"github.com/z7zmey/php-parser/position"
"gotest.tools/assert" "gotest.tools/assert"
) )
@ -361,7 +360,7 @@ func TestTokens(t *testing.T) {
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
actual := []string{} actual := []string{}
@ -390,15 +389,15 @@ func TestShebang(t *testing.T) {
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
actual := []string{} actual := []string{}
token := lexer.Lex(lv) token := lexer.Lex(lv)
assert.Equal(t, token, int(T_DNUMBER)) assert.Equal(t, token, int(T_DNUMBER))
for _, tt := range lv.Tkn.FreeFloating { for _, tt := range lv.Tkn.Tokens {
actual = append(actual, tt.Value) actual = append(actual, string(tt.Value))
} }
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
@ -411,12 +410,12 @@ func TestShebangHtml(t *testing.T) {
` `
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
token := lexer.Lex(lv) token := lexer.Lex(lv)
assert.Equal(t, token, int(T_INLINE_HTML)) assert.Equal(t, token, int(T_INLINE_HTML))
assert.Equal(t, lv.Tkn.FreeFloating[0].Value, "#!/usr/bin/env php\n") assert.Equal(t, string(lv.Tkn.Tokens[0].Value), "#!/usr/bin/env php\n")
token = lexer.Lex(lv) token = lexer.Lex(lv)
assert.Equal(t, token, int(T_DNUMBER)) assert.Equal(t, token, int(T_DNUMBER))
@ -462,7 +461,7 @@ func TestNumberTokens(t *testing.T) {
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
actual := []string{} actual := []string{}
@ -520,7 +519,7 @@ func TestConstantStrings(t *testing.T) {
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
actual := []string{} actual := []string{}
@ -656,7 +655,7 @@ func TestTeplateStringTokens(t *testing.T) {
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
actual := []string{} actual := []string{}
@ -742,7 +741,7 @@ func TestBackquoteStringTokens(t *testing.T) {
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
actual := []string{} actual := []string{}
@ -837,7 +836,7 @@ CAT;
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
actual := []string{} actual := []string{}
@ -911,7 +910,7 @@ CAT
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
actual := []string{} actual := []string{}
@ -951,7 +950,7 @@ CAT;
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
actual := []string{} actual := []string{}
@ -983,7 +982,7 @@ func TestHereDocTokens73(t *testing.T) {
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
actual := []string{} actual := []string{}
@ -1015,7 +1014,7 @@ CAT;`
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.PHPVersion = "7.2" lexer.PHPVersion = "7.2"
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
actual := []string{} actual := []string{}
@ -1048,7 +1047,7 @@ func TestInlineHtmlNopTokens(t *testing.T) {
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
actual := []string{} actual := []string{}
@ -1094,7 +1093,7 @@ func TestStringTokensAfterVariable(t *testing.T) {
break break
} }
actualTokens = append(actualTokens, lv.Tkn.Value) actualTokens = append(actualTokens, string(lv.Tkn.Value))
actual = append(actual, TokenID(token).String()) actual = append(actual, TokenID(token).String())
} }
@ -1128,7 +1127,7 @@ func TestSlashAfterVariable(t *testing.T) {
break break
} }
actualTokens = append(actualTokens, lv.Tkn.Value) actualTokens = append(actualTokens, string(lv.Tkn.Value))
actual = append(actual, TokenID(token).String()) actual = append(actual, TokenID(token).String())
} }
@ -1139,31 +1138,29 @@ func TestSlashAfterVariable(t *testing.T) {
func TestCommentEnd(t *testing.T) { func TestCommentEnd(t *testing.T) {
src := `<?php //test` src := `<?php //test`
expected := []freefloating.String{ expected := []token.Token{
{ {
Value: "<?php", ID: token.T_OPEN_TAG,
StringType: freefloating.TokenType, Value: []byte("<?php"),
Position: position.NewPosition(1, 1, 0, 5), },
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
}, },
{ {
Value: " ", ID: token.T_COMMENT,
StringType: freefloating.WhiteSpaceType, Value: []byte("//test"),
Position: position.NewPosition(1, 1, 5, 6),
},
{
Value: "//test",
StringType: freefloating.CommentType,
Position: position.NewPosition(1, 1, 6, 12),
}, },
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
lexer.Lex(lv) lexer.Lex(lv)
actual := lexer.FreeFloating actual := lexer.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
} }
@ -1171,31 +1168,29 @@ func TestCommentEnd(t *testing.T) {
func TestCommentNewLine(t *testing.T) { func TestCommentNewLine(t *testing.T) {
src := "<?php //test\n$a" src := "<?php //test\n$a"
expected := []freefloating.String{ expected := []token.Token{
{ {
Value: "<?php", ID: token.T_OPEN_TAG,
StringType: freefloating.TokenType, Value: []byte("<?php"),
Position: position.NewPosition(1, 1, 0, 5), },
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
}, },
{ {
Value: " ", ID: token.T_COMMENT,
StringType: freefloating.WhiteSpaceType, Value: []byte("//test\n"),
Position: position.NewPosition(1, 1, 5, 6),
},
{
Value: "//test\n",
StringType: freefloating.CommentType,
Position: position.NewPosition(1, 1, 6, 13),
}, },
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
lexer.Lex(lv) lexer.Lex(lv)
actual := lv.Tkn.FreeFloating actual := lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
} }
@ -1203,31 +1198,29 @@ func TestCommentNewLine(t *testing.T) {
func TestCommentNewLine1(t *testing.T) { func TestCommentNewLine1(t *testing.T) {
src := "<?php //test\r$a" src := "<?php //test\r$a"
expected := []freefloating.String{ expected := []token.Token{
{ {
Value: "<?php", ID: token.T_OPEN_TAG,
StringType: freefloating.TokenType, Value: []byte("<?php"),
Position: position.NewPosition(1, 1, 0, 5), },
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
}, },
{ {
Value: " ", ID: token.T_COMMENT,
StringType: freefloating.WhiteSpaceType, Value: []byte("//test\r"),
Position: position.NewPosition(1, 1, 5, 6),
},
{
Value: "//test\r",
StringType: freefloating.CommentType,
Position: position.NewPosition(1, 1, 6, 13),
}, },
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
lexer.Lex(lv) lexer.Lex(lv)
actual := lv.Tkn.FreeFloating actual := lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
} }
@ -1235,31 +1228,29 @@ func TestCommentNewLine1(t *testing.T) {
func TestCommentNewLine2(t *testing.T) { func TestCommentNewLine2(t *testing.T) {
src := "<?php #test\r\n$a" src := "<?php #test\r\n$a"
expected := []freefloating.String{ expected := []token.Token{
{ {
Value: "<?php", ID: token.T_OPEN_TAG,
StringType: freefloating.TokenType, Value: []byte("<?php"),
Position: position.NewPosition(1, 1, 0, 5), },
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
}, },
{ {
Value: " ", ID: token.T_COMMENT,
StringType: freefloating.WhiteSpaceType, Value: []byte("#test\r\n"),
Position: position.NewPosition(1, 1, 5, 6),
},
{
Value: "#test\r\n",
StringType: freefloating.CommentType,
Position: position.NewPosition(1, 1, 6, 13),
}, },
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
lexer.Lex(lv) lexer.Lex(lv)
actual := lv.Tkn.FreeFloating actual := lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
} }
@ -1268,31 +1259,29 @@ func TestCommentWithPhpEndTag(t *testing.T) {
src := `<?php src := `<?php
//test?> test` //test?> test`
expected := []freefloating.String{ expected := []token.Token{
{ {
Value: "<?php", ID: token.T_OPEN_TAG,
StringType: freefloating.TokenType, Value: []byte("<?php"),
Position: position.NewPosition(1, 1, 0, 5), },
{
ID: token.T_WHITESPACE,
Value: []byte("\n\t"),
}, },
{ {
Value: "\n\t", ID: token.T_COMMENT,
StringType: freefloating.WhiteSpaceType, Value: []byte("//test"),
Position: position.NewPosition(1, 2, 5, 7),
},
{
Value: "//test",
StringType: freefloating.CommentType,
Position: position.NewPosition(2, 2, 7, 13),
}, },
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
lexer.Lex(lv) lexer.Lex(lv)
actual := lv.Tkn.FreeFloating actual := lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
} }
@ -1301,31 +1290,29 @@ func TestInlineComment(t *testing.T) {
src := `<?php src := `<?php
/*test*/` /*test*/`
expected := []freefloating.String{ expected := []token.Token{
{ {
Value: "<?php", ID: token.T_OPEN_TAG,
StringType: freefloating.TokenType, Value: []byte("<?php"),
Position: position.NewPosition(1, 1, 0, 5), },
{
ID: token.T_WHITESPACE,
Value: []byte("\n\t"),
}, },
{ {
Value: "\n\t", ID: token.T_COMMENT,
StringType: freefloating.WhiteSpaceType, Value: []byte("/*test*/"),
Position: position.NewPosition(1, 2, 5, 7),
},
{
Value: "/*test*/",
StringType: freefloating.CommentType,
Position: position.NewPosition(2, 2, 7, 15),
}, },
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
lexer.Lex(lv) lexer.Lex(lv)
actual := lv.Tkn.FreeFloating actual := lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
} }
@ -1334,31 +1321,29 @@ func TestInlineComment2(t *testing.T) {
src := `<?php src := `<?php
/*/*/` /*/*/`
expected := []freefloating.String{ expected := []token.Token{
{ {
Value: "<?php", ID: token.T_OPEN_TAG,
StringType: freefloating.TokenType, Value: []byte("<?php"),
Position: position.NewPosition(1, 1, 0, 5), },
{
ID: token.T_WHITESPACE,
Value: []byte("\n\t"),
}, },
{ {
Value: "\n\t", ID: token.T_COMMENT,
StringType: freefloating.WhiteSpaceType, Value: []byte("/*/*/"),
Position: position.NewPosition(1, 2, 5, 7),
},
{
Value: "/*/*/",
StringType: freefloating.CommentType,
Position: position.NewPosition(2, 2, 7, 12),
}, },
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
lexer.Lex(lv) lexer.Lex(lv)
actual := lexer.FreeFloating actual := lexer.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
} }
@ -1367,36 +1352,33 @@ func TestEmptyInlineComment(t *testing.T) {
src := `<?php src := `<?php
/**/ ` /**/ `
expected := []freefloating.String{ expected := []token.Token{
{ {
Value: "<?php", ID: token.T_OPEN_TAG,
StringType: freefloating.TokenType, Value: []byte("<?php"),
Position: position.NewPosition(1, 1, 0, 5), },
{
ID: token.T_WHITESPACE,
Value: []byte("\n\t"),
}, },
{ {
Value: "\n\t", ID: token.T_COMMENT,
StringType: freefloating.WhiteSpaceType, Value: []byte("/**/"),
Position: position.NewPosition(1, 2, 5, 7),
}, },
{ {
Value: "/**/", ID: token.T_WHITESPACE,
StringType: freefloating.CommentType, Value: []byte(" "),
Position: position.NewPosition(2, 2, 7, 11),
},
{
Value: " ",
StringType: freefloating.WhiteSpaceType,
Position: position.NewPosition(2, 2, 11, 12),
}, },
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
lexer.Lex(lv) lexer.Lex(lv)
actual := lexer.FreeFloating actual := lexer.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
} }
@ -1405,31 +1387,29 @@ func TestEmptyInlineComment2(t *testing.T) {
src := `<?php src := `<?php
/***/` /***/`
expected := []freefloating.String{ expected := []token.Token{
{ {
Value: "<?php", ID: token.T_OPEN_TAG,
StringType: freefloating.TokenType, Value: []byte("<?php"),
Position: position.NewPosition(1, 1, 0, 5), },
{
ID: token.T_WHITESPACE,
Value: []byte("\n\t"),
}, },
{ {
Value: "\n\t", ID: token.T_DOC_COMMENT,
StringType: freefloating.WhiteSpaceType, Value: []byte("/***/"),
Position: position.NewPosition(1, 2, 5, 7),
},
{
Value: "/***/",
StringType: freefloating.CommentType,
Position: position.NewPosition(2, 2, 7, 12),
}, },
} }
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
lexer.Lex(lv) lexer.Lex(lv)
actual := lv.Tkn.FreeFloating actual := lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
} }
@ -1439,89 +1419,81 @@ func TestMethodCallTokens(t *testing.T) {
$a -> bar ( '' ) ;` $a -> bar ( '' ) ;`
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
expected := []freefloating.String{ expected := []token.Token{
{ {
Value: "<?php", ID: token.T_OPEN_TAG,
StringType: freefloating.TokenType, Value: []byte("<?php"),
Position: position.NewPosition(1, 1, 0, 5),
}, },
{ {
Value: "\n\t", ID: token.T_WHITESPACE,
StringType: freefloating.WhiteSpaceType, Value: []byte("\n\t"),
Position: position.NewPosition(1, 2, 5, 7),
}, },
} }
lexer.Lex(lv) lexer.Lex(lv)
actual := lv.Tkn.FreeFloating actual := lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
expected = []freefloating.String{ expected = []token.Token{
{ {
Value: " ", ID: token.T_WHITESPACE,
StringType: freefloating.WhiteSpaceType, Value: []byte(" "),
Position: position.NewPosition(2, 2, 9, 10),
}, },
} }
lexer.Lex(lv) lexer.Lex(lv)
actual = lv.Tkn.FreeFloating actual = lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
expected = []freefloating.String{ expected = []token.Token{
{ {
Value: " ", ID: token.T_WHITESPACE,
StringType: freefloating.WhiteSpaceType, Value: []byte(" "),
Position: position.NewPosition(2, 2, 12, 13),
}, },
} }
lexer.Lex(lv) lexer.Lex(lv)
actual = lv.Tkn.FreeFloating actual = lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
expected = []freefloating.String{ expected = []token.Token{
{ {
Value: " ", ID: token.T_WHITESPACE,
StringType: freefloating.WhiteSpaceType, Value: []byte(" "),
Position: position.NewPosition(2, 2, 16, 17),
}, },
} }
lexer.Lex(lv) lexer.Lex(lv)
actual = lv.Tkn.FreeFloating actual = lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
expected = []freefloating.String{ expected = []token.Token{
{ {
Value: " ", ID: token.T_WHITESPACE,
StringType: freefloating.WhiteSpaceType, Value: []byte(" "),
Position: position.NewPosition(2, 2, 18, 19),
}, },
} }
lexer.Lex(lv) lexer.Lex(lv)
actual = lv.Tkn.FreeFloating actual = lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
expected = []freefloating.String{ expected = []token.Token{
{ {
Value: " ", ID: token.T_WHITESPACE,
StringType: freefloating.WhiteSpaceType, Value: []byte(" "),
Position: position.NewPosition(2, 2, 21, 22),
}, },
} }
lexer.Lex(lv) lexer.Lex(lv)
actual = lv.Tkn.FreeFloating actual = lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
expected = []freefloating.String{ expected = []token.Token{
{ {
Value: " ", ID: token.T_WHITESPACE,
StringType: freefloating.WhiteSpaceType, Value: []byte(" "),
Position: position.NewPosition(2, 2, 23, 24),
}, },
} }
lexer.Lex(lv) lexer.Lex(lv)
actual = lv.Tkn.FreeFloating actual = lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
} }
@ -1530,34 +1502,31 @@ func TestYieldFromTokens(t *testing.T) {
yield from $a` yield from $a`
lexer := NewLexer([]byte(src)) lexer := NewLexer([]byte(src))
lexer.WithFreeFloating = true lexer.WithTokens = true
lv := &lval{} lv := &lval{}
expected := []freefloating.String{ expected := []token.Token{
{ {
Value: "<?php", ID: token.T_OPEN_TAG,
StringType: freefloating.TokenType, Value: []byte("<?php"),
Position: position.NewPosition(1, 1, 0, 5),
}, },
{ {
Value: "\n\t", ID: token.T_WHITESPACE,
StringType: freefloating.WhiteSpaceType, Value: []byte("\n\t"),
Position: position.NewPosition(1, 2, 5, 7),
}, },
} }
lexer.Lex(lv) lexer.Lex(lv)
actual := lv.Tkn.FreeFloating actual := lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
expected = []freefloating.String{ expected = []token.Token{
{ {
Value: " ", ID: token.T_WHITESPACE,
StringType: freefloating.WhiteSpaceType, Value: []byte(" "),
Position: position.NewPosition(2, 2, 17, 18),
}, },
} }
lexer.Lex(lv) lexer.Lex(lv)
actual = lv.Tkn.FreeFloating actual = lv.Tkn.Tokens
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
} }
@ -1568,10 +1537,10 @@ func TestVarNameByteChars(t *testing.T) {
lv := &lval{} lv := &lval{}
lexer.Lex(lv) lexer.Lex(lv)
assert.Equal(t, "$\x80", lv.Tkn.Value) assert.Equal(t, "$\x80", string(lv.Tkn.Value))
lexer.Lex(lv) lexer.Lex(lv)
assert.Equal(t, "$\xff", lv.Tkn.Value) assert.Equal(t, "$\xff", string(lv.Tkn.Value))
} }
func TestStringVarNameByteChars(t *testing.T) { func TestStringVarNameByteChars(t *testing.T) {
@ -1581,19 +1550,19 @@ func TestStringVarNameByteChars(t *testing.T) {
lv := &lval{} lv := &lval{}
lexer.Lex(lv) lexer.Lex(lv)
assert.Equal(t, "\"", lv.Tkn.Value) assert.Equal(t, "\"", string(lv.Tkn.Value))
lexer.Lex(lv) lexer.Lex(lv)
assert.Equal(t, "$\x80", lv.Tkn.Value) assert.Equal(t, "$\x80", string(lv.Tkn.Value))
lexer.Lex(lv) lexer.Lex(lv)
assert.Equal(t, " ", lv.Tkn.Value) assert.Equal(t, " ", string(lv.Tkn.Value))
lexer.Lex(lv) lexer.Lex(lv)
assert.Equal(t, "$\xff", lv.Tkn.Value) assert.Equal(t, "$\xff", string(lv.Tkn.Value))
lexer.Lex(lv) lexer.Lex(lv)
assert.Equal(t, "\"", lv.Tkn.Value) assert.Equal(t, "\"", string(lv.Tkn.Value))
} }
func TestIgnoreControllCharacters(t *testing.T) { func TestIgnoreControllCharacters(t *testing.T) {
@ -1604,12 +1573,12 @@ func TestIgnoreControllCharacters(t *testing.T) {
expected := "echo" expected := "echo"
lexer.Lex(lv) lexer.Lex(lv)
actual := lv.Tkn.Value actual := string(lv.Tkn.Value)
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
expected = "$b" expected = "$b"
lexer.Lex(lv) lexer.Lex(lv)
actual = lv.Tkn.Value actual = string(lv.Tkn.Value)
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
} }
@ -1621,26 +1590,26 @@ func TestIgnoreControllCharactersAtStringVarOffset(t *testing.T) {
expected := "\"" expected := "\""
lexer.Lex(lv) lexer.Lex(lv)
actual := lv.Tkn.Value actual := string(lv.Tkn.Value)
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
expected = "$a" expected = "$a"
lexer.Lex(lv) lexer.Lex(lv)
actual = lv.Tkn.Value actual = string(lv.Tkn.Value)
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
expected = "[" expected = "["
lexer.Lex(lv) lexer.Lex(lv)
actual = lv.Tkn.Value actual = string(lv.Tkn.Value)
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
expected = "test" expected = "test"
lexer.Lex(lv) lexer.Lex(lv)
actual = lv.Tkn.Value actual = string(lv.Tkn.Value)
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
expected = "]" expected = "]"
lexer.Lex(lv) lexer.Lex(lv)
actual = lv.Tkn.Value actual = string(lv.Tkn.Value)
assert.DeepEqual(t, expected, actual) assert.DeepEqual(t, expected, actual)
} }

15
internal/scanner/token.go Normal file
View File

@ -0,0 +1,15 @@
package scanner
import (
"github.com/z7zmey/php-parser/pkg/token"
)
// Token value returned by lexer
type Token struct {
Value []byte
Tokens []token.Token
StartLine int
EndLine int
StartPos int
EndPos int
}

View File

@ -4,7 +4,7 @@ import (
"reflect" "reflect"
"testing" "testing"
"github.com/z7zmey/php-parser/scanner" "github.com/z7zmey/php-parser/internal/scanner"
) )
func TestTokenPoolGetNew(t *testing.T) { func TestTokenPoolGetNew(t *testing.T) {
@ -21,7 +21,7 @@ func TestTokenPoolGetFromPool(t *testing.T) {
tp := new(scanner.TokenPool) tp := new(scanner.TokenPool)
expectedToken := &scanner.Token{ expectedToken := &scanner.Token{
Value: "test", Value: []byte("test"),
} }
tp.Put(expectedToken) tp.Put(expectedToken)

View File

@ -1,225 +0,0 @@
package php7
import (
"strings"
"github.com/z7zmey/php-parser/errors"
"github.com/z7zmey/php-parser/freefloating"
"github.com/z7zmey/php-parser/node"
"github.com/z7zmey/php-parser/position"
"github.com/z7zmey/php-parser/positionbuilder"
"github.com/z7zmey/php-parser/scanner"
)
func (lval *yySymType) Token(t *scanner.Token) {
lval.token = t
}
// Parser structure
type Parser struct {
Lexer scanner.Scanner
currentToken *scanner.Token
positionBuilder *positionbuilder.PositionBuilder
rootNode node.Node
}
// NewParser creates and returns new Parser
func NewParser(src []byte, v string) *Parser {
lexer := scanner.NewLexer(src)
lexer.PHPVersion = v
return &Parser{
lexer,
nil,
nil,
nil,
}
}
func (l *Parser) Lex(lval *yySymType) int {
t := l.Lexer.Lex(lval)
l.currentToken = lval.token
return t
}
func (l *Parser) Error(msg string) {
pos := &position.Position{
StartLine: l.currentToken.StartLine,
EndLine: l.currentToken.EndLine,
StartPos: l.currentToken.StartPos,
EndPos: l.currentToken.EndPos,
}
l.Lexer.AddError(errors.NewError(msg, pos))
}
func (l *Parser) WithFreeFloating() {
l.Lexer.SetWithFreeFloating(true)
}
// Parse the php7 Parser entrypoint
func (l *Parser) Parse() int {
// init
l.Lexer.SetErrors(nil)
l.rootNode = nil
l.positionBuilder = &positionbuilder.PositionBuilder{}
// parse
return yyParse(l)
}
// GetRootNode returns root node
func (l *Parser) GetRootNode() node.Node {
return l.rootNode
}
// GetErrors returns errors list
func (l *Parser) GetErrors() []*errors.Error {
return l.Lexer.GetErrors()
}
// helpers
func lastNode(nn []node.Node) node.Node {
if len(nn) == 0 {
return nil
}
return nn[len(nn)-1]
}
func firstNode(nn []node.Node) node.Node {
return nn[0]
}
func isDollar(r rune) bool {
return r == '$'
}
func (l *Parser) MoveFreeFloating(src node.Node, dst node.Node) {
if l.Lexer.GetWithFreeFloating() == false {
return
}
if src.GetFreeFloating() == nil {
return
}
l.setFreeFloating(dst, freefloating.Start, (*src.GetFreeFloating())[freefloating.Start])
delete((*src.GetFreeFloating()), freefloating.Start)
}
func (l *Parser) setFreeFloating(dst node.Node, p freefloating.Position, strings []freefloating.String) {
if l.Lexer.GetWithFreeFloating() == false {
return
}
if len(strings) == 0 {
return
}
dstCollection := dst.GetFreeFloating()
if *dstCollection == nil {
*dstCollection = make(freefloating.Collection)
}
(*dstCollection)[p] = strings
}
func (l *Parser) GetFreeFloatingToken(t *scanner.Token) []freefloating.String {
if l.Lexer.GetWithFreeFloating() == false {
return []freefloating.String{}
}
return t.GetFreeFloatingToken()
}
func (l *Parser) addDollarToken(v node.Node) {
if l.Lexer.GetWithFreeFloating() == false {
return
}
l.setFreeFloating(v, freefloating.Dollar, []freefloating.String{
{
StringType: freefloating.TokenType,
Value: "$",
Position: &position.Position{
StartLine: v.GetPosition().StartLine,
EndLine: v.GetPosition().StartLine,
StartPos: v.GetPosition().StartPos,
EndPos: v.GetPosition().StartPos + 1,
},
},
})
}
func (l *Parser) splitSemiColonAndPhpCloseTag(htmlNode node.Node, prevNode node.Node) {
if l.Lexer.GetWithFreeFloating() == false {
return
}
semiColon := (*prevNode.GetFreeFloating())[freefloating.SemiColon]
delete((*prevNode.GetFreeFloating()), freefloating.SemiColon)
if len(semiColon) == 0 {
return
}
p := semiColon[0].Position
if semiColon[0].Value[0] == ';' {
l.setFreeFloating(prevNode, freefloating.SemiColon, []freefloating.String{
{
StringType: freefloating.TokenType,
Value: ";",
Position: &position.Position{
StartLine: p.StartLine,
EndLine: p.StartLine,
StartPos: p.StartPos,
EndPos: p.StartPos + 1,
},
},
})
}
vlen := len(semiColon[0].Value)
tlen := 2
if strings.HasSuffix(semiColon[0].Value, "?>\n") {
tlen = 3
}
phpCloseTag := []freefloating.String{}
if vlen-tlen > 1 {
phpCloseTag = append(phpCloseTag, freefloating.String{
StringType: freefloating.WhiteSpaceType,
Value: semiColon[0].Value[1 : vlen-tlen],
Position: &position.Position{
StartLine: p.StartLine,
EndLine: p.EndLine,
StartPos: p.StartPos + 1,
EndPos: p.EndPos - tlen,
},
})
}
phpCloseTag = append(phpCloseTag, freefloating.String{
StringType: freefloating.WhiteSpaceType,
Value: semiColon[0].Value[vlen-tlen:],
Position: &position.Position{
StartLine: p.EndLine,
EndLine: p.EndLine,
StartPos: p.EndPos - tlen,
EndPos: p.EndPos,
},
})
l.setFreeFloating(htmlNode, freefloating.Start, append(phpCloseTag, (*htmlNode.GetFreeFloating())[freefloating.Start]...))
}
func (p *Parser) returnTokenToPool(yyDollar []yySymType, yyVAL *yySymType) {
for i := 1; i < len(yyDollar); i++ {
if yyDollar[i].token != nil {
p.Lexer.ReturnTokenToPool(yyDollar[i].token)
}
yyDollar[i].token = nil
}
yyVAL.token = nil
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -189,4 +189,9 @@ type NodeVisitor interface {
ScalarLnumber(n *ScalarLnumber) ScalarLnumber(n *ScalarLnumber)
ScalarMagicConstant(n *ScalarMagicConstant) ScalarMagicConstant(n *ScalarMagicConstant)
ScalarString(n *ScalarString) ScalarString(n *ScalarString)
NameName(n *NameName)
NameFullyQualified(n *NameFullyQualified)
NameRelative(n *NameRelative)
NameNamePart(n *NameNamePart)
} }

View File

@ -53,7 +53,6 @@ type testVisitor struct {
depth int depth int
} }
func (v *testVisitor) Enter(key string, _ bool) { func (v *testVisitor) Enter(key string, _ bool) {
v.depth++ v.depth++
fmt.Fprint(os.Stdout, "=>", strings.Repeat(" ", v.depth), key, ":\n") fmt.Fprint(os.Stdout, "=>", strings.Repeat(" ", v.depth), key, ":\n")

View File

@ -8,6 +8,7 @@ import (
type Node struct { type Node struct {
StartTokens []token.Token StartTokens []token.Token
EndTokens []token.Token EndTokens []token.Token
Tokens token.Collection
Position *position.Position Position *position.Position
} }
@ -52,7 +53,7 @@ func (n *Parameter) Accept(v NodeVisitor) {
// Identifier node // Identifier node
type Identifier struct { type Identifier struct {
Node Node
Value string Value []byte
} }
func (n *Identifier) Accept(v NodeVisitor) { func (n *Identifier) Accept(v NodeVisitor) {
@ -84,7 +85,7 @@ func (n *Argument) Accept(v NodeVisitor) {
// ScalarDnumber node // ScalarDnumber node
type ScalarDnumber struct { type ScalarDnumber struct {
Node Node
Value string Value []byte
} }
func (n *ScalarDnumber) Accept(v NodeVisitor) { func (n *ScalarDnumber) Accept(v NodeVisitor) {
@ -104,7 +105,7 @@ func (n *ScalarEncapsed) Accept(v NodeVisitor) {
// ScalarEncapsedStringPart node // ScalarEncapsedStringPart node
type ScalarEncapsedStringPart struct { type ScalarEncapsedStringPart struct {
Node Node
Value string Value []byte
} }
func (n *ScalarEncapsedStringPart) Accept(v NodeVisitor) { func (n *ScalarEncapsedStringPart) Accept(v NodeVisitor) {
@ -114,7 +115,7 @@ func (n *ScalarEncapsedStringPart) Accept(v NodeVisitor) {
// ScalarHeredoc node // ScalarHeredoc node
type ScalarHeredoc struct { type ScalarHeredoc struct {
Node Node
Label string Label []byte
Parts []Vertex Parts []Vertex
} }
@ -125,7 +126,7 @@ func (n *ScalarHeredoc) Accept(v NodeVisitor) {
// ScalarLnumber node // ScalarLnumber node
type ScalarLnumber struct { type ScalarLnumber struct {
Node Node
Value string Value []byte
} }
func (n *ScalarLnumber) Accept(v NodeVisitor) { func (n *ScalarLnumber) Accept(v NodeVisitor) {
@ -135,7 +136,7 @@ func (n *ScalarLnumber) Accept(v NodeVisitor) {
// ScalarMagicConstant node // ScalarMagicConstant node
type ScalarMagicConstant struct { type ScalarMagicConstant struct {
Node Node
Value string Value []byte
} }
func (n *ScalarMagicConstant) Accept(v NodeVisitor) { func (n *ScalarMagicConstant) Accept(v NodeVisitor) {
@ -145,7 +146,7 @@ func (n *ScalarMagicConstant) Accept(v NodeVisitor) {
// ScalarString node // ScalarString node
type ScalarString struct { type ScalarString struct {
Node Node
Value string Value []byte
} }
func (n *ScalarString) Accept(v NodeVisitor) { func (n *ScalarString) Accept(v NodeVisitor) {
@ -550,7 +551,7 @@ func (n *StmtIf) Accept(v NodeVisitor) {
// StmtInlineHtml node // StmtInlineHtml node
type StmtInlineHtml struct { type StmtInlineHtml struct {
Node Node
Value string Value []byte
} }
func (n *StmtInlineHtml) Accept(v NodeVisitor) { func (n *StmtInlineHtml) Accept(v NodeVisitor) {
@ -1803,3 +1804,39 @@ type ExprBinarySpaceship struct {
func (n *ExprBinarySpaceship) Accept(v NodeVisitor) { func (n *ExprBinarySpaceship) Accept(v NodeVisitor) {
v.ExprBinarySpaceship(n) v.ExprBinarySpaceship(n)
} }
type NameName struct {
Node
Parts []Vertex
}
func (n *NameName) Accept(v NodeVisitor) {
v.NameName(n)
}
type NameFullyQualified struct {
Node
Parts []Vertex
}
func (n *NameFullyQualified) Accept(v NodeVisitor) {
v.NameFullyQualified(n)
}
type NameRelative struct {
Node
Parts []Vertex
}
func (n *NameRelative) Accept(v NodeVisitor) {
v.NameRelative(n)
}
type NameNamePart struct {
Node
Value []byte
}
func (n *NameNamePart) Accept(v NodeVisitor) {
v.NameNamePart(n)
}

View File

@ -13,8 +13,7 @@ func ExampleDump() {
&ast.Identifier{}, &ast.Identifier{},
&ast.Parameter{ &ast.Parameter{
Variadic: true, Variadic: true,
Var: &ast.ExprVariable{ Var: &ast.ExprVariable{},
},
}, },
&ast.StmtInlineHtml{ &ast.StmtInlineHtml{
Value: "foo", Value: "foo",

View File

@ -3,7 +3,7 @@ package errors
import ( import (
"fmt" "fmt"
"github.com/z7zmey/php-parser/position" "github.com/z7zmey/php-parser/pkg/position"
) )
// Error parsing error // Error parsing error

View File

@ -1,19 +1,19 @@
package parser package parser
import ( import (
"github.com/z7zmey/php-parser/errors" "github.com/z7zmey/php-parser/internal/php5"
"github.com/z7zmey/php-parser/node" "github.com/z7zmey/php-parser/internal/php7"
"github.com/z7zmey/php-parser/php5" "github.com/z7zmey/php-parser/internal/version"
"github.com/z7zmey/php-parser/php7" "github.com/z7zmey/php-parser/pkg/ast"
"github.com/z7zmey/php-parser/version" "github.com/z7zmey/php-parser/pkg/errors"
) )
// Parser interface // Parser interface
type Parser interface { type Parser interface {
Parse() int Parse() int
GetRootNode() node.Node GetRootNode() ast.Vertex
GetErrors() []*errors.Error GetErrors() []*errors.Error
WithFreeFloating() WithTokens()
} }
func NewParser(src []byte, v string) (Parser, error) { func NewParser(src []byte, v string) (Parser, error) {

View File

@ -1,17 +1,9 @@
package freefloating package token
import "github.com/z7zmey/php-parser/position"
type StringType int
const (
WhiteSpaceType StringType = iota
CommentType
TokenType
)
type Position int type Position int
type Collection map[Position][]Token
//go:generate stringer -type=Position -output ./position_string.go //go:generate stringer -type=Position -output ./position_string.go
const ( const (
Start Position = iota Start Position = iota
@ -94,20 +86,3 @@ const (
OpenParenthesisToken OpenParenthesisToken
CloseParenthesisToken CloseParenthesisToken
) )
type String struct {
StringType StringType
Value string
Position *position.Position
}
type Collection map[Position][]String
func (c Collection) IsEmpty() bool {
for _, v := range c {
if len(v) > 0 {
return false
}
}
return true
}

View File

@ -1,6 +1,6 @@
// Code generated by "stringer -type=Position -output ./position_string.go"; DO NOT EDIT. // Code generated by "stringer -type=Position -output ./position_string.go"; DO NOT EDIT.
package freefloating package token
import "strconv" import "strconv"

View File

@ -1,9 +1,9 @@
package token package token
type TokenID int type ID int
const ( const (
T_INCLUDE TokenID = iota + 57346 T_INCLUDE ID = iota + 57346
T_INCLUDE_ONCE T_INCLUDE_ONCE
T_EXIT T_EXIT
T_IF T_IF
@ -144,6 +144,6 @@ const (
) )
type Token struct { type Token struct {
ID TokenID ID ID
Value []byte Value []byte
} }

View File

@ -1,27 +0,0 @@
package position
import (
"fmt"
)
// Position represents node position
type Position struct {
StartLine int
EndLine int
StartPos int
EndPos int
}
// NewPosition Position constructor
func NewPosition(StartLine int, EndLine int, StartPos int, EndPos int) *Position {
return &Position{
StartLine: StartLine,
EndLine: EndLine,
StartPos: StartPos,
EndPos: EndPos,
}
}
func (p Position) String() string {
return fmt.Sprintf("Pos{Line: %d-%d Pos: %d-%d}", p.StartLine, p.EndLine, p.StartPos, p.EndPos)
}

View File

@ -1,19 +0,0 @@
package position_test
import (
"testing"
"github.com/z7zmey/php-parser/position"
)
func TestPrintPosition(t *testing.T) {
pos := position.NewPosition(1, 1, 2, 5)
expected := "Pos{Line: 1-1 Pos: 2-5}"
actual := pos.String()
if expected != actual {
t.Errorf("expected and actual are not equal\n")
}
}

View File

@ -1,463 +0,0 @@
package positionbuilder_test
import (
"testing"
"github.com/z7zmey/php-parser/node"
"github.com/z7zmey/php-parser/position"
"github.com/z7zmey/php-parser/positionbuilder"
"github.com/z7zmey/php-parser/scanner"
)
func TestNewTokenPosition(t *testing.T) {
builder := positionbuilder.PositionBuilder{}
tkn := &scanner.Token{
Value: `foo`,
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 3,
}
pos := builder.NewTokenPosition(tkn)
if pos.String() != `Pos{Line: 1-1 Pos: 0-3}` {
t.Errorf("token value is not equal\n")
}
}
func TestNewTokensPosition(t *testing.T) {
builder := positionbuilder.PositionBuilder{}
token1 := &scanner.Token{
Value: `foo`,
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 3,
}
token2 := &scanner.Token{
Value: `foo`,
StartLine: 2,
EndLine: 2,
StartPos: 4,
EndPos: 6,
}
pos := builder.NewTokensPosition(token1, token2)
if pos.String() != `Pos{Line: 1-2 Pos: 0-6}` {
t.Errorf("token value is not equal\n")
}
}
func TestNewNodePosition(t *testing.T) {
n := node.NewIdentifier("test node")
n.SetPosition(&position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 3,
})
builder := positionbuilder.PositionBuilder{}
pos := builder.NewNodePosition(n)
if pos.String() != `Pos{Line: 1-1 Pos: 0-3}` {
t.Errorf("token value is not equal\n")
}
}
func TestNewTokenNodePosition(t *testing.T) {
tkn := &scanner.Token{
Value: `foo`,
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 3,
}
n := node.NewIdentifier("test node")
n.SetPosition(&position.Position{
StartLine: 2,
EndLine: 2,
StartPos: 4,
EndPos: 12,
})
builder := positionbuilder.PositionBuilder{}
pos := builder.NewTokenNodePosition(tkn, n)
if pos.String() != `Pos{Line: 1-2 Pos: 0-12}` {
t.Errorf("token value is not equal\n")
}
}
func TestNewNodeTokenPosition(t *testing.T) {
n := node.NewIdentifier("test node")
n.SetPosition(&position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 9,
})
tkn := &scanner.Token{
Value: `foo`,
StartLine: 2,
EndLine: 2,
StartPos: 10,
EndPos: 12,
}
builder := positionbuilder.PositionBuilder{}
pos := builder.NewNodeTokenPosition(n, tkn)
if pos.String() != `Pos{Line: 1-2 Pos: 0-12}` {
t.Errorf("token value is not equal\n")
}
}
func TestNewNodeListPosition(t *testing.T) {
n1 := node.NewIdentifier("test node")
n1.SetPosition(&position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 9,
})
n2 := node.NewIdentifier("test node")
n2.SetPosition(&position.Position{
StartLine: 2,
EndLine: 2,
StartPos: 10,
EndPos: 19,
})
builder := positionbuilder.PositionBuilder{}
pos := builder.NewNodeListPosition([]node.Node{n1, n2})
if pos.String() != `Pos{Line: 1-2 Pos: 0-19}` {
t.Errorf("token value is not equal\n")
}
}
func TestNewNodesPosition(t *testing.T) {
n1 := node.NewIdentifier("test node")
n1.SetPosition(&position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 9,
})
n2 := node.NewIdentifier("test node")
n2.SetPosition(&position.Position{
StartLine: 2,
EndLine: 2,
StartPos: 10,
EndPos: 19,
})
builder := positionbuilder.PositionBuilder{}
pos := builder.NewNodesPosition(n1, n2)
if pos.String() != `Pos{Line: 1-2 Pos: 0-19}` {
t.Errorf("token value is not equal\n")
}
}
func TestNewNodeListTokenPosition(t *testing.T) {
n1 := node.NewIdentifier("test node")
n1.SetPosition(&position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 9,
})
n2 := node.NewIdentifier("test node")
n2.SetPosition(&position.Position{
StartLine: 2,
EndLine: 2,
StartPos: 10,
EndPos: 19,
})
tkn := &scanner.Token{
Value: `foo`,
StartLine: 3,
EndLine: 3,
StartPos: 20,
EndPos: 22,
}
builder := positionbuilder.PositionBuilder{}
pos := builder.NewNodeListTokenPosition([]node.Node{n1, n2}, tkn)
if pos.String() != `Pos{Line: 1-3 Pos: 0-22}` {
t.Errorf("token value is not equal\n")
}
}
func TestNewTokenNodeListPosition(t *testing.T) {
tkn := &scanner.Token{
Value: `foo`,
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 2,
}
n1 := node.NewIdentifier("test node")
n1.SetPosition(&position.Position{
StartLine: 2,
EndLine: 2,
StartPos: 3,
EndPos: 10,
})
n2 := node.NewIdentifier("test node")
n2.SetPosition(&position.Position{
StartLine: 3,
EndLine: 3,
StartPos: 11,
EndPos: 20,
})
builder := positionbuilder.PositionBuilder{}
pos := builder.NewTokenNodeListPosition(tkn, []node.Node{n1, n2})
if pos.String() != `Pos{Line: 1-3 Pos: 0-20}` {
t.Errorf("token value is not equal\n")
}
}
func TestNewNodeNodeListPosition(t *testing.T) {
n1 := node.NewIdentifier("test node")
n1.SetPosition(&position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 8,
})
n2 := node.NewIdentifier("test node")
n2.SetPosition(&position.Position{
StartLine: 2,
EndLine: 2,
StartPos: 9,
EndPos: 17,
})
n3 := node.NewIdentifier("test node")
n3.SetPosition(&position.Position{
StartLine: 3,
EndLine: 3,
StartPos: 18,
EndPos: 26,
})
builder := positionbuilder.PositionBuilder{}
pos := builder.NewNodeNodeListPosition(n1, []node.Node{n2, n3})
if pos.String() != `Pos{Line: 1-3 Pos: 0-26}` {
t.Errorf("token value is not equal\n")
}
}
func TestNewNodeListNodePosition(t *testing.T) {
n1 := node.NewIdentifier("test node")
n1.SetPosition(&position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 8,
})
n2 := node.NewIdentifier("test node")
n2.SetPosition(&position.Position{
StartLine: 2,
EndLine: 2,
StartPos: 9,
EndPos: 17,
})
n3 := node.NewIdentifier("test node")
n3.SetPosition(&position.Position{
StartLine: 3,
EndLine: 3,
StartPos: 18,
EndPos: 26,
})
builder := positionbuilder.PositionBuilder{}
pos := builder.NewNodeListNodePosition([]node.Node{n1, n2}, n3)
if pos.String() != `Pos{Line: 1-3 Pos: 0-26}` {
t.Errorf("token value is not equal\n")
}
}
func TestNewOptionalListTokensPosition(t *testing.T) {
builder := positionbuilder.PositionBuilder{}
token1 := &scanner.Token{
Value: `foo`,
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 3,
}
token2 := &scanner.Token{
Value: `foo`,
StartLine: 2,
EndLine: 2,
StartPos: 4,
EndPos: 6,
}
pos := builder.NewOptionalListTokensPosition(nil, token1, token2)
if pos.String() != `Pos{Line: 1-2 Pos: 0-6}` {
t.Errorf("token value is not equal\n")
}
}
func TestNewOptionalListTokensPosition2(t *testing.T) {
n1 := node.NewIdentifier("test node")
n1.SetPosition(&position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 8,
})
n2 := node.NewIdentifier("test node")
n2.SetPosition(&position.Position{
StartLine: 2,
EndLine: 2,
StartPos: 9,
EndPos: 17,
})
n3 := node.NewIdentifier("test node")
n3.SetPosition(&position.Position{
StartLine: 3,
EndLine: 3,
StartPos: 18,
EndPos: 26,
})
builder := positionbuilder.PositionBuilder{}
token1 := &scanner.Token{
Value: `foo`,
StartLine: 4,
EndLine: 4,
StartPos: 27,
EndPos: 29,
}
token2 := &scanner.Token{
Value: `foo`,
StartLine: 5,
EndLine: 5,
StartPos: 30,
EndPos: 32,
}
pos := builder.NewOptionalListTokensPosition([]node.Node{n2, n3}, token1, token2)
if pos.String() != `Pos{Line: 2-5 Pos: 9-32}` {
t.Errorf("token value is not equal\n")
}
}
func TestNilNodePos(t *testing.T) {
builder := positionbuilder.PositionBuilder{}
pos := builder.NewNodesPosition(nil, nil)
if pos.String() != `Pos{Line: -1--1 Pos: -1--1}` {
t.Errorf("token value is not equal\n")
}
}
func TestNilNodeListPos(t *testing.T) {
n1 := node.NewIdentifier("test node")
n1.SetPosition(&position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 8,
})
builder := positionbuilder.PositionBuilder{}
pos := builder.NewNodeNodeListPosition(n1, nil)
if pos.String() != `Pos{Line: 1--1 Pos: 0--1}` {
t.Errorf("token value is not equal\n")
}
}
func TestNilNodeListTokenPos(t *testing.T) {
token := &scanner.Token{
Value: `foo`,
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 3,
}
builder := positionbuilder.PositionBuilder{}
pos := builder.NewNodeListTokenPosition(nil, token)
if pos.String() != `Pos{Line: -1-1 Pos: -1-3}` {
t.Errorf("token value is not equal\n")
}
}
func TestEmptyNodeListPos(t *testing.T) {
n1 := node.NewIdentifier("test node")
n1.SetPosition(&position.Position{
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 8,
})
builder := positionbuilder.PositionBuilder{}
pos := builder.NewNodeNodeListPosition(n1, []node.Node{})
if pos.String() != `Pos{Line: 1--1 Pos: 0--1}` {
t.Errorf("token value is not equal\n")
}
}
func TestEmptyNodeListTokenPos(t *testing.T) {
token := &scanner.Token{
Value: `foo`,
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 3,
}
builder := positionbuilder.PositionBuilder{}
pos := builder.NewNodeListTokenPosition([]node.Node{}, token)
if pos.String() != `Pos{Line: -1-1 Pos: -1-3}` {
t.Errorf("token value is not equal\n")
}
}

View File

@ -1,35 +0,0 @@
package scanner
import (
"github.com/z7zmey/php-parser/freefloating"
"github.com/z7zmey/php-parser/position"
)
// Token value returned by lexer
type Token struct {
Value string
FreeFloating []freefloating.String
StartLine int
EndLine int
StartPos int
EndPos int
}
func (t *Token) String() string {
return string(t.Value)
}
func (t *Token) GetFreeFloatingToken() []freefloating.String {
return []freefloating.String{
{
StringType: freefloating.TokenType,
Value: t.Value,
Position: &position.Position{
StartLine: t.StartLine,
EndLine: t.EndLine,
StartPos: t.StartPos,
EndPos: t.EndPos,
},
},
}
}

View File

@ -1,37 +0,0 @@
package scanner_test
import (
"reflect"
"testing"
"github.com/z7zmey/php-parser/freefloating"
"github.com/z7zmey/php-parser/scanner"
)
func TestToken(t *testing.T) {
tkn := &scanner.Token{
Value: `foo`,
StartLine: 1,
EndLine: 1,
StartPos: 0,
EndPos: 3,
}
c := []freefloating.String{
{
Value: "test comment",
StringType: freefloating.CommentType,
Position: nil,
},
}
tkn.FreeFloating = c
if !reflect.DeepEqual(tkn.FreeFloating, c) {
t.Errorf("comments are not equal\n")
}
if tkn.String() != `foo` {
t.Errorf("token value is not equal\n")
}
}

View File

@ -1,83 +0,0 @@
// Package visitor contains walker.visitor implementations
package visitor
import (
"fmt"
"io"
"reflect"
"strings"
"github.com/z7zmey/php-parser/node"
"github.com/z7zmey/php-parser/walker"
)
// Dumper writes ast hierarchy to an io.Writer
// Also prints comments and positions attached to nodes
type Dumper struct {
Writer io.Writer
Indent string
NsResolver *NamespaceResolver
}
// EnterNode is invoked at every node in hierarchy
func (d *Dumper) EnterNode(w walker.Walkable) bool {
n := w.(node.Node)
fmt.Fprintf(d.Writer, "%v[%v]\n", d.Indent, reflect.TypeOf(n))
if n.GetPosition() != nil {
fmt.Fprintf(d.Writer, "%v\"Position\": %s\n", d.Indent+" ", n.GetPosition())
}
if d.NsResolver != nil {
if namespacedName, ok := d.NsResolver.ResolvedNames[n]; ok {
fmt.Fprintf(d.Writer, "%v\"NamespacedName\": %q\n", d.Indent+" ", namespacedName)
}
}
if !n.GetFreeFloating().IsEmpty() {
fmt.Fprintf(d.Writer, "%v\"freefloating\":\n", d.Indent+" ")
for key, freeFloatingStrings := range *n.GetFreeFloating() {
for _, freeFloatingString := range freeFloatingStrings {
fmt.Fprintf(d.Writer, "%v%q: %q\n", d.Indent+" ", key.String(), freeFloatingString.Value)
}
}
}
if a := n.Attributes(); len(a) > 0 {
for key, attr := range a {
switch attr.(type) {
case string:
fmt.Fprintf(d.Writer, "%v\"%v\": %q\n", d.Indent+" ", key, attr)
default:
fmt.Fprintf(d.Writer, "%v\"%v\": %v\n", d.Indent+" ", key, attr)
}
}
}
return true
}
// LeaveNode is invoked after node process
func (d *Dumper) LeaveNode(n walker.Walkable) {
// do nothing
}
// GetChildrenVisitor is invoked at every node parameter that contains children nodes
func (d *Dumper) EnterChildNode(key string, w walker.Walkable) {
fmt.Fprintf(d.Writer, "%v%q:\n", d.Indent+" ", key)
d.Indent = d.Indent + " "
}
func (d *Dumper) LeaveChildNode(key string, w walker.Walkable) {
d.Indent = strings.TrimSuffix(d.Indent, " ")
}
func (d *Dumper) EnterChildList(key string, w walker.Walkable) {
fmt.Fprintf(d.Writer, "%v%q:\n", d.Indent+" ", key)
d.Indent = d.Indent + " "
}
func (d *Dumper) LeaveChildList(key string, w walker.Walkable) {
d.Indent = strings.TrimSuffix(d.Indent, " ")
}

View File

@ -1,151 +0,0 @@
package visitor_test
import (
"os"
"github.com/z7zmey/php-parser/php7"
"github.com/z7zmey/php-parser/visitor"
)
func ExampleDumper() {
src := `<?php
namespace Foo {
class Bar {
public function FunctionName(Type $var = null)
{
// some comment
$var;
}
}
}`
php7parser := php7.NewParser([]byte(src), "7.4")
php7parser.WithFreeFloating()
php7parser.Parse()
nodes := php7parser.GetRootNode()
nsResolver := visitor.NewNamespaceResolver()
nodes.Walk(nsResolver)
dumper := &visitor.Dumper{
Writer: os.Stdout,
Indent: "| ",
NsResolver: nsResolver,
}
nodes.Walk(dumper)
// Unordered output:
// | [*node.Root]
// | "Position": Pos{Line: 3-11 Pos: 9-144}
// | "Stmts":
// | [*stmt.Namespace]
// | "Position": Pos{Line: 3-11 Pos: 9-144}
// | "freefloating":
// | "Start": "<?php"
// | "Start": "\n\n\t\t"
// | "Stmts": "\n\t\t"
// | "NamespaceName":
// | [*name.Name]
// | "Position": Pos{Line: 3-3 Pos: 19-22}
// | "freefloating":
// | "Start": " "
// | "End": " "
// | "Parts":
// | [*name.NamePart]
// | "Position": Pos{Line: 3-3 Pos: 19-22}
// | "Value": "Foo"
// | "Stmts":
// | [*stmt.Class]
// | "Position": Pos{Line: 4-10 Pos: 28-140}
// | "NamespacedName": "Foo\\Bar"
// | "freefloating":
// | "Start": "\n\t\t\t"
// | "Name": " "
// | "Stmts": "\n\t\t\t"
// | "PhpDocComment": ""
// | "ClassName":
// | [*node.Identifier]
// | "Position": Pos{Line: 4-4 Pos: 34-37}
// | "freefloating":
// | "Start": " "
// | "Value": "Bar"
// | "Stmts":
// | [*stmt.ClassMethod]
// | "Position": Pos{Line: 5-9 Pos: 45-135}
// | "freefloating":
// | "Start": " \n\t\t\t\t"
// | "ModifierList": " "
// | "Function": " "
// | "ReturnsRef": false
// | "PhpDocComment": ""
// | "MethodName":
// | [*node.Identifier]
// | "Position": Pos{Line: 5-5 Pos: 61-73}
// | "Value": "FunctionName"
// | "Modifiers":
// | [*node.Identifier]
// | "Position": Pos{Line: 5-5 Pos: 45-51}
// | "Value": "public"
// | "Params":
// | [*node.Parameter]
// | "Position": Pos{Line: 5-5 Pos: 74-90}
// | "freefloating":
// | "OptionalType": " "
// | "Var": " "
// | "Variadic": false
// | "ByRef": false
// | "VariableType":
// | [*name.Name]
// | "Position": Pos{Line: 5-5 Pos: 74-78}
// | "NamespacedName": "Foo\\Type"
// | "Parts":
// | [*name.NamePart]
// | "Position": Pos{Line: 5-5 Pos: 74-78}
// | "Value": "Type"
// | "Variable":
// | [*expr.Variable]
// | "Position": Pos{Line: 5-5 Pos: 79-83}
// | "freefloating":
// | "Dollar": "$"
// | "VarName":
// | [*node.Identifier]
// | "Position": Pos{Line: 5-5 Pos: 79-83}
// | "Value": "var"
// | "DefaultValue":
// | [*expr.ConstFetch]
// | "Position": Pos{Line: 5-5 Pos: 86-90}
// | "freefloating":
// | "Start": " "
// | "Constant":
// | [*name.Name]
// | "Position": Pos{Line: 5-5 Pos: 86-90}
// | "NamespacedName": "null"
// | "Parts":
// | [*name.NamePart]
// | "Position": Pos{Line: 5-5 Pos: 86-90}
// | "Value": "null"
// | "Stmt":
// | [*stmt.StmtList]
// | "Position": Pos{Line: 6-9 Pos: 96-135}
// | "freefloating":
// | "Start": "\n\t\t\t\t"
// | "Stmts": "\n\t\t\t\t"
// | "Stmts":
// | [*stmt.Expression]
// | "Position": Pos{Line: 8-8 Pos: 124-129}
// | "freefloating":
// | "SemiColon": ";"
// | "Start": "\n\t\t\t\t\t"
// | "Start": "// some comment\n"
// | "Start": "\t\t\t\t\t"
// | "Expr":
// | [*expr.Variable]
// | "Position": Pos{Line: 8-8 Pos: 124-128}
// | "freefloating":
// | "Dollar": "$"
// | "VarName":
// | [*node.Identifier]
// | "Position": Pos{Line: 8-8 Pos: 124-128}
// | "Value": "var"
}

View File

@ -1,172 +0,0 @@
// Package visitor contains walker.visitor implementations
package visitor
import (
"fmt"
"io"
"reflect"
"strings"
"github.com/z7zmey/php-parser/freefloating"
"github.com/z7zmey/php-parser/node"
"github.com/z7zmey/php-parser/walker"
)
// GoDumper writes ast hierarchy to an io.Writer as native Golang struct
type GoDumper struct {
Writer io.Writer
depth int
isChildNode bool
}
func printIndent(w io.Writer, d int) {
for i := 0; i < d; i++ {
io.WriteString(w, "\t")
}
}
// EnterNode is invoked at every node in hierarchy
func (d *GoDumper) EnterNode(w walker.Walkable) bool {
n := w.(node.Node)
nodeType := reflect.TypeOf(n).String()
nodeType = strings.Replace(nodeType, "*", "&", 1)
if d.isChildNode {
d.isChildNode = false
} else {
printIndent(d.Writer, d.depth)
}
io.WriteString(d.Writer, nodeType+"{\n")
d.depth++
if p := n.GetPosition(); p != nil {
printIndent(d.Writer, d.depth)
fmt.Fprint(d.Writer, "Position: &position.Position{\n")
d.depth++
printIndent(d.Writer, d.depth)
fmt.Fprintf(d.Writer, "StartLine: %d,\n", p.StartLine)
printIndent(d.Writer, d.depth)
fmt.Fprintf(d.Writer, "EndLine: %d,\n", p.EndLine)
printIndent(d.Writer, d.depth)
fmt.Fprintf(d.Writer, "StartPos: %d,\n", p.StartPos)
printIndent(d.Writer, d.depth)
fmt.Fprintf(d.Writer, "EndPos: %d,\n", p.EndPos)
d.depth--
printIndent(d.Writer, d.depth)
fmt.Fprint(d.Writer, "},\n")
}
if !n.GetFreeFloating().IsEmpty() {
printIndent(d.Writer, d.depth)
fmt.Fprint(d.Writer, "FreeFloating: freefloating.Collection{\n")
d.depth++
for key, freeFloatingStrings := range *n.GetFreeFloating() {
printIndent(d.Writer, d.depth)
fmt.Fprintf(d.Writer, "%q: []freefloating.String{\n", key)
d.depth++
for _, freeFloatingString := range freeFloatingStrings {
printIndent(d.Writer, d.depth)
fmt.Fprint(d.Writer, "freefloating.String{\n")
d.depth++
printIndent(d.Writer, d.depth)
switch freeFloatingString.StringType {
case freefloating.CommentType:
fmt.Fprint(d.Writer, "Type: freefloating.CommentType,\n")
case freefloating.WhiteSpaceType:
fmt.Fprint(d.Writer, "Type: freefloating.WhiteSpaceType,\n")
case freefloating.TokenType:
fmt.Fprint(d.Writer, "Type: freefloating.TokenType,\n")
}
printIndent(d.Writer, d.depth)
if freeFloatingString.Position != nil {
fmt.Fprint(d.Writer, "Position: &position.Position{\n")
d.depth++
printIndent(d.Writer, d.depth)
fmt.Fprintf(d.Writer, "StartLine: %d,\n", freeFloatingString.Position.StartLine)
printIndent(d.Writer, d.depth)
fmt.Fprintf(d.Writer, "EndLine: %d,\n", freeFloatingString.Position.EndLine)
printIndent(d.Writer, d.depth)
fmt.Fprintf(d.Writer, "StartPos: %d,\n", freeFloatingString.Position.StartPos)
printIndent(d.Writer, d.depth)
fmt.Fprintf(d.Writer, "EndPos: %d,\n", freeFloatingString.Position.EndPos)
d.depth--
printIndent(d.Writer, d.depth)
fmt.Fprint(d.Writer, "},\n")
} else {
fmt.Fprint(d.Writer, "Position: nil,\n")
}
printIndent(d.Writer, d.depth)
fmt.Fprintf(d.Writer, "Value: %q,\n", freeFloatingString.Value)
d.depth--
printIndent(d.Writer, d.depth)
fmt.Fprint(d.Writer, "},\n")
}
d.depth--
printIndent(d.Writer, d.depth)
fmt.Fprint(d.Writer, "},\n")
}
d.depth--
printIndent(d.Writer, d.depth)
fmt.Fprint(d.Writer, "},\n")
}
if a := n.Attributes(); len(a) > 0 {
for key, attr := range a {
printIndent(d.Writer, d.depth)
switch attr.(type) {
case string:
fmt.Fprintf(d.Writer, "%s: %q,\n", key, attr)
default:
fmt.Fprintf(d.Writer, "%s: %v,\n", key, attr)
}
}
}
return true
}
// LeaveNode is invoked after node process
func (d *GoDumper) LeaveNode(n walker.Walkable) {
d.depth--
printIndent(d.Writer, d.depth)
if d.depth != 0 {
io.WriteString(d.Writer, "},\n")
} else {
io.WriteString(d.Writer, "}\n")
}
}
func (d *GoDumper) EnterChildNode(key string, w walker.Walkable) {
printIndent(d.Writer, d.depth)
io.WriteString(d.Writer, key+": ")
d.isChildNode = true
}
func (d *GoDumper) LeaveChildNode(key string, w walker.Walkable) {
// do nothing
}
func (d *GoDumper) EnterChildList(key string, w walker.Walkable) {
printIndent(d.Writer, d.depth)
io.WriteString(d.Writer, key+": []node.Node{\n")
d.depth++
}
func (d *GoDumper) LeaveChildList(key string, w walker.Walkable) {
d.depth--
printIndent(d.Writer, d.depth)
if d.depth != 0 {
io.WriteString(d.Writer, "},\n")
}
}

View File

@ -1,528 +0,0 @@
package visitor_test
import (
"os"
"github.com/z7zmey/php-parser/php7"
"github.com/z7zmey/php-parser/visitor"
)
func ExampleGoDumper() {
src := `<?php
namespace Foo {
class Bar {
public function FunctionName(Type $var = null)
{
// some comment
$var;
}
}
}`
php7parser := php7.NewParser([]byte(src), "7.4")
php7parser.WithFreeFloating()
php7parser.Parse()
nodes := php7parser.GetRootNode()
nsResolver := visitor.NewNamespaceResolver()
nodes.Walk(nsResolver)
dumper := &visitor.GoDumper{
Writer: os.Stdout,
}
nodes.Walk(dumper)
// Unordered output:
// &node.Root{
// Position: &position.Position{
// StartLine: 3,
// EndLine: 11,
// StartPos: 9,
// EndPos: 143,
// },
// Stmts: []node.Node{
// &stmt.Namespace{
// Position: &position.Position{
// StartLine: 3,
// EndLine: 11,
// StartPos: 9,
// EndPos: 143,
// },
// FreeFloating: freefloating.Collection{
// "Stmts": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 10,
// EndLine: 11,
// StartPos: 139,
// EndPos: 142,
// },
// Value: "\n\t\t",
// },
// },
// "Start": []freefloating.String{
// freefloating.String{
// Type: freefloating.TokenType,
// Position: &position.Position{
// StartLine: 1,
// EndLine: 1,
// StartPos: 0,
// EndPos: 5,
// },
// Value: "<?php",
// },
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 1,
// EndLine: 3,
// StartPos: 5,
// EndPos: 9,
// },
// Value: "\n\n\t\t",
// },
// },
// },
// NamespaceName: &name.Name{
// Position: &position.Position{
// StartLine: 3,
// EndLine: 3,
// StartPos: 19,
// EndPos: 22,
// },
// FreeFloating: freefloating.Collection{
// "Start": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 3,
// EndLine: 3,
// StartPos: 18,
// EndPos: 19,
// },
// Value: " ",
// },
// },
// "End": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 3,
// EndLine: 3,
// StartPos: 22,
// EndPos: 23,
// },
// Value: " ",
// },
// },
// },
// Parts: []node.Node{
// &name.NamePart{
// Position: &position.Position{
// StartLine: 3,
// EndLine: 3,
// StartPos: 19,
// EndPos: 22,
// },
// Value: "Foo",
// },
// },
// },
// Stmts: []node.Node{
// &stmt.Class{
// Position: &position.Position{
// StartLine: 4,
// EndLine: 10,
// StartPos: 28,
// EndPos: 139,
// },
// FreeFloating: freefloating.Collection{
// "Start": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 3,
// EndLine: 4,
// StartPos: 24,
// EndPos: 28,
// },
// Value: "\n\t\t\t",
// },
// },
// "Name": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 4,
// EndLine: 4,
// StartPos: 37,
// EndPos: 38,
// },
// Value: " ",
// },
// },
// "Stmts": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 9,
// EndLine: 10,
// StartPos: 134,
// EndPos: 138,
// },
// Value: "\n\t\t\t",
// },
// },
// },
// PhpDocComment: "",
// ClassName: &node.Identifier{
// Position: &position.Position{
// StartLine: 4,
// EndLine: 4,
// StartPos: 34,
// EndPos: 37,
// },
// FreeFloating: freefloating.Collection{
// "Start": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 4,
// EndLine: 4,
// StartPos: 33,
// EndPos: 34,
// },
// Value: " ",
// },
// },
// },
// Value: "Bar",
// },
// Stmts: []node.Node{
// &stmt.ClassMethod{
// Position: &position.Position{
// StartLine: 5,
// EndLine: 9,
// StartPos: 44,
// EndPos: 134,
// },
// FreeFloating: freefloating.Collection{
// "Start": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 4,
// EndLine: 5,
// StartPos: 39,
// EndPos: 44,
// },
// Value: "\n\t\t\t\t",
// },
// },
// "ModifierList": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 50,
// EndPos: 51,
// },
// Value: " ",
// },
// },
// "Function": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 59,
// EndPos: 60,
// },
// Value: " ",
// },
// },
// },
// ReturnsRef: false,
// PhpDocComment: "",
// MethodName: &node.Identifier{
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 60,
// EndPos: 72,
// },
// Value: "FunctionName",
// },
// Modifiers: []node.Node{
// &node.Identifier{
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 44,
// EndPos: 50,
// },
// Value: "public",
// },
// },
// Params: []node.Node{
// &node.Parameter{
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 73,
// EndPos: 89,
// },
// FreeFloating: freefloating.Collection{
// "OptionalType": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 77,
// EndPos: 78,
// },
// Value: " ",
// },
// },
// "Var": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 82,
// EndPos: 83,
// },
// Value: " ",
// },
// },
// },
// ByRef: false,
// Variadic: false,
// VariableType: &name.Name{
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 73,
// EndPos: 77,
// },
// Parts: []node.Node{
// &name.NamePart{
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 73,
// EndPos: 77,
// },
// Value: "Type",
// },
// },
// },
// Variable: &expr.Variable{
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 78,
// EndPos: 82,
// },
// FreeFloating: freefloating.Collection{
// "Dollar": []freefloating.String{
// freefloating.String{
// Type: freefloating.TokenType,
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 78,
// EndPos: 79,
// },
// Value: "$",
// },
// },
// },
// VarName: &node.Identifier{
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 78,
// EndPos: 82,
// },
// Value: "var",
// },
// },
// DefaultValue: &expr.ConstFetch{
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 85,
// EndPos: 89,
// },
// FreeFloating: freefloating.Collection{
// "Start": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 84,
// EndPos: 85,
// },
// Value: " ",
// },
// },
// },
// Constant: &name.Name{
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 85,
// EndPos: 89,
// },
// Parts: []node.Node{
// &name.NamePart{
// Position: &position.Position{
// StartLine: 5,
// EndLine: 5,
// StartPos: 85,
// EndPos: 89,
// },
// Value: "null",
// },
// },
// },
// },
// },
// },
// Stmt: &stmt.StmtList{
// Position: &position.Position{
// StartLine: 6,
// EndLine: 9,
// StartPos: 95,
// EndPos: 134,
// },
// FreeFloating: freefloating.Collection{
// "Start": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 5,
// EndLine: 6,
// StartPos: 90,
// EndPos: 95,
// },
// Value: "\n\t\t\t\t",
// },
// },
// "Stmts": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 8,
// EndLine: 9,
// StartPos: 128,
// EndPos: 133,
// },
// Value: "\n\t\t\t\t",
// },
// },
// },
// Stmts: []node.Node{
// &stmt.Expression{
// Position: &position.Position{
// StartLine: 8,
// EndLine: 8,
// StartPos: 123,
// EndPos: 128,
// },
// FreeFloating: freefloating.Collection{
// "Start": []freefloating.String{
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 6,
// EndLine: 7,
// StartPos: 96,
// EndPos: 102,
// },
// Value: "\n\t\t\t\t\t",
// },
// freefloating.String{
// Type: freefloating.CommentType,
// Position: &position.Position{
// StartLine: 7,
// EndLine: 7,
// StartPos: 102,
// EndPos: 118,
// },
// Value: "// some comment\n",
// },
// freefloating.String{
// Type: freefloating.WhiteSpaceType,
// Position: &position.Position{
// StartLine: 8,
// EndLine: 8,
// StartPos: 118,
// EndPos: 123,
// },
// Value: "\t\t\t\t\t",
// },
// },
// "SemiColon": []freefloating.String{
// freefloating.String{
// Type: freefloating.TokenType,
// Position: &position.Position{
// StartLine: 8,
// EndLine: 8,
// StartPos: 127,
// EndPos: 128,
// },
// Value: ";",
// },
// },
// },
// Expr: &expr.Variable{
// Position: &position.Position{
// StartLine: 8,
// EndLine: 8,
// StartPos: 123,
// EndPos: 127,
// },
// FreeFloating: freefloating.Collection{
// "Dollar": []freefloating.String{
// freefloating.String{
// Type: freefloating.TokenType,
// Position: &position.Position{
// StartLine: 8,
// EndLine: 8,
// StartPos: 123,
// EndPos: 124,
// },
// Value: "$",
// },
// },
// },
// VarName: &node.Identifier{
// Position: &position.Position{
// StartLine: 8,
// EndLine: 8,
// StartPos: 123,
// EndPos: 127,
// },
// Value: "var",
// },
// },
// },
// },
// },
// },
// },
// },
// },
// },
// },
// }
}

View File

@ -1,142 +0,0 @@
// Package visitor contains walker.visitor implementations
package visitor
import (
"fmt"
"io"
"reflect"
"sort"
"github.com/z7zmey/php-parser/freefloating"
"github.com/z7zmey/php-parser/node"
"github.com/z7zmey/php-parser/walker"
)
type JsonDumper struct {
Writer io.Writer
NsResolver *NamespaceResolver
isChildNode bool
isNotFirstNode bool
}
// EnterNode is invoked at every node in hierarchy. It opens the node's JSON
// object and prints its type, position, resolved namespaced name,
// free-floating tokens and scalar attributes, then returns true so the
// walker descends into children.
func (d *JsonDumper) EnterNode(w walker.Walkable) bool {
	n := w.(node.Node)
	nodeType := reflect.TypeOf(n).String()

	// Emit a "," between siblings, but not before the first element of a
	// list and not right after an object key printed by EnterChildNode.
	if d.isChildNode {
		d.isChildNode = false
	} else if d.isNotFirstNode {
		fmt.Fprint(d.Writer, ",")
	} else {
		d.isNotFirstNode = true
	}

	fmt.Fprintf(d.Writer, "{%q:%q", "type", nodeType)

	// Was fetched twice via a shadowing re-declaration; the if-initializer
	// value is reused directly now.
	if p := n.GetPosition(); p != nil {
		fmt.Fprintf(d.Writer, ",%q:{%q:%d,%q:%d,%q:%d,%q:%d}",
			"position",
			"startPos", p.StartPos,
			"endPos", p.EndPos,
			"startLine", p.StartLine,
			"endLine", p.EndLine)
	}

	if d.NsResolver != nil {
		if namespacedName, ok := d.NsResolver.ResolvedNames[n]; ok {
			fmt.Fprintf(d.Writer, ",%q:%q", "namespacedName", namespacedName)
		}
	}

	if !n.GetFreeFloating().IsEmpty() {
		fmt.Fprintf(d.Writer, ",%q:{", "freefloating")

		// Sort position keys so the output is deterministic (map iteration
		// order is random).
		var freefloatingStringsKeys []int
		for key := range *n.GetFreeFloating() {
			freefloatingStringsKeys = append(freefloatingStringsKeys, int(key))
		}
		sort.Ints(freefloatingStringsKeys)

		i := 0
		for _, k := range freefloatingStringsKeys {
			key := freefloating.Position(k)
			freeFloatingStrings := (*n.GetFreeFloating())[key]
			if i != 0 {
				fmt.Fprint(d.Writer, ",")
			}
			i++

			fmt.Fprintf(d.Writer, "%q: [", key.String())

			j := 0
			for _, freeFloatingString := range freeFloatingStrings {
				if j != 0 {
					fmt.Fprint(d.Writer, ",")
				}
				j++

				switch freeFloatingString.StringType {
				case freefloating.CommentType:
					fmt.Fprintf(d.Writer, "{%q:%q,%q:%q}", "type", "freefloating.CommentType", "value", freeFloatingString.Value)
				case freefloating.WhiteSpaceType:
					fmt.Fprintf(d.Writer, "{%q:%q,%q:%q}", "type", "freefloating.WhiteSpaceType", "value", freeFloatingString.Value)
				case freefloating.TokenType:
					fmt.Fprintf(d.Writer, "{%q:%q,%q:%q}", "type", "freefloating.TokenType", "value", freeFloatingString.Value)
				}
			}
			fmt.Fprint(d.Writer, "]")
		}
		fmt.Fprint(d.Writer, "}")
	}

	if a := n.Attributes(); len(a) > 0 {
		// Sort attribute names for deterministic output; iterate over the
		// already-fetched map instead of calling Attributes() again.
		var attributes []string
		for key := range a {
			attributes = append(attributes, key)
		}
		sort.Strings(attributes)

		for _, attributeName := range attributes {
			attr := a[attributeName]
			switch attr.(type) {
			case string:
				fmt.Fprintf(d.Writer, ",\"%s\":%q", attributeName, attr)
			default:
				fmt.Fprintf(d.Writer, ",\"%s\":%v", attributeName, attr)
			}
		}
	}

	return true
}
// LeaveNode is invoked after node process; it closes the JSON object that
// EnterNode opened for this node.
func (d *JsonDumper) LeaveNode(n walker.Walkable) {
	io.WriteString(d.Writer, "}")
}
// EnterChildNode prints the child's object key and flags that the next
// EnterNode must not emit a "," separator.
func (d *JsonDumper) EnterChildNode(key string, w walker.Walkable) {
	d.isChildNode = true
	fmt.Fprintf(d.Writer, ",%q:", key)
}
// LeaveChildNode is a walker hook that needs no action here: the child's
// own LeaveNode already closes its JSON object.
func (d *JsonDumper) LeaveChildNode(key string, w walker.Walkable) {
	// do nothing
}
// EnterChildList opens a JSON array for the named child list and resets the
// sibling state so the first element is not preceded by ",".
func (d *JsonDumper) EnterChildList(key string, w walker.Walkable) {
	d.isNotFirstNode = false
	fmt.Fprintf(d.Writer, ",%q:[", key)
}
// LeaveChildList closes the JSON array opened by EnterChildList.
func (d *JsonDumper) LeaveChildList(key string, w walker.Walkable) {
	io.WriteString(d.Writer, "]")
}

View File

@ -1,41 +0,0 @@
package visitor_test
import (
"os"
"github.com/z7zmey/php-parser/php7"
"github.com/z7zmey/php-parser/visitor"
)
// ExampleJsonDumper dumps a parsed PHP AST, together with resolved
// namespaced names, as JSON on stdout.
//
// NOTE(review): the whitespace inside the src literal is significant — the
// positions in the expected output depend on it; do not re-indent.
func ExampleJsonDumper() {
	src := `<?php
namespace Foo {
class Bar {
public function FunctionName(Type $var = null)
{
// some comment
// second comment
$var1;
$var2;
}
}
}`

	php7parser := php7.NewParser([]byte(src), "7.4")
	php7parser.WithFreeFloating()
	php7parser.Parse()
	nodes := php7parser.GetRootNode()

	// Resolve namespaced names first so the dumper can print them.
	nsResolver := visitor.NewNamespaceResolver()
	nodes.Walk(nsResolver)

	dumper := &visitor.JsonDumper{
		Writer:     os.Stdout,
		NsResolver: nsResolver,
	}
	nodes.Walk(dumper)

	// Output:
	// {"type":"*node.Root","position":{"startPos":9,"endPos":179,"startLine":3,"endLine":13},"Stmts":[{"type":"*stmt.Namespace","position":{"startPos":9,"endPos":179,"startLine":3,"endLine":13},"freefloating":{"Start": [{"type":"freefloating.TokenType","value":"<?php"},{"type":"freefloating.WhiteSpaceType","value":"\n\n\t\t"}],"Stmts": [{"type":"freefloating.WhiteSpaceType","value":"\n\t\t"}]},"NamespaceName":{"type":"*name.Name","position":{"startPos":19,"endPos":22,"startLine":3,"endLine":3},"freefloating":{"Start": [{"type":"freefloating.WhiteSpaceType","value":" "}],"End": [{"type":"freefloating.WhiteSpaceType","value":" "}]},"Parts":[{"type":"*name.NamePart","position":{"startPos":19,"endPos":22,"startLine":3,"endLine":3},"Value":"Foo"}]},"Stmts":[{"type":"*stmt.Class","position":{"startPos":28,"endPos":175,"startLine":4,"endLine":12},"namespacedName":"Foo\\Bar","freefloating":{"Start": [{"type":"freefloating.WhiteSpaceType","value":"\n\t\t\t"}],"Name": [{"type":"freefloating.WhiteSpaceType","value":" "}],"Stmts": [{"type":"freefloating.WhiteSpaceType","value":"\n\t\t\t"}]},"PhpDocComment":"","ClassName":{"type":"*node.Identifier","position":{"startPos":34,"endPos":37,"startLine":4,"endLine":4},"freefloating":{"Start": [{"type":"freefloating.WhiteSpaceType","value":" "}]},"Value":"Bar"},"Stmts":[{"type":"*stmt.ClassMethod","position":{"startPos":44,"endPos":170,"startLine":5,"endLine":11},"freefloating":{"Start": [{"type":"freefloating.WhiteSpaceType","value":"\n\t\t\t\t"}],"Function": [{"type":"freefloating.WhiteSpaceType","value":" "}],"ModifierList": [{"type":"freefloating.WhiteSpaceType","value":" "}]},"PhpDocComment":"","ReturnsRef":false,"MethodName":{"type":"*node.Identifier","position":{"startPos":60,"endPos":72,"startLine":5,"endLine":5},"Value":"FunctionName"},"Modifiers":[{"type":"*node.Identifier","position":{"startPos":44,"endPos":50,"startLine":5,"endLine":5},"Value":"public"}],"Params":[{"type":"*node.Parameter","position":{"startPos":73,"endPos":89,"startLine":5,"endLine":5},"freefloating":{"Var": [{"type":"freefloating.WhiteSpaceType","value":" "}],"OptionalType": [{"type":"freefloating.WhiteSpaceType","value":" "}]},"ByRef":false,"Variadic":false,"VariableType":{"type":"*name.Name","position":{"startPos":73,"endPos":77,"startLine":5,"endLine":5},"namespacedName":"Foo\\Type","Parts":[{"type":"*name.NamePart","position":{"startPos":73,"endPos":77,"startLine":5,"endLine":5},"Value":"Type"}]},"Variable":{"type":"*expr.Variable","position":{"startPos":78,"endPos":82,"startLine":5,"endLine":5},"freefloating":{"Dollar": [{"type":"freefloating.TokenType","value":"$"}]},"VarName":{"type":"*node.Identifier","position":{"startPos":78,"endPos":82,"startLine":5,"endLine":5},"Value":"var"}},"DefaultValue":{"type":"*expr.ConstFetch","position":{"startPos":85,"endPos":89,"startLine":5,"endLine":5},"freefloating":{"Start": [{"type":"freefloating.WhiteSpaceType","value":" "}]},"Constant":{"type":"*name.Name","position":{"startPos":85,"endPos":89,"startLine":5,"endLine":5},"namespacedName":"null","Parts":[{"type":"*name.NamePart","position":{"startPos":85,"endPos":89,"startLine":5,"endLine":5},"Value":"null"}]}}}],"Stmt":{"type":"*stmt.StmtList","position":{"startPos":95,"endPos":170,"startLine":6,"endLine":11},"freefloating":{"Start": [{"type":"freefloating.WhiteSpaceType","value":"\n\t\t\t\t"}],"Stmts": [{"type":"freefloating.WhiteSpaceType","value":"\n\t\t\t\t"}]},"Stmts":[{"type":"*stmt.Expression","position":{"startPos":146,"endPos":152,"startLine":9,"endLine":9},"freefloating":{"Start": [{"type":"freefloating.WhiteSpaceType","value":"\n\t\t\t\t\t"},{"type":"freefloating.CommentType","value":"// some comment\n"},{"type":"freefloating.WhiteSpaceType","value":"\t\t\t\t\t"},{"type":"freefloating.CommentType","value":"// second comment\n"},{"type":"freefloating.WhiteSpaceType","value":"\t\t\t\t\t"}],"SemiColon": [{"type":"freefloating.TokenType","value":";"}]},"Expr":{"type":"*expr.Variable","position":{"startPos":146,"endPos":151,"startLine":9,"endLine":9},"freefloating":{"Dollar": [{"type":"freefloating.TokenType","value":"$"}]},"VarName":{"type":"*node.Identifier","position":{"startPos":146,"endPos":151,"startLine":9,"endLine":9},"Value":"var1"}}},{"type":"*stmt.Expression","position":{"startPos":158,"endPos":164,"startLine":10,"endLine":10},"freefloating":{"Start": [{"type":"freefloating.WhiteSpaceType","value":"\n\t\t\t\t\t"}],"SemiColon": [{"type":"freefloating.TokenType","value":";"}]},"Expr":{"type":"*expr.Variable","position":{"startPos":158,"endPos":163,"startLine":10,"endLine":10},"freefloating":{"Dollar": [{"type":"freefloating.TokenType","value":"$"}]},"VarName":{"type":"*node.Identifier","position":{"startPos":158,"endPos":163,"startLine":10,"endLine":10},"Value":"var2"}}}]}}]}]}]}
}

View File

@ -1,392 +0,0 @@
// Package visitor contains walker.visitor implementations
package visitor
import (
"errors"
"strings"
"github.com/z7zmey/php-parser/node/expr"
"github.com/z7zmey/php-parser/node"
"github.com/z7zmey/php-parser/node/name"
"github.com/z7zmey/php-parser/node/stmt"
"github.com/z7zmey/php-parser/walker"
)
// NamespaceResolver visitor walks the AST and resolves fully qualified names.
type NamespaceResolver struct {
	Namespace     *Namespace           // namespace context the walker is currently inside
	ResolvedNames map[node.Node]string // maps a name/declaration node to its fully qualified name
}
// NewNamespaceResolver creates a NamespaceResolver rooted in the global
// namespace with no resolved names yet.
func NewNamespaceResolver() *NamespaceResolver {
	resolver := new(NamespaceResolver)
	resolver.Namespace = NewNamespace("")
	resolver.ResolvedNames = make(map[node.Node]string)
	return resolver
}
// EnterNode is invoked at every node in hierarchy. It maintains the current
// namespace context, registers "use" aliases, assigns fully qualified names
// to declarations, and resolves every referenced class/function/constant name.
func (nsr *NamespaceResolver) EnterNode(w walker.Walkable) bool {
	switch n := w.(type) {
	case *stmt.Namespace:
		// Entering a namespace replaces the whole context, aliases included.
		if n.NamespaceName == nil {
			nsr.Namespace = NewNamespace("")
		} else {
			NSParts := n.NamespaceName.(*name.Name).Parts
			nsr.Namespace = NewNamespace(concatNameParts(NSParts))
		}
	case *stmt.UseList:
		// "use A\B;", optionally typed: "use function ...;" / "use const ...;"
		useType := ""
		if n.UseType != nil {
			useType = n.UseType.(*node.Identifier).Value
		}

		for _, nn := range n.Uses {
			nsr.AddAlias(useType, nn, nil)
		}

		// no reason to iterate into depth
		return false
	case *stmt.GroupUse:
		// "use A\B\{C, function D};" — every entry is prefixed with n.Prefix.
		useType := ""
		if n.UseType != nil {
			useType = n.UseType.(*node.Identifier).Value
		}

		for _, nn := range n.UseList {
			nsr.AddAlias(useType, nn, n.Prefix.(*name.Name).Parts)
		}

		// no reason to iterate into depth
		return false
	case *stmt.Class:
		if n.Extends != nil {
			nsr.ResolveName(n.Extends.ClassName, "")
		}

		if n.Implements != nil {
			for _, interfaceName := range n.Implements.InterfaceNames {
				nsr.ResolveName(interfaceName, "")
			}
		}

		// ClassName may be nil (presumably anonymous classes); only named
		// classes get a namespaced name.
		if n.ClassName != nil {
			nsr.AddNamespacedName(n, n.ClassName.(*node.Identifier).Value)
		}
	case *stmt.Interface:
		if n.Extends != nil {
			for _, interfaceName := range n.Extends.InterfaceNames {
				nsr.ResolveName(interfaceName, "")
			}
		}

		nsr.AddNamespacedName(n, n.InterfaceName.(*node.Identifier).Value)
	case *stmt.Trait:
		nsr.AddNamespacedName(n, n.TraitName.(*node.Identifier).Value)
	case *stmt.Function:
		nsr.AddNamespacedName(n, n.FunctionName.(*node.Identifier).Value)

		// Parameter type hints and the return type are resolvable names.
		for _, parameter := range n.Params {
			nsr.ResolveType(parameter.(*node.Parameter).VariableType)
		}

		if n.ReturnType != nil {
			nsr.ResolveType(n.ReturnType)
		}
	case *stmt.ClassMethod:
		for _, parameter := range n.Params {
			nsr.ResolveType(parameter.(*node.Parameter).VariableType)
		}

		if n.ReturnType != nil {
			nsr.ResolveType(n.ReturnType)
		}
	case *expr.Closure:
		for _, parameter := range n.Params {
			nsr.ResolveType(parameter.(*node.Parameter).VariableType)
		}

		if n.ReturnType != nil {
			nsr.ResolveType(n.ReturnType)
		}
	case *stmt.ConstList:
		for _, constant := range n.Consts {
			nsr.AddNamespacedName(constant, constant.(*stmt.Constant).ConstantName.(*node.Identifier).Value)
		}
	case *expr.StaticCall:
		nsr.ResolveName(n.Class, "")
	case *expr.StaticPropertyFetch:
		nsr.ResolveName(n.Class, "")
	case *expr.ClassConstFetch:
		nsr.ResolveName(n.Class, "")
	case *expr.New:
		nsr.ResolveName(n.Class, "")
	case *expr.InstanceOf:
		nsr.ResolveName(n.Class, "")
	case *stmt.Catch:
		for _, t := range n.Types {
			nsr.ResolveName(t, "")
		}
	case *expr.FunctionCall:
		// Functions resolve through the "function" alias table.
		nsr.ResolveName(n.Function, "function")
	case *expr.ConstFetch:
		// Constants resolve through the "const" alias table.
		nsr.ResolveName(n.Constant, "const")
	case *stmt.TraitUse:
		for _, t := range n.Traits {
			nsr.ResolveName(t, "")
		}

		// Trait adaptations ("insteadof"/"as") may reference trait names too.
		if adaptationList, ok := n.TraitAdaptationList.(*stmt.TraitAdaptationList); ok {
			for _, a := range adaptationList.Adaptations {
				switch aa := a.(type) {
				case *stmt.TraitUsePrecedence:
					refTrait := aa.Ref.(*stmt.TraitMethodRef).Trait
					if refTrait != nil {
						nsr.ResolveName(refTrait, "")
					}
					for _, insteadOf := range aa.Insteadof {
						nsr.ResolveName(insteadOf, "")
					}
				case *stmt.TraitUseAlias:
					refTrait := aa.Ref.(*stmt.TraitMethodRef).Trait
					if refTrait != nil {
						nsr.ResolveName(refTrait, "")
					}
				}
			}
		}
	}

	return true
}
// LeaveNode is invoked after node process; leaving a namespace that has a
// braced statement block resets the context back to the global namespace.
func (nsr *NamespaceResolver) LeaveNode(w walker.Walkable) {
	ns, ok := w.(*stmt.Namespace)
	if ok && ns.Stmts != nil {
		nsr.Namespace = NewNamespace("")
	}
}
// EnterChildNode is a walker hook; name resolution does not depend on
// child-node boundaries, so it does nothing.
func (nsr *NamespaceResolver) EnterChildNode(key string, w walker.Walkable) {
	// do nothing
}

// LeaveChildNode is a walker hook; intentionally a no-op.
func (nsr *NamespaceResolver) LeaveChildNode(key string, w walker.Walkable) {
	// do nothing
}

// EnterChildList is a walker hook; intentionally a no-op.
func (nsr *NamespaceResolver) EnterChildList(key string, w walker.Walkable) {
	// do nothing
}

// LeaveChildList is a walker hook; intentionally a no-op.
func (nsr *NamespaceResolver) LeaveChildList(key string, w walker.Walkable) {
	// do nothing
}
// AddAlias registers a use-statement alias in the current namespace.
// prefix carries the group-use prefix parts (nil for plain use lists);
// useType ("", "const" or "function") can be overridden per use node.
// Non-*stmt.Use nodes are ignored.
func (nsr *NamespaceResolver) AddAlias(useType string, nn node.Node, prefix []node.Node) {
	use, ok := nn.(*stmt.Use)
	if !ok {
		return
	}

	if use.UseType != nil {
		useType = use.UseType.(*node.Identifier).Value
	}

	parts := use.Use.(*name.Name).Parts

	// Explicit alias wins; otherwise the last name part is the alias.
	var alias string
	if use.Alias != nil {
		alias = use.Alias.(*node.Identifier).Value
	} else {
		alias = parts[len(parts)-1].(*name.NamePart).Value
	}

	nsr.Namespace.AddAlias(useType, concatNameParts(prefix, parts), alias)
}
// AddNamespacedName records the fully qualified name of a declaration node,
// prefixing it with the current namespace when one is active.
func (nsr *NamespaceResolver) AddNamespacedName(nn node.Node, nodeName string) {
	fqn := nodeName
	if nsr.Namespace.Namespace != "" {
		fqn = nsr.Namespace.Namespace + "\\" + nodeName
	}
	nsr.ResolvedNames[nn] = fqn
}
// ResolveName records the fully qualified form of a name node. Resolution
// failures are ignored: the node simply stays unresolved.
func (nsr *NamespaceResolver) ResolveName(nameNode node.Node, aliasType string) {
	if resolved, err := nsr.Namespace.ResolveName(nameNode, aliasType); err == nil {
		nsr.ResolvedNames[nameNode] = resolved
	}
}
// ResolveType resolves a type-hint name, unwrapping nullable types ("?Foo")
// recursively before resolving the inner name.
func (nsr *NamespaceResolver) ResolveType(n node.Node) {
	if nullable, ok := n.(*node.Nullable); ok {
		nsr.ResolveType(nullable.Expr)
		return
	}
	if _, ok := n.(name.Names); ok {
		nsr.ResolveName(n, "")
	}
}
// Namespace context: one PHP namespace with its registered use aliases.
type Namespace struct {
	Namespace string                       // namespace name ("" for the global namespace)
	Aliases   map[string]map[string]string // alias tables keyed by kind: "" (class), "const", "function"
}
// NewNamespace builds a namespace context with empty alias tables for the
// three alias kinds: class (""), "const" and "function".
func NewNamespace(NSName string) *Namespace {
	aliases := make(map[string]map[string]string, 3)
	for _, kind := range []string{"", "const", "function"} {
		aliases[kind] = map[string]string{}
	}

	return &Namespace{
		Namespace: NSName,
		Aliases:   aliases,
	}
}
// AddAlias registers an alias for a fully qualified name. Class and function
// aliases are matched case-insensitively; constant aliases keep their case.
func (ns *Namespace) AddAlias(aliasType string, aliasName string, alias string) {
	aliasType = strings.ToLower(aliasType)

	key := alias
	if aliasType != "const" {
		key = strings.ToLower(alias)
	}

	ns.Aliases[aliasType][key] = aliasName
}
// ResolveName returns a resolved fully qualified name for nameNode, taking
// the current namespace and the registered aliases into account. aliasType
// selects the alias table: "" (classes), "const" or "function". It returns
// an error when nameNode is not one of the name.* node kinds.
func (ns *Namespace) ResolveName(nameNode node.Node, aliasType string) (string, error) {
	switch n := nameNode.(type) {
	case *name.FullyQualified:
		// Fully qualified name is already resolved
		return concatNameParts(n.Parts), nil

	case *name.Relative:
		// "namespace\Foo" is relative to the current namespace.
		if ns.Namespace == "" {
			return concatNameParts(n.Parts), nil
		}
		return ns.Namespace + "\\" + concatNameParts(n.Parts), nil

	case *name.Name:
		if aliasType == "const" && len(n.Parts) == 1 {
			// true/false/null are language constants, never namespaced.
			part := strings.ToLower(n.Parts[0].(*name.NamePart).Value)
			if part == "true" || part == "false" || part == "null" {
				return part, nil
			}
		}

		if aliasType == "" && len(n.Parts) == 1 {
			// Reserved class/type names are never namespaced. The former
			// ten-case fallthrough chain is a single multi-value case now.
			part := strings.ToLower(n.Parts[0].(*name.NamePart).Value)
			switch part {
			case "self", "static", "parent",
				"int", "float", "bool", "string",
				"void", "iterable", "object":
				return part, nil
			}
		}

		aliasName, err := ns.ResolveAlias(nameNode, aliasType)
		if err != nil {
			// resolve as relative name if alias not found
			if ns.Namespace == "" {
				return concatNameParts(n.Parts), nil
			}
			return ns.Namespace + "\\" + concatNameParts(n.Parts), nil
		}

		if len(n.Parts) > 1 {
			// if name qualified, replace first part by alias
			return aliasName + "\\" + concatNameParts(n.Parts[1:]), nil
		}

		return aliasName, nil
	}

	return "", errors.New("must be instance of name.Names")
}
// ResolveAlias returns the aliased fully qualified name for nameNode, or an
// error when no matching alias is registered. Callers only test err != nil,
// so the error carries no structured data.
func (ns *Namespace) ResolveAlias(nameNode node.Node, aliasType string) (string, error) {
	aliasType = strings.ToLower(aliasType)
	nameParts := nameNode.(*name.Name).Parts

	firstPartStr := nameParts[0].(*name.NamePart).Value

	if len(nameParts) > 1 {
		// Qualified names always resolve against the class alias table,
		// and their first part is case-insensitive.
		firstPartStr = strings.ToLower(firstPartStr)
		aliasType = ""
	} else if aliasType != "const" {
		// Constants are case-sensitive; everything else is not.
		firstPartStr = strings.ToLower(firstPartStr)
	}

	aliasName, ok := ns.Aliases[aliasType][firstPartStr]
	if !ok {
		// Lowercase error string per Go convention (was "Not found").
		return "", errors.New("not found")
	}

	return aliasName, nil
}
// concatNameParts joins one or more slices of name parts with "\" separators.
// A strings.Builder replaces the previous str = str + "\\" + ... loop, which
// was quadratic in the number of parts.
func concatNameParts(parts ...[]node.Node) string {
	var b strings.Builder

	for _, p := range parts {
		for _, n := range p {
			if b.Len() > 0 {
				b.WriteString("\\")
			}
			b.WriteString(n.(*name.NamePart).Value)
		}
	}

	return b.String()
}

View File

@ -1,976 +0,0 @@
package visitor_test
import (
"testing"
"gotest.tools/assert"
"github.com/z7zmey/php-parser/node"
"github.com/z7zmey/php-parser/node/expr"
"github.com/z7zmey/php-parser/node/name"
"github.com/z7zmey/php-parser/node/scalar"
"github.com/z7zmey/php-parser/node/stmt"
"github.com/z7zmey/php-parser/visitor"
)
// TestResolveStaticCall checks that the class of a static call resolves
// through a registered use alias (B\C -> A\B\C).
func TestResolveStaticCall(t *testing.T) {
	nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
	nameBC := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}, &name.NamePart{Value: "C"}}}

	ast := &stmt.StmtList{
		Stmts: []node.Node{
			&stmt.UseList{
				Uses: []node.Node{
					&stmt.Use{
						Use: nameAB,
					},
				},
			},
			&expr.StaticCall{
				Class:        nameBC,
				Call:         &node.Identifier{Value: "foo"},
				ArgumentList: &node.ArgumentList{},
			},
		},
	}

	expected := map[node.Node]string{
		nameBC: "A\\B\\C",
	}

	nsResolver := visitor.NewNamespaceResolver()
	ast.Walk(nsResolver)

	assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}

// TestResolveStaticPropertyFetch checks alias resolution for the class of a
// static property fetch.
func TestResolveStaticPropertyFetch(t *testing.T) {
	nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
	nameBC := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}, &name.NamePart{Value: "C"}}}

	ast := &stmt.StmtList{
		Stmts: []node.Node{
			&stmt.UseList{
				Uses: []node.Node{
					&stmt.Use{
						Use: nameAB,
					},
				},
			},
			&expr.StaticPropertyFetch{
				Class:    nameBC,
				Property: &node.Identifier{Value: "foo"},
			},
		},
	}

	expected := map[node.Node]string{
		nameBC: "A\\B\\C",
	}

	nsResolver := visitor.NewNamespaceResolver()
	ast.Walk(nsResolver)

	assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}

// TestResolveClassConstFetch checks alias resolution for the class of a
// class-constant fetch.
func TestResolveClassConstFetch(t *testing.T) {
	nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
	nameBC := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}, &name.NamePart{Value: "C"}}}

	ast := &stmt.StmtList{
		Stmts: []node.Node{
			&stmt.UseList{
				Uses: []node.Node{
					&stmt.Use{
						Use: nameAB,
					},
				},
			},
			&expr.ClassConstFetch{
				Class:        nameBC,
				ConstantName: &node.Identifier{Value: "FOO"},
			},
		},
	}

	expected := map[node.Node]string{
		nameBC: "A\\B\\C",
	}

	nsResolver := visitor.NewNamespaceResolver()
	ast.Walk(nsResolver)

	assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}

// TestResolveNew checks alias resolution for the class of a "new" expression.
func TestResolveNew(t *testing.T) {
	nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
	nameBC := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}, &name.NamePart{Value: "C"}}}

	ast := &stmt.StmtList{
		Stmts: []node.Node{
			&stmt.UseList{
				Uses: []node.Node{
					&stmt.Use{
						Use: nameAB,
					},
				},
			},
			&expr.New{
				Class:        nameBC,
				ArgumentList: &node.ArgumentList{},
			},
		},
	}

	expected := map[node.Node]string{
		nameBC: "A\\B\\C",
	}

	nsResolver := visitor.NewNamespaceResolver()
	ast.Walk(nsResolver)

	assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}

// TestResolveInstanceOf checks alias resolution for the class operand of
// "instanceof".
func TestResolveInstanceOf(t *testing.T) {
	nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
	nameBC := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}, &name.NamePart{Value: "C"}}}

	ast := &stmt.StmtList{
		Stmts: []node.Node{
			&stmt.UseList{
				Uses: []node.Node{
					&stmt.Use{
						Use: nameAB,
					},
				},
			},
			&expr.InstanceOf{
				Expr:  &expr.Variable{VarName: &node.Identifier{Value: "foo"}},
				Class: nameBC,
			},
		},
	}

	expected := map[node.Node]string{
		nameBC: "A\\B\\C",
	}

	nsResolver := visitor.NewNamespaceResolver()
	ast.Walk(nsResolver)

	assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}
// TestResolveInstanceCatch checks that every type listed in a catch clause
// resolves, including one that goes through an explicit alias ("F" -> D\E).
func TestResolveInstanceCatch(t *testing.T) {
	nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
	nameBC := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}, &name.NamePart{Value: "C"}}}

	nameDE := &name.Name{Parts: []node.Node{&name.NamePart{Value: "D"}, &name.NamePart{Value: "E"}}}
	nameF := &name.Name{Parts: []node.Node{&name.NamePart{Value: "F"}}}

	ast := &stmt.StmtList{
		Stmts: []node.Node{
			&stmt.UseList{
				Uses: []node.Node{
					&stmt.Use{
						Use: nameAB,
					},
					&stmt.Use{
						Use:   nameDE,
						Alias: &node.Identifier{Value: "F"},
					},
				},
			},
			&stmt.Try{
				Stmts: []node.Node{},
				Catches: []node.Node{
					&stmt.Catch{
						Types: []node.Node{
							nameBC,
							nameF,
						},
						Variable: &expr.Variable{VarName: &node.Identifier{Value: "foo"}},
						Stmts:    []node.Node{},
					},
				},
			},
		},
	}

	expected := map[node.Node]string{
		nameBC: "A\\B\\C",
		nameF:  "D\\E",
	}

	nsResolver := visitor.NewNamespaceResolver()
	ast.Walk(nsResolver)

	assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}

// TestResolveFunctionCall checks that function names resolve through the
// "function" alias table.
func TestResolveFunctionCall(t *testing.T) {
	nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
	nameB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}}}

	ast := &stmt.StmtList{
		Stmts: []node.Node{
			&stmt.UseList{
				UseType: &node.Identifier{Value: "function"},
				Uses: []node.Node{
					&stmt.Use{
						Use: nameAB,
					},
				},
			},
			&expr.FunctionCall{
				Function:     nameB,
				ArgumentList: &node.ArgumentList{},
			},
		},
	}

	expected := map[node.Node]string{
		nameB: "A\\B",
	}

	nsResolver := visitor.NewNamespaceResolver()
	ast.Walk(nsResolver)

	assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}

// TestResolveConstFetch checks that constant names resolve through the
// "const" alias table.
func TestResolveConstFetch(t *testing.T) {
	nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
	nameB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}}}

	ast := &stmt.StmtList{
		Stmts: []node.Node{
			&stmt.UseList{
				UseType: &node.Identifier{Value: "const"},
				Uses: []node.Node{
					&stmt.Use{
						Use: nameAB,
					},
				},
			},
			&expr.ConstFetch{
				Constant: nameB,
			},
		},
	}

	expected := map[node.Node]string{
		nameB: "A\\B",
	}

	nsResolver := visitor.NewNamespaceResolver()
	ast.Walk(nsResolver)

	assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}

// TestResolveGroupUse checks group-use statements: per-entry use types inside
// a group, a group-level use type, and case-insensitive "Function" kind.
func TestResolveGroupUse(t *testing.T) {
	nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
	nameBD := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}, &name.NamePart{Value: "D"}}}
	nameE := &name.Name{Parts: []node.Node{&name.NamePart{Value: "E"}}}
	nameC := &name.Name{Parts: []node.Node{&name.NamePart{Value: "C"}}}
	nameF := &name.Name{Parts: []node.Node{&name.NamePart{Value: "F"}}}

	ast := &stmt.StmtList{
		Stmts: []node.Node{
			&stmt.GroupUse{
				Prefix: nameAB,
				UseList: []node.Node{
					&stmt.Use{
						UseType: &node.Identifier{Value: "Function"},
						Use:     nameF,
					},
					&stmt.Use{
						UseType: &node.Identifier{Value: "const"},
						Use:     nameC,
					},
				},
			},
			&stmt.GroupUse{
				Prefix:  nameBD,
				UseType: &node.Identifier{Value: "Function"},
				UseList: []node.Node{
					&stmt.Use{
						Use: nameE,
					},
				},
			},
			&expr.ConstFetch{
				Constant: nameC,
			},
			&expr.FunctionCall{
				Function:     nameF,
				ArgumentList: &node.ArgumentList{},
			},
			&expr.FunctionCall{
				Function:     nameE,
				ArgumentList: &node.ArgumentList{},
			},
		},
	}

	expected := map[node.Node]string{
		nameC: "A\\B\\C",
		nameF: "A\\B\\F",
		nameE: "B\\D\\E",
	}

	nsResolver := visitor.NewNamespaceResolver()
	ast.Walk(nsResolver)

	assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}
// TestResolveTraitUse checks that trait names, precedence ("insteadof") and
// alias ("as") adaptations all resolve, covering plain, relative and fully
// qualified name forms.
func TestResolveTraitUse(t *testing.T) {
	nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
	nameB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}}}
	nameD := &name.Name{Parts: []node.Node{&name.NamePart{Value: "D"}}}

	fullyQualifiedNameB := &name.FullyQualified{Parts: []node.Node{&name.NamePart{Value: "B"}}}
	fullyQualifiedNameBC := &name.FullyQualified{Parts: []node.Node{&name.NamePart{Value: "B"}, &name.NamePart{Value: "C"}}}
	relativeNameB := &name.Relative{Parts: []node.Node{&name.NamePart{Value: "B"}}}
	relativeNameBC := &name.Relative{Parts: []node.Node{&name.NamePart{Value: "B"}, &name.NamePart{Value: "C"}}}

	ast := &stmt.StmtList{
		Stmts: []node.Node{
			&stmt.UseList{
				Uses: []node.Node{
					&stmt.Use{
						Use: nameAB,
					},
				},
			},
			&stmt.TraitUse{
				Traits: []node.Node{
					nameB,
					relativeNameB,
				},
				TraitAdaptationList: &stmt.TraitAdaptationList{
					Adaptations: []node.Node{
						&stmt.TraitUsePrecedence{
							Ref: &stmt.TraitMethodRef{
								Trait:  fullyQualifiedNameB,
								Method: &node.Identifier{Value: "foo"},
							},
							Insteadof: []node.Node{fullyQualifiedNameBC},
						},
						&stmt.TraitUseAlias{
							Ref: &stmt.TraitMethodRef{
								Trait:  relativeNameBC,
								Method: &node.Identifier{Value: "foo"},
							},
							Alias: &node.Identifier{Value: "bar"},
						},
					},
				},
			},
			&stmt.TraitUse{
				Traits: []node.Node{
					nameD,
				},
			},
		},
	}

	expected := map[node.Node]string{
		nameB:                "A\\B",
		nameD:                "D",
		relativeNameB:        "B",
		fullyQualifiedNameB:  "B",
		fullyQualifiedNameBC: "B\\C",
		relativeNameBC:       "B\\C",
	}

	nsResolver := visitor.NewNamespaceResolver()
	ast.Walk(nsResolver)

	assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}

// TestResolveClassName checks that a class declaration gets a namespaced
// name and that its extends/implements names resolve.
func TestResolveClassName(t *testing.T) {
	nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
	nameBC := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}, &name.NamePart{Value: "C"}}}

	class := &stmt.Class{
		PhpDocComment: "",
		ClassName:     &node.Identifier{Value: "A"},
		Extends: &stmt.ClassExtends{
			ClassName: nameAB,
		},
		Implements: &stmt.ClassImplements{
			InterfaceNames: []node.Node{
				nameBC,
			},
		},
	}

	ast := &stmt.StmtList{
		Stmts: []node.Node{
			class,
		},
	}

	expected := map[node.Node]string{
		class:  "A",
		nameAB: "A\\B",
		nameBC: "B\\C",
	}

	nsResolver := visitor.NewNamespaceResolver()
	ast.Walk(nsResolver)

	assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}

// TestResolveInterfaceName checks interface declarations and their extends
// list.
func TestResolveInterfaceName(t *testing.T) {
	nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
	nameBC := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}, &name.NamePart{Value: "C"}}}

	interfaceNode := &stmt.Interface{
		PhpDocComment: "",
		InterfaceName: &node.Identifier{Value: "A"},
		Extends: &stmt.InterfaceExtends{
			InterfaceNames: []node.Node{
				nameAB,
				nameBC,
			},
		},
	}

	ast := &stmt.StmtList{
		Stmts: []node.Node{
			interfaceNode,
		},
	}

	expected := map[node.Node]string{
		interfaceNode: "A",
		nameAB:        "A\\B",
		nameBC:        "B\\C",
	}

	nsResolver := visitor.NewNamespaceResolver()
	ast.Walk(nsResolver)

	assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}

// TestResolveTraitName checks that a trait declaration gets a namespaced name.
func TestResolveTraitName(t *testing.T) {
	traitNode := &stmt.Trait{
		PhpDocComment: "",
		TraitName:     &node.Identifier{Value: "A"},
		Stmts:         []node.Node{},
	}

	ast := &stmt.StmtList{
		Stmts: []node.Node{
			traitNode,
		},
	}

	expected := map[node.Node]string{
		traitNode: "A",
	}

	nsResolver := visitor.NewNamespaceResolver()
	ast.Walk(nsResolver)

	assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}

// TestResolveFunctionName checks function declarations: the function itself,
// its parameter type hints and its (nullable) return type.
func TestResolveFunctionName(t *testing.T) {
	nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
	nameBC := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}, &name.NamePart{Value: "C"}}}

	functionNode := &stmt.Function{
		ReturnsRef:    false,
		PhpDocComment: "",
		FunctionName:  &node.Identifier{Value: "A"},
		Params: []node.Node{
			&node.Parameter{
				ByRef:        false,
				Variadic:     false,
				VariableType: nameAB,
				Variable:     &expr.Variable{VarName: &node.Identifier{Value: "foo"}},
			},
		},
		ReturnType: &node.Nullable{Expr: nameBC},
		Stmts:      []node.Node{},
	}

	ast := &stmt.StmtList{
		Stmts: []node.Node{
			functionNode,
		},
	}

	expected := map[node.Node]string{
		functionNode: "A",
		nameAB:       "A\\B",
		nameBC:       "B\\C",
	}

	nsResolver := visitor.NewNamespaceResolver()
	ast.Walk(nsResolver)

	assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}
func TestResolveMethodName(t *testing.T) {
nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
nameBC := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}, &name.NamePart{Value: "C"}}}
methodNode := &stmt.ClassMethod{
ReturnsRef: false,
PhpDocComment: "",
MethodName: &node.Identifier{Value: "A"},
Params: []node.Node{
&node.Parameter{
ByRef: false,
Variadic: false,
VariableType: nameAB,
Variable: &expr.Variable{VarName: &node.Identifier{Value: "foo"}},
},
},
ReturnType: &node.Nullable{Expr: nameBC},
Stmt: &stmt.StmtList{
Stmts: []node.Node{},
},
}
expected := map[node.Node]string{
nameAB: "A\\B",
nameBC: "B\\C",
}
nsResolver := visitor.NewNamespaceResolver()
methodNode.Walk(nsResolver)
assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}
func TestResolveClosureName(t *testing.T) {
nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
nameBC := &name.Name{Parts: []node.Node{&name.NamePart{Value: "B"}, &name.NamePart{Value: "C"}}}
closureNode := &expr.Closure{
ReturnsRef: false,
Static: false,
PhpDocComment: "",
Params: []node.Node{
&node.Parameter{
ByRef: false,
Variadic: false,
VariableType: nameAB,
Variable: &expr.Variable{VarName: &node.Identifier{Value: "foo"}},
},
},
ReturnType: &node.Nullable{Expr: nameBC},
Stmts: []node.Node{},
}
expected := map[node.Node]string{
nameAB: "A\\B",
nameBC: "B\\C",
}
nsResolver := visitor.NewNamespaceResolver()
closureNode.Walk(nsResolver)
assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}
func TestResolveConstantsName(t *testing.T) {
nameAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
constantB := &stmt.Constant{
PhpDocComment: "",
ConstantName: &node.Identifier{Value: "B"},
Expr: &scalar.Lnumber{Value: "1"},
}
constantC := &stmt.Constant{
PhpDocComment: "",
ConstantName: &node.Identifier{Value: "C"},
Expr: &scalar.Lnumber{Value: "1"},
}
ast := &stmt.StmtList{
Stmts: []node.Node{
&stmt.Namespace{
NamespaceName: nameAB,
},
&stmt.ConstList{
Consts: []node.Node{
constantB,
constantC,
},
},
},
}
expected := map[node.Node]string{
constantB: "A\\B\\B",
constantC: "A\\B\\C",
}
nsResolver := visitor.NewNamespaceResolver()
ast.Walk(nsResolver)
assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}
func TestResolveNamespaces(t *testing.T) {
namespaceAB := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "B"}}}
namespaceCD := &name.Name{Parts: []node.Node{&name.NamePart{Value: "C"}, &name.NamePart{Value: "D"}}}
nameAC := &name.Name{Parts: []node.Node{&name.NamePart{Value: "A"}, &name.NamePart{Value: "C"}}}
nameCF := &name.Name{Parts: []node.Node{&name.NamePart{Value: "C"}, &name.NamePart{Value: "F"}}}
nameFG := &name.Name{Parts: []node.Node{&name.NamePart{Value: "F"}, &name.NamePart{Value: "G"}}}
relativeNameCE := &name.Relative{Parts: []node.Node{&name.NamePart{Value: "C"}, &name.NamePart{Value: "E"}}}
constantB := &stmt.Constant{
PhpDocComment: "",
ConstantName: &node.Identifier{Value: "B"},
Expr: &scalar.Lnumber{Value: "1"},
}
constantC := &stmt.Constant{
PhpDocComment: "",
ConstantName: &node.Identifier{Value: "C"},
Expr: &scalar.Lnumber{Value: "1"},
}
ast := &stmt.StmtList{
Stmts: []node.Node{
&stmt.Namespace{
NamespaceName: namespaceAB,
},
&stmt.ConstList{
Consts: []node.Node{
constantB,
constantC,
},
},
&expr.StaticCall{
Class: nameFG,
Call: &node.Identifier{Value: "foo"},
ArgumentList: &node.ArgumentList{},
},
&stmt.Namespace{
Stmts: []node.Node{},
},
&stmt.Namespace{
NamespaceName: namespaceCD,
Stmts: []node.Node{
&stmt.UseList{
Uses: []node.Node{
&stmt.Use{
Use: nameAC,
},
},
},
&expr.StaticCall{
Class: relativeNameCE,
Call: &node.Identifier{Value: "foo"},
ArgumentList: &node.ArgumentList{},
},
&expr.StaticCall{
Class: nameCF,
Call: &node.Identifier{Value: "foo"},
ArgumentList: &node.ArgumentList{},
},
},
},
},
}
expected := map[node.Node]string{
constantB: "A\\B\\B",
constantC: "A\\B\\C",
nameFG: "A\\B\\F\\G",
relativeNameCE: "C\\D\\C\\E",
nameCF: "A\\C\\F",
}
nsResolver := visitor.NewNamespaceResolver()
ast.Walk(nsResolver)
assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}
func TestResolveStaticCallDinamicClassName(t *testing.T) {
ast := &stmt.StmtList{
Stmts: []node.Node{
&expr.StaticCall{
Class: &expr.Variable{VarName: &node.Identifier{Value: "foo"}},
Call: &node.Identifier{Value: "foo"},
ArgumentList: &node.ArgumentList{},
},
},
}
expected := map[node.Node]string{}
nsResolver := visitor.NewNamespaceResolver()
ast.Walk(nsResolver)
assert.DeepEqual(t, expected, nsResolver.ResolvedNames)
}
// The reserved constants true/false/null must resolve to their canonical
// lowercase form, ignoring both the current namespace and spelling case.
func TestDoNotResolveReservedConstants(t *testing.T) {
	ns := &name.Name{Parts: []node.Node{&name.NamePart{Value: "Foo"}}}
	constTrue := &name.Name{Parts: []node.Node{&name.NamePart{Value: "True"}}}
	constFalse := &name.Name{Parts: []node.Node{&name.NamePart{Value: "False"}}}
	constNull := &name.Name{Parts: []node.Node{&name.NamePart{Value: "NULL"}}}

	stmts := []node.Node{&stmt.Namespace{NamespaceName: ns}}
	for _, c := range []*name.Name{constTrue, constFalse, constNull} {
		stmts = append(stmts, &stmt.Expression{Expr: &expr.ConstFetch{Constant: c}})
	}
	tree := &stmt.StmtList{Stmts: stmts}

	resolver := visitor.NewNamespaceResolver()
	tree.Walk(resolver)

	expected := map[node.Node]string{
		constTrue:  "true",
		constFalse: "false",
		constNull:  "null",
	}
	assert.DeepEqual(t, expected, resolver.ResolvedNames)
}
// Scalar type hints (int, float, bool, string, void, iterable, object)
// are reserved type names: they must pass through unchanged instead of
// being prefixed with the current namespace, while the function itself
// is still resolved into the namespace.
func TestDoNotResolveReservedNames(t *testing.T) {
	mkName := func(part string) *name.Name {
		return &name.Name{Parts: []node.Node{&name.NamePart{Value: part}}}
	}
	mkParam := func(typ *name.Name, varName string) *node.Parameter {
		return &node.Parameter{
			VariableType: typ,
			Variable: &expr.Variable{
				VarName: &node.Identifier{Value: varName},
			},
		}
	}

	nameInt := mkName("int")
	nameFloat := mkName("float")
	nameBool := mkName("bool")
	nameString := mkName("string")
	nameVoid := mkName("void")
	nameIterable := mkName("iterable")
	nameObject := mkName("object")

	function := &stmt.Function{
		FunctionName: &node.Identifier{Value: "bar"},
		Params: []node.Node{
			mkParam(nameInt, "Int"),
			mkParam(nameFloat, "Float"),
			mkParam(nameBool, "Bool"),
			mkParam(nameString, "String"),
			mkParam(nameVoid, "Void"),
			mkParam(nameIterable, "Iterable"),
			mkParam(nameObject, "Object"),
		},
	}
	tree := &stmt.StmtList{
		Stmts: []node.Node{
			&stmt.Namespace{NamespaceName: mkName("Foo")},
			function,
		},
	}

	resolver := visitor.NewNamespaceResolver()
	tree.Walk(resolver)

	expected := map[node.Node]string{
		function:     "Foo\\bar",
		nameInt:      "int",
		nameFloat:    "float",
		nameBool:     "bool",
		nameString:   "string",
		nameVoid:     "void",
		nameIterable: "iterable",
		nameObject:   "object",
	}
	assert.DeepEqual(t, expected, resolver.ResolvedNames)
}
// self/static/parent used as a static-call class inside a class body are
// reserved references: they must be lowercased, never expanded with the
// namespace prefix, while the class itself is resolved normally.
func TestDoNotResolveReservedSpecialNames(t *testing.T) {
	mkName := func(part string) *name.Name {
		return &name.Name{Parts: []node.Node{&name.NamePart{Value: part}}}
	}

	nameSelf := mkName("Self")
	nameStatic := mkName("Static")
	nameParent := mkName("Parent")

	var body []node.Node
	for _, class := range []*name.Name{nameSelf, nameStatic, nameParent} {
		body = append(body, &stmt.Expression{
			Expr: &expr.StaticCall{
				Class:        class,
				Call:         &node.Identifier{Value: "func"},
				ArgumentList: &node.ArgumentList{},
			},
		})
	}
	cls := &stmt.Class{
		ClassName: &node.Identifier{Value: "Bar"},
		Stmts:     body,
	}
	tree := &stmt.StmtList{
		Stmts: []node.Node{
			&stmt.Namespace{NamespaceName: mkName("Foo")},
			cls,
		},
	}

	resolver := visitor.NewNamespaceResolver()
	tree.Walk(resolver)

	expected := map[node.Node]string{
		cls:        "Foo\\Bar",
		nameSelf:   "self",
		nameStatic: "static",
		nameParent: "parent",
	}
	assert.DeepEqual(t, expected, resolver.ResolvedNames)
}

View File

@ -1,187 +0,0 @@
// Package visitor contains walker.visitor implementations
package visitor
import (
"fmt"
"io"
"reflect"
"github.com/z7zmey/php-parser/freefloating"
"github.com/z7zmey/php-parser/node"
"github.com/z7zmey/php-parser/walker"
)
// PrettyJsonDumper writes an AST as indented, human-readable JSON.
type PrettyJsonDumper struct {
	Writer     io.Writer           // destination of the JSON output
	NsResolver *NamespaceResolver  // optional; when set, resolved names are included as "namespacedName"
	depth      int                 // current nesting level, drives indentation
	isChildNode bool               // next EnterNode renders a keyed child value: skip comma/indent
	isNotFirstNode bool            // a sibling was already emitted in the current list
}
// NewPrettyJsonDumper returns a dumper that writes JSON to Writer.
// NsResolver may be nil; when provided, resolved fully-qualified names
// are added to the output of nodes the resolver knows about.
func NewPrettyJsonDumper(Writer io.Writer, NsResolver *NamespaceResolver) *PrettyJsonDumper {
	// depth, isChildNode and isNotFirstNode intentionally start at their
	// zero values (0, false, false).
	d := PrettyJsonDumper{
		Writer:     Writer,
		NsResolver: NsResolver,
	}
	return &d
}
// printIndent writes the current indentation (two spaces per depth
// level) to w.
//
// Fix: the w parameter was previously ignored and output always went to
// d.Writer. Every visible call site passes d.Writer, so honoring the
// parameter does not change observable behavior, but it removes the
// latent bug of a silently ignored argument.
func (d *PrettyJsonDumper) printIndent(w io.Writer) {
	for i := 0; i < d.depth; i++ {
		fmt.Fprint(w, "  ")
	}
}
// EnterNode is invoked at every node in hierarchy
//
// It opens a JSON object for the node and emits, in order: the node's
// concrete Go type, its source position (when present), its resolved
// namespaced name (when NsResolver is set and has an entry for this
// node), the attached free-floating comments/whitespace/tokens, and
// finally the node's scalar attributes. Children are appended afterwards
// through the EnterChild* callbacks. Always returns true, so traversal
// never skips children.
func (d *PrettyJsonDumper) EnterNode(w walker.Walkable) bool {
	n := w.(node.Node)
	nodeType := reflect.TypeOf(n).String()

	// Decide what separates this object from what precedes it: a keyed
	// child value follows its `"key": ` with no comma or indent, a later
	// sibling needs ",\n" plus indent, and the first node in a list only
	// needs indentation.
	if d.isChildNode {
		d.isChildNode = false
	} else if d.isNotFirstNode {
		fmt.Fprint(d.Writer, ",\n")
		d.printIndent(d.Writer)
	} else {
		d.printIndent(d.Writer)
		d.isNotFirstNode = true
	}

	fmt.Fprint(d.Writer, "{\n")
	d.depth++
	d.printIndent(d.Writer)
	fmt.Fprintf(d.Writer, "%q: %q", "type", nodeType)

	// "position": start/end byte offsets and line numbers, when present.
	if p := n.GetPosition(); p != nil {
		fmt.Fprint(d.Writer, ",\n")
		d.printIndent(d.Writer)
		fmt.Fprintf(d.Writer, "%q: {\n", "position")
		d.depth++
		d.printIndent(d.Writer)
		fmt.Fprintf(d.Writer, "%q: %d,\n", "startPos", p.StartPos)
		d.printIndent(d.Writer)
		fmt.Fprintf(d.Writer, "%q: %d,\n", "endPos", p.EndPos)
		d.printIndent(d.Writer)
		fmt.Fprintf(d.Writer, "%q: %d,\n", "startLine", p.StartLine)
		d.printIndent(d.Writer)
		fmt.Fprintf(d.Writer, "%q: %d\n", "endLine", p.EndLine)
		d.depth--
		d.printIndent(d.Writer)
		fmt.Fprint(d.Writer, "}")
	}

	// "namespacedName": only when a resolver was supplied and resolved n.
	if d.NsResolver != nil {
		if namespacedName, ok := d.NsResolver.ResolvedNames[n]; ok {
			fmt.Fprint(d.Writer, ",\n")
			d.printIndent(d.Writer)
			fmt.Fprintf(d.Writer, "\"namespacedName\": %q", namespacedName)
		}
	}

	// "freefloating": position-key -> list of {type, value} objects.
	// NOTE(review): this ranges over a map, so the key order within the
	// "freefloating" object is nondeterministic between runs.
	if !n.GetFreeFloating().IsEmpty() {
		fmt.Fprint(d.Writer, ",\n")
		d.printIndent(d.Writer)
		fmt.Fprint(d.Writer, "\"freefloating\": {\n")
		d.depth++
		i := 0 // counts emitted keys, to place commas between them
		for key, freeFloatingStrings := range *n.GetFreeFloating() {
			if i != 0 {
				fmt.Fprint(d.Writer, ",\n")
			}
			i++
			d.printIndent(d.Writer)
			fmt.Fprintf(d.Writer, "%q: [\n", key)
			d.depth++
			j := 0 // counts emitted items in the current list
			for _, freeFloatingString := range freeFloatingStrings {
				if j != 0 {
					fmt.Fprint(d.Writer, ",\n")
				}
				j++
				d.printIndent(d.Writer)
				fmt.Fprint(d.Writer, "{\n")
				d.depth++
				d.printIndent(d.Writer)
				// Translate the StringType constant to a stable label.
				switch freeFloatingString.StringType {
				case freefloating.CommentType:
					fmt.Fprintf(d.Writer, "%q: %q,\n", "type", "freefloating.CommentType")
				case freefloating.WhiteSpaceType:
					fmt.Fprintf(d.Writer, "%q: %q,\n", "type", "freefloating.WhiteSpaceType")
				case freefloating.TokenType:
					fmt.Fprintf(d.Writer, "%q: %q,\n", "type", "freefloating.TokenType")
				}
				d.printIndent(d.Writer)
				fmt.Fprintf(d.Writer, "%q: %q\n", "value", freeFloatingString.Value)
				d.depth--
				d.printIndent(d.Writer)
				fmt.Fprint(d.Writer, "}")
			}
			d.depth--
			fmt.Fprint(d.Writer, "\n")
			d.printIndent(d.Writer)
			fmt.Fprint(d.Writer, "]")
		}
		d.depth--
		fmt.Fprint(d.Writer, "\n")
		d.printIndent(d.Writer)
		fmt.Fprint(d.Writer, "}")
	}

	// Scalar attributes: strings are JSON-quoted, other values use %v.
	if a := n.Attributes(); len(a) > 0 {
		for key, attr := range a {
			fmt.Fprint(d.Writer, ",\n")
			d.printIndent(d.Writer)
			switch attr.(type) {
			case string:
				fmt.Fprintf(d.Writer, "\"%s\": %q", key, attr)
			default:
				fmt.Fprintf(d.Writer, "\"%s\": %v", key, attr)
			}
		}
	}

	return true
}
// LeaveNode is invoked after node process; it closes the node's JSON
// object on a fresh, re-indented line.
func (d *PrettyJsonDumper) LeaveNode(n walker.Walkable) {
	out := d.Writer
	d.depth--
	fmt.Fprint(out, "\n")
	d.printIndent(out)
	fmt.Fprint(out, "}")
}
// EnterChildNode emits the JSON key for a single keyed child and marks
// the dumper so the following EnterNode skips its own comma/indent.
func (d *PrettyJsonDumper) EnterChildNode(key string, w walker.Walkable) {
	out := d.Writer
	fmt.Fprint(out, ",\n")
	d.printIndent(out)
	fmt.Fprintf(out, "%q: ", key)
	d.isChildNode = true
}
// LeaveChildNode is invoked after a keyed child node has been dumped.
// The child's object was already closed by LeaveNode, so nothing is
// emitted here.
func (d *PrettyJsonDumper) LeaveChildNode(key string, w walker.Walkable) {
	// do nothing
}
// EnterChildList emits the JSON key and opening bracket for a list of
// children, and clears the sibling flag so the first element is not
// preceded by a comma.
func (d *PrettyJsonDumper) EnterChildList(key string, w walker.Walkable) {
	out := d.Writer
	fmt.Fprint(out, ",\n")
	d.printIndent(out)
	fmt.Fprintf(out, "%q: [\n", key)
	d.depth++
	d.isNotFirstNode = false
}
// LeaveChildList closes the JSON array opened by EnterChildList.
func (d *PrettyJsonDumper) LeaveChildList(key string, w walker.Walkable) {
	out := d.Writer
	d.depth--
	fmt.Fprint(out, "\n")
	d.printIndent(out)
	fmt.Fprint(out, "]")
}

View File

@ -1,509 +0,0 @@
package visitor_test
import (
"os"
"github.com/z7zmey/php-parser/php7"
"github.com/z7zmey/php-parser/visitor"
)
// ExamplePrettyJsonDumper parses a small PHP program, resolves its
// namespaced names, and dumps the resulting AST as pretty JSON.
//
// NOTE(review): the "Unordered output" comment block below is the
// example's expected output and is compared by `go test`; it must only
// change together with the dumper's output format.
func ExamplePrettyJsonDumper() {
	// The src indentation is significant: byte offsets and free-floating
	// whitespace values in the expected output depend on it.
	src := `<?php

		namespace Foo {
			class Bar {
				public function FunctionName(Type $var = null)
				{
					// some comment
					// second comment
					$var;
				}
			}

			function foo() {
				;
			}
		}
		`
	php7parser := php7.NewParser([]byte(src), "7.4")
	php7parser.WithFreeFloating()
	php7parser.Parse()
	nodes := php7parser.GetRootNode()
	nsResolver := visitor.NewNamespaceResolver()
	nodes.Walk(nsResolver)
	dumper := visitor.NewPrettyJsonDumper(os.Stdout, nsResolver)
	nodes.Walk(dumper)
	// Unordered output:
	// {
	// "type": "*node.Root",
	// "position": {
	// "startPos": 9,
	// "endPos": 198,
	// "startLine": 3,
	// "endLine": 16
	// },
	// "freefloating": {
	// "End": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": "\n\t\t"
	// }
	// ]
	// },
	// "Stmts": [
	// {
	// "type": "*stmt.Namespace",
	// "position": {
	// "startPos": 9,
	// "endPos": 198,
	// "startLine": 3,
	// "endLine": 16
	// },
	// "freefloating": {
	// "Start": [
	// {
	// "type": "freefloating.TokenType",
	// "value": "<?php"
	// },
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": "\n\n\t\t"
	// }
	// ],
	// "Stmts": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": "\n\t\t"
	// }
	// ]
	// },
	// "NamespaceName": {
	// "type": "*name.Name",
	// "position": {
	// "startPos": 19,
	// "endPos": 22,
	// "startLine": 3,
	// "endLine": 3
	// },
	// "freefloating": {
	// "Start": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": " "
	// }
	// ],
	// "End": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": " "
	// }
	// ]
	// },
	// "Parts": [
	// {
	// "type": "*name.NamePart",
	// "position": {
	// "startPos": 19,
	// "endPos": 22,
	// "startLine": 3,
	// "endLine": 3
	// },
	// "Value": "Foo"
	// }
	// ]
	// },
	// "Stmts": [
	// {
	// "type": "*stmt.Class",
	// "position": {
	// "startPos": 28,
	// "endPos": 162,
	// "startLine": 4,
	// "endLine": 11
	// },
	// "namespacedName": "Foo\\Bar",
	// "freefloating": {
	// "Start": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": "\n\t\t\t"
	// }
	// ],
	// "Name": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": " "
	// }
	// ],
	// "Stmts": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": "\n\t\t\t"
	// }
	// ]
	// },
	// "PhpDocComment": "",
	// "ClassName": {
	// "type": "*node.Identifier",
	// "position": {
	// "startPos": 34,
	// "endPos": 37,
	// "startLine": 4,
	// "endLine": 4
	// },
	// "freefloating": {
	// "Start": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": " "
	// }
	// ]
	// },
	// "Value": "Bar"
	// },
	// "Stmts": [
	// {
	// "type": "*stmt.ClassMethod",
	// "position": {
	// "startPos": 44,
	// "endPos": 157,
	// "startLine": 5,
	// "endLine": 10
	// },
	// "freefloating": {
	// "Start": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": "\n\t\t\t\t"
	// }
	// ],
	// "ModifierList": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": " "
	// }
	// ],
	// "Function": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": " "
	// }
	// ]
	// },
	// "ReturnsRef": false,
	// "PhpDocComment": "",
	// "MethodName": {
	// "type": "*node.Identifier",
	// "position": {
	// "startPos": 60,
	// "endPos": 72,
	// "startLine": 5,
	// "endLine": 5
	// },
	// "Value": "FunctionName"
	// },
	// "Modifiers": [
	// {
	// "type": "*node.Identifier",
	// "position": {
	// "startPos": 44,
	// "endPos": 50,
	// "startLine": 5,
	// "endLine": 5
	// },
	// "Value": "public"
	// }
	// ],
	// "Params": [
	// {
	// "type": "*node.Parameter",
	// "position": {
	// "startPos": 73,
	// "endPos": 89,
	// "startLine": 5,
	// "endLine": 5
	// },
	// "freefloating": {
	// "OptionalType": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": " "
	// }
	// ],
	// "Var": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": " "
	// }
	// ]
	// },
	// "ByRef": false,
	// "Variadic": false,
	// "VariableType": {
	// "type": "*name.Name",
	// "position": {
	// "startPos": 73,
	// "endPos": 77,
	// "startLine": 5,
	// "endLine": 5
	// },
	// "namespacedName": "Foo\\Type",
	// "Parts": [
	// {
	// "type": "*name.NamePart",
	// "position": {
	// "startPos": 73,
	// "endPos": 77,
	// "startLine": 5,
	// "endLine": 5
	// },
	// "Value": "Type"
	// }
	// ]
	// },
	// "Variable": {
	// "type": "*expr.Variable",
	// "position": {
	// "startPos": 78,
	// "endPos": 82,
	// "startLine": 5,
	// "endLine": 5
	// },
	// "freefloating": {
	// "Dollar": [
	// {
	// "type": "freefloating.TokenType",
	// "value": "$"
	// }
	// ]
	// },
	// "VarName": {
	// "type": "*node.Identifier",
	// "position": {
	// "startPos": 78,
	// "endPos": 82,
	// "startLine": 5,
	// "endLine": 5
	// },
	// "Value": "var"
	// }
	// },
	// "DefaultValue": {
	// "type": "*expr.ConstFetch",
	// "position": {
	// "startPos": 85,
	// "endPos": 89,
	// "startLine": 5,
	// "endLine": 5
	// },
	// "freefloating": {
	// "Start": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": " "
	// }
	// ]
	// },
	// "Constant": {
	// "type": "*name.Name",
	// "position": {
	// "startPos": 85,
	// "endPos": 89,
	// "startLine": 5,
	// "endLine": 5
	// },
	// "namespacedName": "null",
	// "Parts": [
	// {
	// "type": "*name.NamePart",
	// "position": {
	// "startPos": 85,
	// "endPos": 89,
	// "startLine": 5,
	// "endLine": 5
	// },
	// "Value": "null"
	// }
	// ]
	// }
	// }
	// }
	// ],
	// "Stmt": {
	// "type": "*stmt.StmtList",
	// "position": {
	// "startPos": 95,
	// "endPos": 157,
	// "startLine": 6,
	// "endLine": 10
	// },
	// "freefloating": {
	// "Stmts": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": "\n\t\t\t\t"
	// }
	// ],
	// "Start": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": "\n\t\t\t\t"
	// }
	// ]
	// },
	// "Stmts": [
	// {
	// "type": "*stmt.Expression",
	// "position": {
	// "startPos": 146,
	// "endPos": 151,
	// "startLine": 9,
	// "endLine": 9
	// },
	// "freefloating": {
	// "Start": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": "\n\t\t\t\t\t"
	// },
	// {
	// "type": "freefloating.CommentType",
	// "value": "// some comment\n"
	// },
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": "\t\t\t\t\t"
	// },
	// {
	// "type": "freefloating.CommentType",
	// "value": "// second comment\n"
	// },
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": "\t\t\t\t\t"
	// }
	// ],
	// "SemiColon": [
	// {
	// "type": "freefloating.TokenType",
	// "value": ";"
	// }
	// ]
	// },
	// "Expr": {
	// "type": "*expr.Variable",
	// "position": {
	// "startPos": 146,
	// "endPos": 150,
	// "startLine": 9,
	// "endLine": 9
	// },
	// "freefloating": {
	// "Dollar": [
	// {
	// "type": "freefloating.TokenType",
	// "value": "$"
	// }
	// ]
	// },
	// "VarName": {
	// "type": "*node.Identifier",
	// "position": {
	// "startPos": 146,
	// "endPos": 150,
	// "startLine": 9,
	// "endLine": 9
	// },
	// "Value": "var"
	// }
	// }
	// }
	// ]
	// }
	// }
	// ]
	// },
	// {
	// "type": "*stmt.Function",
	// "position": {
	// "startPos": 167,
	// "endPos": 194,
	// "startLine": 13,
	// "endLine": 15
	// },
	// "namespacedName": "Foo\\foo",
	// "freefloating": {
	// "Params": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": " "
	// }
	// ],
	// "Start": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": "\n\n\t\t\t"
	// }
	// ],
	// "Stmts": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": "\n\t\t\t"
	// }
	// ]
	// },
	// "ReturnsRef": false,
	// "PhpDocComment": "",
	// "FunctionName": {
	// "type": "*node.Identifier",
	// "position": {
	// "startPos": 176,
	// "endPos": 179,
	// "startLine": 13,
	// "endLine": 13
	// },
	// "freefloating": {
	// "Start": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": " "
	// }
	// ]
	// },
	// "Value": "foo"
	// },
	// "Stmts": [
	// {
	// "type": "*stmt.Nop",
	// "position": {
	// "startPos": 188,
	// "endPos": 189,
	// "startLine": 14,
	// "endLine": 14
	// },
	// "freefloating": {
	// "Start": [
	// {
	// "type": "freefloating.WhiteSpaceType",
	// "value": "\n\t\t\t\t"
	// }
	// ],
	// "SemiColon": [
	// {
	// "type": "freefloating.TokenType",
	// "value": ";"
	// }
	// ]
	// }
	// }
	// ]
	// }
	// ]
	// }
	// ]
	// }
}

View File

@ -1,21 +0,0 @@
// Package walker declares walking behavior
package walker
// Walkable interface
//
// Every node must implement this interface
type Walkable interface {
	// Walk traverses the receiver with v, invoking the Visitor's
	// callbacks for the node and its children.
	Walk(v Visitor)
}
// Visitor interface
//
// A Visitor receives traversal callbacks from a Walkable. EnterNode's
// boolean result presumably controls whether the node's children are
// visited — confirm against the Walk implementations.
type Visitor interface {
	EnterNode(w Walkable) bool             // called when a node is entered
	LeaveNode(w Walkable)                  // called after a node was processed
	EnterChildNode(key string, w Walkable) // called before a keyed single-child node
	LeaveChildNode(key string, w Walkable) // called after a keyed single-child node
	EnterChildList(key string, w Walkable) // called before a keyed list of children
	LeaveChildList(key string, w Walkable) // called after a keyed list of children
}