[refactoring] update dumper

This commit is contained in:
Vadym Slizov 2020-12-11 13:28:16 +02:00
parent 60433615a9
commit 632146f98e
No known key found for this signature in database
GPG Key ID: AEA2A9388EF42A4A
6 changed files with 1937 additions and 536 deletions

View File

@@ -1,6 +1,7 @@
 package main
 
 import (
+	"bytes"
 	"flag"
 	"io"
 	"io/ioutil"
@@ -163,12 +164,12 @@ func printerWorker(r <-chan result) {
 	}
 
 	if *printBack {
-		//o := bytes.NewBuffer([]byte{})
-		//p := printer.NewPrinter(o)
-		//p.Print(res.rootNode)
-		//
-		//err := ioutil.WriteFile(res.path, o.Bytes(), 0644)
-		//checkErr(err)
+		o := bytes.NewBuffer([]byte{})
+		p := visitor.NewPrinter(o)
+		res.rootNode.Accept(p)
+
+		err := ioutil.WriteFile(res.path, o.Bytes(), 0644)
+		checkErr(err)
 	}
 
 	if *showResolvedNs {
@@ -181,9 +182,7 @@ func printerWorker(r <-chan result) {
 	}
 
 	if *dump == true {
-		v := visitor.NewDump(os.Stdout)
-		t := traverser.NewDFS(v)
-		t.Traverse(res.rootNode)
+		visitor.NewDump(os.Stdout).WithPositions().WithTokens().Dump(res.rootNode)
 	}
 
 	wg.Done()

Binary file not shown.

View File

@@ -255,7 +255,7 @@ func (lex *Lexer) Lex() *token.Token {
 			'use'i => {lex.setTokenPosition(tkn); tok = token.T_USE; fbreak;};
 			'var'i => {lex.setTokenPosition(tkn); tok = token.T_VAR; fbreak;};
 			'while'i => {lex.setTokenPosition(tkn); tok = token.T_WHILE; fbreak;};
-			'yield'i whitespace_line* 'from'i => {lex.setTokenPosition(tkn); tok = token.T_YIELD_FROM; fbreak;};
+			'yield'i whitespace_line+ 'from'i => {lex.setTokenPosition(tkn); tok = token.T_YIELD_FROM; fbreak;};
 			'yield'i => {lex.setTokenPosition(tkn); tok = token.T_YIELD; fbreak;};
 			'include'i => {lex.setTokenPosition(tkn); tok = token.T_INCLUDE; fbreak;};
 			'include_once'i => {lex.setTokenPosition(tkn); tok = token.T_INCLUDE_ONCE; fbreak;};

File diff suppressed because it is too large Load Diff

View File

@@ -1,62 +1,76 @@
 package visitor_test
 
 import (
-	"os"
+	"bytes"
+	"testing"
 
 	"github.com/z7zmey/php-parser/pkg/ast"
-	"github.com/z7zmey/php-parser/pkg/ast/traverser"
 	"github.com/z7zmey/php-parser/pkg/ast/visitor"
+	"github.com/z7zmey/php-parser/pkg/position"
 	"github.com/z7zmey/php-parser/pkg/token"
 )
 
-func ExampleDump() {
-	stxTree := &ast.Root{
-		Stmts: []ast.Vertex{
-			&ast.Identifier{},
-			&ast.Parameter{
-				Var: &ast.ExprVariable{},
-			},
-			&ast.StmtInlineHtml{
-				Value: []byte("foo"),
-			},
-		},
-		EndTkn: &token.Token{
-			ID: token.T_WHITESPACE,
-			Value: []byte(" "),
-		},
-	}
-
-	traverser.NewDFS(visitor.NewDump(os.Stdout)).Traverse(stxTree)
-
-	//output:
-	//&ast.Root{
-	//	Node: ast.Node{
-	//		Tokens: token.Collection{
-	//			token.Start: []*token.Token{
-	//				{
-	//					ID: token.T_WHITESPACE,
-	//					Value: []byte(" "),
-	//				},
-	//			},
-	//		},
-	//		Position: &position.Position{
-	//			StartLine: 1,
-	//			EndLine: 1,
-	//			StartPos: 0,
-	//			EndPos: 1,
-	//		},
-	//	},
-	//	Stmts: []ast.Vertex{
-	//		&ast.Identifier{
-	//			Value: []byte(""),
-	//		},
-	//		&ast.Parameter{
-	//			Var: &ast.ExprVariable{
-	//			},
-	//		},
-	//		&ast.StmtInlineHtml{
-	//			Value: []byte("foo"),
-	//		},
-	//	},
-	//}
-}
+func TestDumper_root(t *testing.T) {
+	o := bytes.NewBufferString("")
+
+	p := visitor.NewDump(o)
+	n := &ast.Root{
+		Position: &position.Position{
+			StartLine: 1,
+			EndLine:   2,
+			StartPos:  3,
+			EndPos:    4,
+		},
+		Stmts: []ast.Vertex{
+			&ast.StmtNop{},
+		},
+		EndTkn: &token.Token{
+			FreeFloating: []*token.Token{
+				{
+					ID:    token.T_WHITESPACE,
+					Value: []byte(" "),
+					Position: &position.Position{
+						StartLine: 1,
+						EndLine:   2,
+						StartPos:  3,
+						EndPos:    4,
+					},
+				},
+			},
+		},
+	}
+	n.Accept(p)
+
+	expected := `&ast.Root{
+	Position: &position.Position{
+		StartLine: 1,
+		EndLine: 2,
+		StartPos: 3,
+		EndPos: 4,
+	},
+	Stmts: []ast.Vertex{
+		&ast.StmtNop{
+		},
+	},
+	EndTkn: &token.Token{
+		FreeFloating: []*token.Token{
+			{
+				ID: token.T_WHITESPACE,
+				Value: []byte(" "),
+				Position: &position.Position{
+					StartLine: 1,
+					EndLine: 2,
+					StartPos: 3,
+					EndPos: 4,
+				},
+			},
+		},
+	},
+},
+`
+	actual := o.String()
+
+	if expected != actual {
+		t.Errorf("\nexpected: %s\ngot: %s\n", expected, actual)
+	}
 }