prepare php5 parser files

parent 1a045403db
commit e1993b7b5c

Makefile (7 changed lines)
@@ -1,6 +1,6 @@
 PHPFILE=example.php
 
-all: ./php7parser/parser.go ./scanner/scanner.go
+all: ./php5/php5.go ./php7/php7.go ./scanner/scanner.go
 	rm -f y.output
 	gofmt -l -s -w *.go
 	go build
@@ -14,5 +14,8 @@ test: all
 ./scanner/scanner.go: ./scanner/scanner.l
 	golex -o $@ $<
 
-./php7parser/parser.go: ./php7parser/parser.y
+./php5/php5.go: ./php5/php5.y
 	goyacc -o $@ $<
+
+./php7/php7.go: ./php7/php7.y
+	goyacc -o $@ $<

main.go (4 changed lines)
@@ -8,7 +8,7 @@ import (
 	"path/filepath"
 
 	"github.com/yookoala/realpath"
-	php7parser "github.com/z7zmey/php-parser/php7parser"
+	"github.com/z7zmey/php-parser/php5"
 )
 
 func main() {
@@ -23,7 +23,7 @@ func main() {
 		fmt.Printf("==> %s\n", path)
 
 		src, _ := os.Open(string(path))
-		nodes, comments, positions := php7parser.Parse(src, path)
+		nodes, comments, positions := php5.Parse(src, path)
 
 		visitor := Dumper{
 			Indent: " | ",
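
Not part of the commit: a minimal sketch, in the spirit of this main.go, of how a caller could choose between the two parser packages introduced here. The -php5 flag, the flag.Args() file list, and the plain Println output are illustrative assumptions, not the project's actual CLI.

package main

import (
	"flag"
	"fmt"
	"os"

	"github.com/z7zmey/php-parser/comment"
	"github.com/z7zmey/php-parser/node"
	"github.com/z7zmey/php-parser/php5"
	"github.com/z7zmey/php-parser/php7"
	"github.com/z7zmey/php-parser/position"
)

func main() {
	usePhp5 := flag.Bool("php5", false, "parse using the PHP5 grammar") // hypothetical flag
	flag.Parse()

	for _, path := range flag.Args() {
		fmt.Printf("==> %s\n", path)

		src, err := os.Open(path)
		if err != nil {
			fmt.Println(err)
			continue
		}

		var nodes node.Node
		var comments comment.Comments
		var positions position.Positions

		// Both packages expose the same Parse signature, so the result
		// handling below is shared.
		if *usePhp5 {
			nodes, comments, positions = php5.Parse(src, path)
		} else {
			nodes, comments, positions = php7.Parse(src, path)
		}
		src.Close()

		fmt.Println(nodes, comments, positions) // stand-in for the Dumper visitor
	}
}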

php5/lexer.go (new file, 41 lines)
@@ -0,0 +1,41 @@
+package php5
+
+import (
+	"bufio"
+	goToken "go/token"
+	"io"
+
+	"github.com/cznic/golex/lex"
+
+	"github.com/z7zmey/php-parser/scanner"
+	"github.com/z7zmey/php-parser/token"
+)
+
+type lexer struct {
+	scanner.Lexer
+}
+
+func (l *lexer) Lex(lval *yySymType) int {
+	return l.Lexer.Lex(lval)
+}
+
+func (lval *yySymType) Token(t token.Token) {
+	lval.token = t
+}
+
+func newLexer(src io.Reader, fName string) *lexer {
+	file := goToken.NewFileSet().AddFile(fName, -1, 1<<31-1)
+	lx, err := lex.New(file, bufio.NewReader(src), lex.RuneClass(scanner.Rune2Class))
+	if err != nil {
+		panic(err)
+	}
+
+	return &lexer{
+		scanner.Lexer{
+			Lexer:         lx,
+			StateStack:    []int{0},
+			PhpDocComment: "",
+			Comments:      nil,
+		},
+	}
+}
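
Note (not part of the commit): this small wrapper exists so the shared scanner can drive the goyacc-generated parser. goyacc-generated code expects its lexer argument to implement an interface of roughly the shape sketched below; lexer satisfies it through the Lex method above and, presumably, an Error method provided by the embedded scanner.Lexer. The name yyLexerSketch is illustrative only.

// Sketch of the lexer interface a goyacc-generated parser expects
// (yySymType is the union type generated from the %union block in php5.y).
type yyLexerSketch interface {
	Lex(lval *yySymType) int // fill lval with the next token and return its id
	Error(s string)          // called by yyParse to report syntax errors
}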

php5/parser.go (new file, 52 lines)
@@ -0,0 +1,52 @@
+package php5
+
+import (
+	"io"
+
+	"github.com/z7zmey/php-parser/comment"
+	"github.com/z7zmey/php-parser/node"
+	"github.com/z7zmey/php-parser/node/stmt"
+	"github.com/z7zmey/php-parser/position"
+	"github.com/z7zmey/php-parser/token"
+)
+
+var rootnode node.Node
+var comments comment.Comments
+var positions position.Positions
+var positionBuilder position.Builder
+
+func Parse(src io.Reader, fName string) (node.Node, comment.Comments, position.Positions) {
+	yyDebug = 0
+	yyErrorVerbose = true
+	rootnode = stmt.NewStmtList([]node.Node{}) //reset
+	comments = comment.Comments{}
+	positions = position.Positions{}
+	positionBuilder = position.Builder{&positions}
+	yyParse(newLexer(src, fName))
+	return rootnode, comments, positions
+}
+
+func ListGetFirstNodeComments(list []node.Node) []comment.Comment {
+	if len(list) == 0 {
+		return nil
+	}
+
+	node := list[0]
+
+	return comments[node]
+}
+
+type foreachVariable struct {
+	node  node.Node
+	byRef bool
+}
+
+type nodesWithEndToken struct {
+	nodes    []node.Node
+	endToken token.Token
+}
+
+type boolWithToken struct {
+	value bool
+	token *token.Token
+}

php5/php5.y (file diff suppressed because it is too large; loaded hunks shown below)
@@ -1,14 +1,9 @@
 %{
-package php7parser
+package php5
 
 import (
-	"io"
 	"strings"
 	"strconv"
-	"bufio"
-	goToken "go/token"
-
-	"github.com/cznic/golex/lex"
 
 	"github.com/z7zmey/php-parser/token"
 	"github.com/z7zmey/php-parser/node"
@@ -19,73 +14,8 @@ import (
 	"github.com/z7zmey/php-parser/node/expr/assign_op"
 	"github.com/z7zmey/php-parser/node/expr/binary_op"
 	"github.com/z7zmey/php-parser/node/expr/cast"
 	"github.com/z7zmey/php-parser/comment"
 	"github.com/z7zmey/php-parser/position"
-	"github.com/z7zmey/php-parser/scanner"
 )
-
-var rootnode node.Node
-var comments comment.Comments
-var positions position.Positions
-var positionBuilder position.Builder
-
-type lexer struct {
-	scanner.Lexer
-}
-
-func (l *lexer) Lex(lval *yySymType) int {
-	return l.Lexer.Lex(lval)
-}
-
-func (lval *yySymType) Token(t token.Token) {
-	lval.token = t
-}
-
-func newLexer(src io.Reader, fName string) *lexer {
-	file := goToken.NewFileSet().AddFile(fName, -1, 1<<31-1)
-	lx, err := lex.New(file, bufio.NewReader(src), lex.RuneClass(scanner.Rune2Class))
-	if err != nil {
-		panic(err)
-	}
-	return &lexer{scanner.Lexer{lx, []int{0}, "", nil}}
-}
-
-func Parse(src io.Reader, fName string) (node.Node, comment.Comments, position.Positions) {
-	yyDebug = 0
-	yyErrorVerbose = true
-	rootnode = stmt.NewStmtList([]node.Node{}) //reset
-	comments = comment.Comments{}
-	positions = position.Positions{}
-	positionBuilder = position.Builder{&positions}
-	yyParse(newLexer(src, fName))
-	return rootnode, comments, positions
-}
-
-func ListGetFirstNodeComments(list []node.Node) []comment.Comment {
-	if len(list) == 0 {
-		return nil
-	}
-
-	node := list[0]
-
-	return comments[node]
-}
-
-type foreachVariable struct {
-	node  node.Node
-	byRef bool
-}
-
-type nodesWithEndToken struct {
-	nodes    []node.Node
-	endToken token.Token
-}
-
-type boolWithToken struct {
-	value bool
-	token *token.Token
-}
-
 %}
-
 %union{

php7/lexer.go (new file, 41 lines)
@@ -0,0 +1,41 @@
+package php7
+
+import (
+	"bufio"
+	goToken "go/token"
+	"io"
+
+	"github.com/cznic/golex/lex"
+
+	"github.com/z7zmey/php-parser/scanner"
+	"github.com/z7zmey/php-parser/token"
+)
+
+type lexer struct {
+	scanner.Lexer
+}
+
+func (l *lexer) Lex(lval *yySymType) int {
+	return l.Lexer.Lex(lval)
+}
+
+func (lval *yySymType) Token(t token.Token) {
+	lval.token = t
+}
+
+func newLexer(src io.Reader, fName string) *lexer {
+	file := goToken.NewFileSet().AddFile(fName, -1, 1<<31-1)
+	lx, err := lex.New(file, bufio.NewReader(src), lex.RuneClass(scanner.Rune2Class))
+	if err != nil {
+		panic(err)
+	}
+
+	return &lexer{
+		scanner.Lexer{
+			Lexer:         lx,
+			StateStack:    []int{0},
+			PhpDocComment: "",
+			Comments:      nil,
+		},
+	}
+}

php7/parser.go (new file, 52 lines)
@@ -0,0 +1,52 @@
+package php7
+
+import (
+	"io"
+
+	"github.com/z7zmey/php-parser/comment"
+	"github.com/z7zmey/php-parser/node"
+	"github.com/z7zmey/php-parser/node/stmt"
+	"github.com/z7zmey/php-parser/position"
+	"github.com/z7zmey/php-parser/token"
+)
+
+var rootnode node.Node
+var comments comment.Comments
+var positions position.Positions
+var positionBuilder position.Builder
+
+func Parse(src io.Reader, fName string) (node.Node, comment.Comments, position.Positions) {
+	yyDebug = 0
+	yyErrorVerbose = true
+	rootnode = stmt.NewStmtList([]node.Node{}) //reset
+	comments = comment.Comments{}
+	positions = position.Positions{}
+	positionBuilder = position.Builder{&positions}
+	yyParse(newLexer(src, fName))
+	return rootnode, comments, positions
+}
+
+func ListGetFirstNodeComments(list []node.Node) []comment.Comment {
+	if len(list) == 0 {
+		return nil
+	}
+
+	node := list[0]
+
+	return comments[node]
+}
+
+type foreachVariable struct {
+	node  node.Node
+	byRef bool
+}
+
+type nodesWithEndToken struct {
+	nodes    []node.Node
+	endToken token.Token
+}
+
+type boolWithToken struct {
+	value bool
+	token *token.Token
+}

php7/php7.go (new file, 5763 lines; file diff suppressed because it is too large)
php7/php7.y (new file, 2585 lines; file diff suppressed because it is too large)