[refactoring] remove param withTokens from parser

Vadym Slizov 2020-07-03 00:20:32 +03:00
parent ee3fe3b5c0
commit 06d202e71c
20 changed files with 4207 additions and 4157 deletions
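In short: the `withTokens` switch moves out of the php5/php7 parser constructors and now lives only on the lexer, which attaches hidden tokens (whitespace, comments, open/close tags) directly to the `Token` it returns. A minimal sketch of driving the public API after this change, assuming the module path `github.com/z7zmey/php-parser` and the `pkg/parser` `Config` shape shown in the diff below:

```go
package main

import (
	"fmt"

	"github.com/z7zmey/php-parser/pkg/parser"
)

func main() {
	src := []byte(`<?php echo "Hello";`)

	// WithTokens is forwarded to the lexer only; the parser
	// constructors no longer take the flag.
	root, err := parser.Parse(src, "7.4", parser.Config{WithTokens: true})
	if err != nil {
		fmt.Println(err)
		return
	}

	fmt.Printf("root node: %T\n", root)
}
```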


@@ -14,14 +14,12 @@ type Parser struct {
 	Lexer          *scanner.Lexer
 	currentToken   *scanner.Token
 	rootNode       ast.Vertex
-	withTokens     bool
 	errHandlerFunc func(*errors.Error)
 }

 // NewParser creates and returns new Parser
-func NewParser(lexer *scanner.Lexer, withTokens bool, errHandlerFunc func(*errors.Error)) *Parser {
+func NewParser(lexer *scanner.Lexer, errHandlerFunc func(*errors.Error)) *Parser {
 	return &Parser{
-		withTokens:     withTokens,
 		Lexer:          lexer,
 		errHandlerFunc: errHandlerFunc,
 	}
@@ -63,7 +61,7 @@ func lastNode(nn []ast.Vertex) ast.Vertex {
 }

 func (p *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) {
-	if p.withTokens == false {
+	if _, ok := src.GetNode().Tokens[token.Start]; !ok {
 		return
 	}

@@ -71,42 +69,64 @@ func (p *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) {
 		return
 	}

-	p.setFreeFloating(dst, token.Start, src.GetNode().Tokens[token.Start])
-	delete(src.GetNode().Tokens, token.Start)
-}
-
-func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, strings []token.Token) {
-	if p.withTokens == false {
-		return
-	}
-
-	if len(strings) == 0 {
-		return
-	}
-
 	dstCollection := &dst.GetNode().Tokens
 	if *dstCollection == nil {
 		*dstCollection = make(token.Collection)
 	}

-	(*dstCollection)[pos] = strings
+	(*dstCollection)[token.Start] = src.GetNode().Tokens[token.Start]
+	delete(src.GetNode().Tokens, token.Start)
 }

-func (p *Parser) GetFreeFloatingToken(t *scanner.Token) []token.Token {
-	if p.withTokens == false {
-		return []token.Token{}
-	}
-
-	return []token.Token{
-		{
-			ID:    token.ID(t.ID),
-			Value: t.Value,
-		},
-	}
+func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+	if len(tokens) == 0 {
+		return
+	}
+
+	dstCollection := &dst.GetNode().Tokens
+	if *dstCollection == nil {
+		*dstCollection = make(token.Collection)
+	}
+
+	l := len(tokens)
+	for _, v := range tokens[0 : l-1] {
+		(*dstCollection)[pos] = append((*dstCollection)[pos], v)
+	}
+}
+
+func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+	if len(tokens) == 0 {
+		return
+	}
+
+	dstCollection := &dst.GetNode().Tokens
+	if *dstCollection == nil {
+		*dstCollection = make(token.Collection)
+	}
+
+	(*dstCollection)[pos] = make([]token.Token, 0)
+	for _, v := range tokens {
+		(*dstCollection)[pos] = append((*dstCollection)[pos], v)
+	}
+}
+
+func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+	if len(tokens) == 0 {
+		return
+	}
+
+	dstCollection := &dst.GetNode().Tokens
+	if *dstCollection == nil {
+		*dstCollection = make(token.Collection)
+	}
+
+	l := len(tokens)
+	(*dstCollection)[pos] = append((*dstCollection)[pos], tokens[l-1])
 }

 func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.Vertex) {
-	if p.withTokens == false {
+	if _, ok := prevNode.GetNode().Tokens[token.SemiColon]; !ok {
 		return
 	}

@@ -117,7 +137,7 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.
 	}

 	if semiColon[0].Value[0] == ';' {
-		p.setFreeFloating(prevNode, token.SemiColon, []token.Token{
+		p.setFreeFloatingTokens(prevNode, token.SemiColon, []token.Token{
 			{
 				ID:    token.ID(';'),
 				Value: semiColon[0].Value[0:1],
@@ -126,7 +146,6 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.
 	}

 	vlen := len(semiColon[0].Value)
 	tlen := 2
-
 	if bytes.HasSuffix(semiColon[0].Value, []byte("?>\n")) {
 		tlen = 3
@@ -145,7 +164,7 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.
 		Value: semiColon[0].Value[vlen-tlen:],
 	})

-	p.setFreeFloating(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...))
+	p.setFreeFloatingTokens(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...))
 }

 func (p *Parser) returnTokenToPool(yyDollar []yySymType, yyVAL *yySymType) {
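The old single `setFreeFloating` (plus `GetFreeFloatingToken`) is replaced by three helpers with distinct jobs. An illustrative sketch of that division of labour, using simplified stand-ins for the library's `token.Position`, `token.Token`, and `token.Collection` types (not the library's own code):

```go
package main

import "fmt"

type Position int

const Start Position = 0

type Token struct{ Value []byte }

type Collection map[Position][]Token

// setFreeFloating stores everything except the last entry: the lexer now
// appends the lexed token itself at the end of the slice, so only the
// trivia in front of it is free-floating.
func setFreeFloating(dst Collection, pos Position, tokens []Token) {
	if len(tokens) == 0 {
		return
	}
	for _, v := range tokens[:len(tokens)-1] {
		dst[pos] = append(dst[pos], v)
	}
}

// setFreeFloatingTokens replaces the slot with all given tokens.
func setFreeFloatingTokens(dst Collection, pos Position, tokens []Token) {
	if len(tokens) == 0 {
		return
	}
	dst[pos] = make([]Token, 0, len(tokens))
	dst[pos] = append(dst[pos], tokens...)
}

// setToken keeps only the last entry, i.e. the token itself.
func setToken(dst Collection, pos Position, tokens []Token) {
	if len(tokens) == 0 {
		return
	}
	dst[pos] = append(dst[pos], tokens[len(tokens)-1])
}

func main() {
	c := make(Collection)
	ts := []Token{{[]byte("// lead\n")}, {[]byte("  ")}, {[]byte("echo")}}

	setFreeFloating(c, Start, ts)
	fmt.Println(len(c[Start])) // 2: comment and whitespace; token dropped

	setToken(c, Start, ts)
	fmt.Println(len(c[Start])) // 3: the token itself appended as well
}
```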

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -415,7 +415,7 @@ CAD;

 	for n := 0; n < b.N; n++ {
 		lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
-		php5parser := php5.NewParser(lexer, false, nil)
+		php5parser := php5.NewParser(lexer, nil)
 		php5parser.Parse()
 	}
 }


@@ -22458,7 +22458,7 @@ func TestPhp5(t *testing.T) {
 	}

 	lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
-	php5parser := php5.NewParser(lexer, false, nil)
+	php5parser := php5.NewParser(lexer, nil)
 	php5parser.Parse()

 	actual := php5parser.GetRootNode()
 	assert.DeepEqual(t, expected, actual)
@@ -22596,7 +22596,7 @@ func TestPhp5Strings(t *testing.T) {
 	}

 	lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
-	php5parser := php5.NewParser(lexer, false, nil)
+	php5parser := php5.NewParser(lexer, nil)
 	php5parser.Parse()

 	actual := php5parser.GetRootNode()
 	assert.DeepEqual(t, expected, actual)
@@ -22823,7 +22823,7 @@ CAD;
 	}

 	lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
-	php5parser := php5.NewParser(lexer, false, nil)
+	php5parser := php5.NewParser(lexer, nil)
 	php5parser.Parse()

 	actual := php5parser.GetRootNode()
 	assert.DeepEqual(t, expected, actual)
@@ -22849,7 +22849,7 @@ func TestPhp5ControlCharsErrors(t *testing.T) {
 	}

 	lexer := scanner.NewLexer([]byte(src), "5.6", false, errorHandlerFunc)
-	php5parser := php5.NewParser(lexer, false, errorHandlerFunc)
+	php5parser := php5.NewParser(lexer, errorHandlerFunc)
 	php5parser.Parse()

 	assert.DeepEqual(t, expected, parserErrors)
 }


@@ -14,14 +14,12 @@ type Parser struct {
 	Lexer          *scanner.Lexer
 	currentToken   *scanner.Token
 	rootNode       ast.Vertex
-	withTokens     bool
 	errHandlerFunc func(*errors.Error)
 }

 // NewParser creates and returns new Parser
-func NewParser(lexer *scanner.Lexer, withTokens bool, errHandlerFunc func(*errors.Error)) *Parser {
+func NewParser(lexer *scanner.Lexer, errHandlerFunc func(*errors.Error)) *Parser {
 	return &Parser{
-		withTokens:     withTokens,
 		Lexer:          lexer,
 		errHandlerFunc: errHandlerFunc,
 	}
@@ -63,7 +61,7 @@ func lastNode(nn []ast.Vertex) ast.Vertex {
 }

 func (p *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) {
-	if p.withTokens == false {
+	if _, ok := src.GetNode().Tokens[token.Start]; !ok {
 		return
 	}

@@ -71,42 +69,64 @@ func (p *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) {
 		return
 	}

-	p.setFreeFloating(dst, token.Start, src.GetNode().Tokens[token.Start])
-	delete(src.GetNode().Tokens, token.Start)
-}
-
-func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, strings []token.Token) {
-	if p.withTokens == false {
-		return
-	}
-
-	if len(strings) == 0 {
-		return
-	}
-
 	dstCollection := &dst.GetNode().Tokens
 	if *dstCollection == nil {
 		*dstCollection = make(token.Collection)
 	}

-	(*dstCollection)[pos] = strings
+	(*dstCollection)[token.Start] = src.GetNode().Tokens[token.Start]
+	delete(src.GetNode().Tokens, token.Start)
 }

-func (p *Parser) GetFreeFloatingToken(t *scanner.Token) []token.Token {
-	if p.withTokens == false {
-		return []token.Token{}
-	}
-
-	return []token.Token{
-		{
-			ID:    token.ID(t.ID),
-			Value: t.Value,
-		},
-	}
+func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+	if len(tokens) == 0 {
+		return
+	}
+
+	dstCollection := &dst.GetNode().Tokens
+	if *dstCollection == nil {
+		*dstCollection = make(token.Collection)
+	}
+
+	l := len(tokens)
+	for _, v := range tokens[0 : l-1] {
+		(*dstCollection)[pos] = append((*dstCollection)[pos], v)
+	}
+}
+
+func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+	if len(tokens) == 0 {
+		return
+	}
+
+	dstCollection := &dst.GetNode().Tokens
+	if *dstCollection == nil {
+		*dstCollection = make(token.Collection)
+	}
+
+	(*dstCollection)[pos] = make([]token.Token, 0)
+	for _, v := range tokens {
+		(*dstCollection)[pos] = append((*dstCollection)[pos], v)
+	}
+}
+
+func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+	if len(tokens) == 0 {
+		return
+	}
+
+	dstCollection := &dst.GetNode().Tokens
+	if *dstCollection == nil {
+		*dstCollection = make(token.Collection)
+	}
+
+	l := len(tokens)
+	(*dstCollection)[pos] = append((*dstCollection)[pos], tokens[l-1])
 }

 func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.Vertex) {
-	if p.withTokens == false {
+	if _, ok := prevNode.GetNode().Tokens[token.SemiColon]; !ok {
 		return
 	}

@@ -117,7 +137,7 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.
 	}

 	if semiColon[0].Value[0] == ';' {
-		p.setFreeFloating(prevNode, token.SemiColon, []token.Token{
+		p.setFreeFloatingTokens(prevNode, token.SemiColon, []token.Token{
 			{
 				ID:    token.ID(';'),
 				Value: semiColon[0].Value[0:1],
@@ -144,7 +164,7 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.
 		Value: semiColon[0].Value[vlen-tlen:],
 	})

-	p.setFreeFloating(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...))
+	p.setFreeFloatingTokens(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...))
 }

 func (p *Parser) returnTokenToPool(yyDollar []yySymType, yyVAL *yySymType) {

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -383,7 +383,7 @@ CAD;

 	for n := 0; n < b.N; n++ {
 		lexer := scanner.NewLexer([]byte(src), "7.4", false, nil)
-		php7parser := php7.NewParser(lexer, false, nil)
+		php7parser := php7.NewParser(lexer, nil)
 		php7parser.Parse()
 	}
 }


@@ -19635,7 +19635,7 @@ func TestPhp7(t *testing.T) {
 	}

 	lexer := scanner.NewLexer([]byte(src), "7.4", false, nil)
-	php7parser := php7.NewParser(lexer, false, nil)
+	php7parser := php7.NewParser(lexer, nil)
 	php7parser.Parse()

 	actual := php7parser.GetRootNode()
 	assert.DeepEqual(t, expected, actual)
@@ -19773,7 +19773,7 @@ func TestPhp5Strings(t *testing.T) {
 	}

 	lexer := scanner.NewLexer([]byte(src), "7.4", false, nil)
-	php7parser := php7.NewParser(lexer, false, nil)
+	php7parser := php7.NewParser(lexer, nil)
 	php7parser.Parse()

 	actual := php7parser.GetRootNode()
 	assert.DeepEqual(t, expected, actual)
@@ -20000,7 +20000,7 @@ CAD;
 	}

 	lexer := scanner.NewLexer([]byte(src), "7.4", false, nil)
-	php7parser := php7.NewParser(lexer, false, nil)
+	php7parser := php7.NewParser(lexer, nil)
 	php7parser.Parse()

 	actual := php7parser.GetRootNode()
 	assert.DeepEqual(t, expected, actual)
@@ -20026,7 +20026,7 @@ func TestPhp7ControlCharsErrors(t *testing.T) {
 	}

 	lexer := scanner.NewLexer([]byte(src), "7.4", false, errorHandlerFunc)
-	php7parser := php7.NewParser(lexer, false, errorHandlerFunc)
+	php7parser := php7.NewParser(lexer, errorHandlerFunc)
 	php7parser.Parse()

 	assert.DeepEqual(t, expected, parserErrors)
 }


@@ -11,10 +11,10 @@ import (
 )

 type Lexer struct {
 	data           []byte
 	phpVersion     string
-	withHiddenTokens bool
+	withTokens     bool
 	errHandlerFunc func(*errors.Error)

 	p, pe, cs   int
 	ts, te, act int
@@ -23,16 +23,15 @@ type Lexer struct {
 	heredocLabel []byte
 	tokenPool    *TokenPool
-	hiddenTokens []token.Token
 	newLines     NewLines
 }

-func NewLexer(data []byte, phpVersion string, withHiddenTokens bool, errHandlerFunc func(*errors.Error)) *Lexer {
+func NewLexer(data []byte, phpVersion string, withTokens bool, errHandlerFunc func(*errors.Error)) *Lexer {
 	lex := &Lexer{
 		data:           data,
 		phpVersion:     phpVersion,
-		withHiddenTokens: withHiddenTokens,
+		withTokens:     withTokens,
 		errHandlerFunc: errHandlerFunc,
 		pe:             len(data),
 		stack:          make([]int, 0),
@@ -57,12 +56,12 @@ func (lex *Lexer) setTokenPosition(token *Token) {
 	token.Position.EndPos = lex.te
 }

-func (lex *Lexer) addHiddenToken(id TokenID, ps, pe int) {
-	if !lex.withHiddenTokens {
+func (lex *Lexer) addHiddenToken(t *Token, id TokenID, ps, pe int) {
+	if !lex.withTokens {
 		return
 	}

-	lex.hiddenTokens = append(lex.hiddenTokens, token.Token{
+	t.Tokens = append(t.Tokens, token.Token{
 		ID:    token.ID(id),
 		Value: lex.data[ps:pe],
 	})
@@ -236,9 +235,9 @@ func (lex *Lexer) error(msg string) {
 }

 func isValidVarNameStart(r byte) bool {
-	return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || r == '_' || (r >= 0x80 && r <= 0xff)
+	return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || r == '_' || r >= 0x80
 }

 func isValidVarName(r byte) bool {
-	return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '_' || (r >= 0x80 && r <= 0xff)
+	return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '_' || r >= 0x80
 }
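Two things worth noting here. First, `addHiddenToken` now threads the token under construction through as a parameter and appends trivia to it, removing the lexer-level `hiddenTokens` buffer. Second, the `isValidVarName*` simplification is behaviour-preserving: `r` is a `byte`, so `r <= 0xff` is always true and can be dropped. A standalone sketch of the new ownership model, with assumed stand-in types (the real ones live in the scanner package):

```go
package main

import "fmt"

type Token struct {
	ID     int
	Value  []byte
	Tokens []Token // hidden trivia collected while scanning this token
}

type Lexer struct {
	data       []byte
	withTokens bool
}

// addHiddenToken appends trivia to the token being built rather than
// buffering it in lexer-level state.
func (lex *Lexer) addHiddenToken(t *Token, id int, ps, pe int) {
	if !lex.withTokens {
		return
	}
	t.Tokens = append(t.Tokens, Token{ID: id, Value: lex.data[ps:pe]})
}

func main() {
	lex := &Lexer{data: []byte("  echo"), withTokens: true}
	tok := &Token{}
	lex.addHiddenToken(tok, 0, 0, 2) // the two leading spaces
	fmt.Printf("%q\n", tok.Tokens[0].Value)
}
```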

File diff suppressed because it is too large


@@ -19,12 +19,11 @@ func initLexer(lex *Lexer) {
 }

 func (lex *Lexer) Lex() *Token {
-	lex.hiddenTokens = nil
 	eof := lex.pe
 	var tok TokenID

 	token := lex.tokenPool.Get()
-	token.Hidden = nil
+	token.Tokens = token.Tokens[:0]
 	token.Value = lex.data[0:0]

 	lblStart := 0
@@ -125,7 +124,7 @@ func (lex *Lexer) Lex() *Token {
 	main := |*
 		"#!" any* :>> newline => {
-			lex.addHiddenToken(T_COMMENT, lex.ts, lex.te)
+			lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te)
 		};
 		any => {
 			fnext html;
@@ -141,12 +140,12 @@ func (lex *Lexer) Lex() *Token {
 			fbreak;
 		};
 		'<?' => {
-			lex.addHiddenToken(T_OPEN_TAG, lex.ts, lex.te)
+			lex.addHiddenToken(token, T_OPEN_TAG, lex.ts, lex.te)
 			fnext php;
 		};
 		'<?php'i ( [ \t] | newline ) => {
 			lex.ungetCnt(lex.te - lex.ts - 5)
-			lex.addHiddenToken(T_OPEN_TAG, lex.ts, lex.ts+5)
+			lex.addHiddenToken(token, T_OPEN_TAG, lex.ts, lex.ts+5)
 			fnext php;
 		};
 		'<?='i => {
@@ -158,7 +157,7 @@ func (lex *Lexer) Lex() *Token {
 	*|;

 	php := |*
-		whitespace_line* => {lex.addHiddenToken(T_WHITESPACE, lex.ts, lex.te)};
+		whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
 		'?>' newline? => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext html; fbreak;};
 		';' whitespace_line* '?>' newline? => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext html; fbreak;};
@@ -318,7 +317,7 @@ func (lex *Lexer) Lex() *Token {
 		('#' | '//') any_line* when is_not_comment_end => {
 			lex.ungetStr("?>")
-			lex.addHiddenToken(T_COMMENT, lex.ts, lex.te)
+			lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te)
 		};
 		'/*' any_line* :>> '*/' {
 			isDocComment := false;
@@ -327,9 +326,9 @@ func (lex *Lexer) Lex() *Token {
 			}

 			if isDocComment {
-				lex.addHiddenToken(T_DOC_COMMENT, lex.ts, lex.te)
+				lex.addHiddenToken(token, T_DOC_COMMENT, lex.ts, lex.te)
 			} else {
-				lex.addHiddenToken(T_COMMENT, lex.ts, lex.te)
+				lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te)
 			}
 		};
@@ -378,7 +377,7 @@ func (lex *Lexer) Lex() *Token {
 	*|;

 	property := |*
-		whitespace_line* => {lex.addHiddenToken(T_WHITESPACE, lex.ts, lex.te)};
+		whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
 		"->" => {lex.setTokenPosition(token); tok = T_OBJECT_OPERATOR; fbreak;};
 		varname => {lex.setTokenPosition(token); tok = T_STRING; fnext php; fbreak;};
 		any => {lex.ungetCnt(1); fgoto php;};
@@ -474,33 +473,33 @@ func (lex *Lexer) Lex() *Token {
 	*|;

 	halt_compiller_open_parenthesis := |*
-		whitespace_line* => {lex.addHiddenToken(T_WHITESPACE, lex.ts, lex.te)};
+		whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
 		"(" => {lex.setTokenPosition(token); tok = TokenID(int('(')); fnext halt_compiller_close_parenthesis; fbreak;};
 		any => {lex.ungetCnt(1); fnext php;};
 	*|;

 	halt_compiller_close_parenthesis := |*
-		whitespace_line* => {lex.addHiddenToken(T_WHITESPACE, lex.ts, lex.te)};
+		whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
 		")" => {lex.setTokenPosition(token); tok = TokenID(int(')')); fnext halt_compiller_close_semicolon; fbreak;};
 		any => {lex.ungetCnt(1); fnext php;};
 	*|;

 	halt_compiller_close_semicolon := |*
-		whitespace_line* => {lex.addHiddenToken(T_WHITESPACE, lex.ts, lex.te)};
+		whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
 		";" => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext halt_compiller_end; fbreak;};
 		any => {lex.ungetCnt(1); fnext php;};
 	*|;

 	halt_compiller_end := |*
-		any_line* => { lex.addHiddenToken(T_HALT_COMPILER, lex.ts, lex.te); };
+		any_line* => { lex.addHiddenToken(token, T_HALT_COMPILER, lex.ts, lex.te); };
 	*|;

 	write exec;
 	}%%

-	token.Hidden = lex.hiddenTokens
 	token.Value = lex.data[lex.ts:lex.te]
 	token.ID = tok
+	lex.addHiddenToken(token, tok, lex.ts, lex.te);

 	return token
 }
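Note the new last statement of `Lex`: when `withTokens` is enabled, the token itself is appended as the final entry of `token.Tokens`, after any trivia. That is why the updated tests below take `tkn.Tokens[:l-1]` to recover just the hidden tokens. A standalone sketch of that consumer-side convention, with a stand-in `Token` type:

```go
package main

import "fmt"

type Token struct{ Value []byte }

// hiddenTrivia returns everything before the final entry; Tokens ends with
// the token itself whenever the lexer runs with withTokens enabled.
func hiddenTrivia(all []Token) []Token {
	if len(all) == 0 {
		return nil
	}
	return all[:len(all)-1]
}

func main() {
	toks := []Token{{[]byte("// note\n")}, {[]byte("echo")}}
	fmt.Println(len(hiddenTrivia(toks))) // 1: just the comment
}
```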


@@ -354,7 +354,7 @@ func TestTokens(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true
 	actual := []string{}

 	for {
@@ -382,13 +382,14 @@ func TestShebang(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true
 	actual := []string{}

 	tkn := lexer.Lex()
 	assert.Equal(t, tkn.ID, T_DNUMBER)

-	for _, tt := range tkn.Hidden {
+	l := len(tkn.Tokens)
+	for _, tt := range tkn.Tokens[:l-1] {
 		actual = append(actual, string(tt.Value))
 	}
@@ -402,11 +403,11 @@ func TestShebangHtml(t *testing.T) {
 	`

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true

 	tkn := lexer.Lex()
 	assert.Equal(t, tkn.ID, T_INLINE_HTML)
-	assert.Equal(t, string(tkn.Hidden[0].Value), "#!/usr/bin/env php\n")
+	assert.Equal(t, string(tkn.Tokens[0].Value), "#!/usr/bin/env php\n")

 	tkn = lexer.Lex()
 	assert.Equal(t, tkn.ID, T_DNUMBER)
@@ -452,7 +453,7 @@ func TestNumberTokens(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true
 	actual := []string{}

 	for {
@@ -509,7 +510,7 @@ func TestConstantStrings(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true
 	actual := []string{}

 	for {
@@ -643,7 +644,7 @@ func TestTeplateStringTokens(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true
 	actual := []string{}

 	for {
@@ -728,7 +729,7 @@ func TestBackquoteStringTokens(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true
 	actual := []string{}

 	for {
@@ -822,7 +823,7 @@ CAT;
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true
 	actual := []string{}

 	for {
@@ -895,7 +896,7 @@ CAT
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true
 	actual := []string{}

 	for {
@@ -934,7 +935,7 @@ CAT;
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true
 	actual := []string{}

 	for {
@@ -965,7 +966,7 @@ func TestHereDocTokens73(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true
 	actual := []string{}

 	for {
@@ -996,7 +997,7 @@ CAT;`

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
 	lexer.phpVersion = "7.2"
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true
 	actual := []string{}

 	for {
@@ -1028,7 +1029,7 @@ func TestInlineHtmlNopTokens(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true
 	actual := []string{}

 	for {
@@ -1133,11 +1134,12 @@ func TestCommentEnd(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true

-	lexer.Lex()
+	tkn := lexer.Lex()

-	actual := lexer.hiddenTokens
+	l := len(tkn.Tokens)
+	actual := tkn.Tokens[:l-1]

 	assert.DeepEqual(t, expected, actual)
 }
@@ -1162,11 +1164,12 @@ func TestCommentNewLine(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true

 	tkn := lexer.Lex()

-	actual := tkn.Hidden
+	l := len(tkn.Tokens)
+	actual := tkn.Tokens[:l-1]

 	assert.DeepEqual(t, expected, actual)
 }
@@ -1191,11 +1194,12 @@ func TestCommentNewLine1(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true

 	tkn := lexer.Lex()

-	actual := tkn.Hidden
+	l := len(tkn.Tokens)
+	actual := tkn.Tokens[:l-1]

 	assert.DeepEqual(t, expected, actual)
 }
@@ -1220,11 +1224,12 @@ func TestCommentNewLine2(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true

 	tkn := lexer.Lex()

-	actual := tkn.Hidden
+	l := len(tkn.Tokens)
+	actual := tkn.Tokens[:l-1]

 	assert.DeepEqual(t, expected, actual)
 }
@@ -1250,11 +1255,12 @@ func TestCommentWithPhpEndTag(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true

 	tkn := lexer.Lex()

-	actual := tkn.Hidden
+	l := len(tkn.Tokens)
+	actual := tkn.Tokens[:l-1]

 	assert.DeepEqual(t, expected, actual)
 }
@@ -1280,11 +1286,12 @@ func TestInlineComment(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true

 	tkn := lexer.Lex()

-	actual := tkn.Hidden
+	l := len(tkn.Tokens)
+	actual := tkn.Tokens[:l-1]

 	assert.DeepEqual(t, expected, actual)
 }
@@ -1310,11 +1317,12 @@ func TestInlineComment2(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true

-	lexer.Lex()
+	tkn := lexer.Lex()

-	actual := lexer.hiddenTokens
+	l := len(tkn.Tokens)
+	actual := tkn.Tokens[:l-1]

 	assert.DeepEqual(t, expected, actual)
 }
@@ -1344,11 +1352,12 @@ func TestEmptyInlineComment(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true

-	lexer.Lex()
+	tkn := lexer.Lex()

-	actual := lexer.hiddenTokens
+	l := len(tkn.Tokens)
+	actual := tkn.Tokens[:l-1]

 	assert.DeepEqual(t, expected, actual)
 }
@@ -1374,11 +1383,12 @@ func TestEmptyInlineComment2(t *testing.T) {
 	}

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true

 	tkn := lexer.Lex()

-	actual := tkn.Hidden
+	l := len(tkn.Tokens)
+	actual := tkn.Tokens[:l-1]

 	assert.DeepEqual(t, expected, actual)
 }
@@ -1388,7 +1398,7 @@ func TestMethodCallTokens(t *testing.T) {
 	$a -> bar ( '' ) ;`

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true

 	expected := []token.Token{
 		{
@@ -1401,7 +1411,8 @@ func TestMethodCallTokens(t *testing.T) {
 		},
 	}
 	tkn := lexer.Lex()
-	actual := tkn.Hidden
+	l := len(tkn.Tokens)
+	actual := tkn.Tokens[:l-1]
 	assert.DeepEqual(t, expected, actual)

 	expected = []token.Token{
@@ -1411,7 +1422,8 @@ func TestMethodCallTokens(t *testing.T) {
 		},
 	}
 	tkn = lexer.Lex()
-	actual = tkn.Hidden
+	l = len(tkn.Tokens)
+	actual = tkn.Tokens[:l-1]
 	assert.DeepEqual(t, expected, actual)

 	expected = []token.Token{
@@ -1421,7 +1433,8 @@ func TestMethodCallTokens(t *testing.T) {
 		},
 	}
 	tkn = lexer.Lex()
-	actual = tkn.Hidden
+	l = len(tkn.Tokens)
+	actual = tkn.Tokens[:l-1]
 	assert.DeepEqual(t, expected, actual)

 	expected = []token.Token{
@@ -1431,7 +1444,8 @@ func TestMethodCallTokens(t *testing.T) {
 		},
 	}
 	tkn = lexer.Lex()
-	actual = tkn.Hidden
+	l = len(tkn.Tokens)
+	actual = tkn.Tokens[:l-1]
 	assert.DeepEqual(t, expected, actual)

 	expected = []token.Token{
@@ -1441,7 +1455,8 @@ func TestMethodCallTokens(t *testing.T) {
 		},
 	}
 	tkn = lexer.Lex()
-	actual = tkn.Hidden
+	l = len(tkn.Tokens)
+	actual = tkn.Tokens[:l-1]
 	assert.DeepEqual(t, expected, actual)

 	expected = []token.Token{
@@ -1451,7 +1466,8 @@ func TestMethodCallTokens(t *testing.T) {
 		},
 	}
 	tkn = lexer.Lex()
-	actual = tkn.Hidden
+	l = len(tkn.Tokens)
+	actual = tkn.Tokens[:l-1]
 	assert.DeepEqual(t, expected, actual)

 	expected = []token.Token{
@@ -1461,7 +1477,8 @@ func TestMethodCallTokens(t *testing.T) {
 		},
 	}
 	tkn = lexer.Lex()
-	actual = tkn.Hidden
+	l = len(tkn.Tokens)
+	actual = tkn.Tokens[:l-1]
 	assert.DeepEqual(t, expected, actual)
 }
@@ -1470,7 +1487,7 @@ func TestYieldFromTokens(t *testing.T) {
 	yield from $a`

 	lexer := NewLexer([]byte(src), "7.4", false, nil)
-	lexer.withHiddenTokens = true
+	lexer.withTokens = true

 	expected := []token.Token{
 		{
@@ -1483,7 +1500,8 @@ func TestYieldFromTokens(t *testing.T) {
 		},
 	}
 	tkn := lexer.Lex()
-	actual := tkn.Hidden
+	l := len(tkn.Tokens)
+	actual := tkn.Tokens[:l-1]
 	assert.DeepEqual(t, expected, actual)

 	expected = []token.Token{
@@ -1493,7 +1511,8 @@ func TestYieldFromTokens(t *testing.T) {
 		},
 	}
 	tkn = lexer.Lex()
-	actual = tkn.Hidden
+	l = len(tkn.Tokens)
+	actual = tkn.Tokens[:l-1]
 	assert.DeepEqual(t, expected, actual)
 }


@@ -9,6 +9,6 @@ import (
 type Token struct {
 	ID       TokenID
 	Value    []byte
-	Hidden   []token.Token
+	Tokens   []token.Token
 	Position position.Position
 }


@@ -32,9 +32,9 @@ func Parse(src []byte, ver string, cfg Config) (ast.Vertex, error) {
 	lexer := scanner.NewLexer(src, ver, cfg.WithTokens, cfg.ErrorHandlerFunc)

 	if r == -1 {
-		parser = php5.NewParser(lexer, cfg.WithTokens, cfg.ErrorHandlerFunc)
+		parser = php5.NewParser(lexer, cfg.ErrorHandlerFunc)
 	} else {
-		parser = php7.NewParser(lexer, cfg.WithTokens, cfg.ErrorHandlerFunc)
+		parser = php7.NewParser(lexer, cfg.ErrorHandlerFunc)
 	}

 	parser.Parse()


@@ -13,7 +13,7 @@ import (
 func parsePhp5(src string) ast.Vertex {
 	lexer := scanner.NewLexer([]byte(src), "5.6", true, nil)
-	php5parser := php5.NewParser(lexer, true, nil)
+	php5parser := php5.NewParser(lexer, nil)
 	php5parser.Parse()

 	return php5parser.GetRootNode()
@@ -832,7 +832,8 @@ func TestParseAndPrintPhp5Break(t *testing.T) {
 	break ( 2 ) ;
 	`

-	actual := printPhp5(parsePhp5(src))
+	root := parsePhp5(src)
+	actual := printPhp5(root)

 	if src != actual {
 		t.Errorf("\nexpected: %s\ngot: %s\n", src, actual)


@@ -30,7 +30,7 @@ abstract class Bar extends Baz

 	// parse
 	lexer := scanner.NewLexer([]byte(src), "7.4", true, nil)
-	php7parser := php7.NewParser(lexer, true, nil)
+	php7parser := php7.NewParser(lexer, nil)
 	php7parser.Parse()
 	rootNode := php7parser.GetRootNode()
@@ -62,7 +62,7 @@ abstract class Bar extends Baz
 func parse(src string) ast.Vertex {
 	lexer := scanner.NewLexer([]byte(src), "7.4", true, nil)
-	php7parser := php7.NewParser(lexer, true, nil)
+	php7parser := php7.NewParser(lexer, nil)
 	php7parser.Parse()

 	return php7parser.GetRootNode()