[refactoring] remove param withTokens from parser

Vadym Slizov 2020-07-03 00:20:32 +03:00
parent ee3fe3b5c0
commit 06d202e71c
20 changed files with 4207 additions and 4157 deletions
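The shape of the change for callers: withTokens is configured once, on the lexer, and NewParser drops its middle parameter; internally the parser now detects token data by its presence on a node instead of consulting a flag. A minimal before/after sketch built from the signatures in this diff (import paths are not shown here and are assumed):

src := []byte("<?php echo 1;")

// Before this commit the flag was passed twice:
//   lexer := scanner.NewLexer(src, "7.4", true, nil)
//   php7parser := php7.NewParser(lexer, true, nil)

// After this commit only the lexer takes withTokens:
lexer := scanner.NewLexer(src, "7.4", true, nil)
php7parser := php7.NewParser(lexer, nil)
php7parser.Parse()
root := php7parser.GetRootNode()
_ = root // ast.Vertex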


@@ -14,14 +14,12 @@ type Parser struct {
Lexer *scanner.Lexer
currentToken *scanner.Token
rootNode ast.Vertex
withTokens bool
errHandlerFunc func(*errors.Error)
}
// NewParser creates and returns new Parser
func NewParser(lexer *scanner.Lexer, withTokens bool, errHandlerFunc func(*errors.Error)) *Parser {
func NewParser(lexer *scanner.Lexer, errHandlerFunc func(*errors.Error)) *Parser {
return &Parser{
withTokens: withTokens,
Lexer: lexer,
errHandlerFunc: errHandlerFunc,
}
@@ -63,7 +61,7 @@ func lastNode(nn []ast.Vertex) ast.Vertex {
}
func (p *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) {
if p.withTokens == false {
if _, ok := src.GetNode().Tokens[token.Start]; !ok {
return
}
@@ -71,42 +69,64 @@ func (p *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) {
return
}
p.setFreeFloating(dst, token.Start, src.GetNode().Tokens[token.Start])
delete(src.GetNode().Tokens, token.Start)
}
func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, strings []token.Token) {
if p.withTokens == false {
return
}
if len(strings) == 0 {
return
}
dstCollection := &dst.GetNode().Tokens
if *dstCollection == nil {
*dstCollection = make(token.Collection)
}
(*dstCollection)[pos] = strings
(*dstCollection)[token.Start] = src.GetNode().Tokens[token.Start]
delete(src.GetNode().Tokens, token.Start)
}
func (p *Parser) GetFreeFloatingToken(t *scanner.Token) []token.Token {
if p.withTokens == false {
return []token.Token{}
func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []token.Token) {
if len(tokens) == 0 {
return
}
return []token.Token{
{
ID: token.ID(t.ID),
Value: t.Value,
},
dstCollection := &dst.GetNode().Tokens
if *dstCollection == nil {
*dstCollection = make(token.Collection)
}
l := len(tokens)
for _, v := range tokens[0 : l-1] {
(*dstCollection)[pos] = append((*dstCollection)[pos], v)
}
}
func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []token.Token) {
if len(tokens) == 0 {
return
}
dstCollection := &dst.GetNode().Tokens
if *dstCollection == nil {
*dstCollection = make(token.Collection)
}
(*dstCollection)[pos] = make([]token.Token, 0)
for _, v := range tokens {
(*dstCollection)[pos] = append((*dstCollection)[pos], v)
}
}
func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []token.Token) {
if len(tokens) == 0 {
return
}
dstCollection := &dst.GetNode().Tokens
if *dstCollection == nil {
*dstCollection = make(token.Collection)
}
l := len(tokens)
(*dstCollection)[pos] = append((*dstCollection)[pos], tokens[l-1])
}
func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.Vertex) {
if p.withTokens == false {
if _, ok := prevNode.GetNode().Tokens[token.SemiColon]; !ok {
return
}
@@ -117,7 +137,7 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.
}
if semiColon[0].Value[0] == ';' {
p.setFreeFloating(prevNode, token.SemiColon, []token.Token{
p.setFreeFloatingTokens(prevNode, token.SemiColon, []token.Token{
{
ID: token.ID(';'),
Value: semiColon[0].Value[0:1],
@@ -126,7 +146,6 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.
}
vlen := len(semiColon[0].Value)
tlen := 2
if bytes.HasSuffix(semiColon[0].Value, []byte("?>\n")) {
tlen = 3
@@ -145,7 +164,7 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.
Value: semiColon[0].Value[vlen-tlen:],
})
p.setFreeFloating(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...))
p.setFreeFloatingTokens(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...))
}
func (p *Parser) returnTokenToPool(yyDollar []yySymType, yyVAL *yySymType) {
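The single setFreeFloating helper becomes three, with deliberately different slicing: setFreeFloating appends everything except the last element at pos, setToken appends only the last element, and setFreeFloatingTokens replaces the collection at pos with the whole list. This mirrors the new lexer convention (see the scanner changes below) in which the lexed token itself travels as the final element of a token list, preceded by its hidden tokens. A self-contained sketch of the split, using a stand-in type rather than the project's token.Token:

package main

import "fmt"

// Tok stands in for the project's token.Token; illustration only.
type Tok struct {
	Name  string
	Value string
}

func main() {
	// Convention after this commit: hidden tokens first, the lexed token itself last.
	tokens := []Tok{
		{"T_WHITESPACE", " "},
		{"T_COMMENT", "// note"},
		{"';'", ";"},
	}
	l := len(tokens)
	fmt.Println("setFreeFloating keeps:", tokens[:l-1]) // all but the last element
	fmt.Println("setToken keeps:", tokens[l-1:])        // only the last element
	fmt.Println("setFreeFloatingTokens keeps:", tokens) // the whole list, replacing pos
}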

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -415,7 +415,7 @@ CAD;
for n := 0; n < b.N; n++ {
lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
php5parser := php5.NewParser(lexer, false, nil)
php5parser := php5.NewParser(lexer, nil)
php5parser.Parse()
}
}


@@ -22458,7 +22458,7 @@ func TestPhp5(t *testing.T) {
}
lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
php5parser := php5.NewParser(lexer, false, nil)
php5parser := php5.NewParser(lexer, nil)
php5parser.Parse()
actual := php5parser.GetRootNode()
assert.DeepEqual(t, expected, actual)
@@ -22596,7 +22596,7 @@ func TestPhp5Strings(t *testing.T) {
}
lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
php5parser := php5.NewParser(lexer, false, nil)
php5parser := php5.NewParser(lexer, nil)
php5parser.Parse()
actual := php5parser.GetRootNode()
assert.DeepEqual(t, expected, actual)
@@ -22823,7 +22823,7 @@ CAD;
}
lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
php5parser := php5.NewParser(lexer, false, nil)
php5parser := php5.NewParser(lexer, nil)
php5parser.Parse()
actual := php5parser.GetRootNode()
assert.DeepEqual(t, expected, actual)
@@ -22849,7 +22849,7 @@ func TestPhp5ControlCharsErrors(t *testing.T) {
}
lexer := scanner.NewLexer([]byte(src), "5.6", false, errorHandlerFunc)
php5parser := php5.NewParser(lexer, false, errorHandlerFunc)
php5parser := php5.NewParser(lexer, errorHandlerFunc)
php5parser.Parse()
assert.DeepEqual(t, expected, parserErrors)
}


@@ -14,14 +14,12 @@ type Parser struct {
Lexer *scanner.Lexer
currentToken *scanner.Token
rootNode ast.Vertex
withTokens bool
errHandlerFunc func(*errors.Error)
}
// NewParser creates and returns new Parser
func NewParser(lexer *scanner.Lexer, withTokens bool, errHandlerFunc func(*errors.Error)) *Parser {
func NewParser(lexer *scanner.Lexer, errHandlerFunc func(*errors.Error)) *Parser {
return &Parser{
withTokens: withTokens,
Lexer: lexer,
errHandlerFunc: errHandlerFunc,
}
@@ -63,7 +61,7 @@ func lastNode(nn []ast.Vertex) ast.Vertex {
}
func (p *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) {
if p.withTokens == false {
if _, ok := src.GetNode().Tokens[token.Start]; !ok {
return
}
@@ -71,42 +69,64 @@ func (p *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) {
return
}
p.setFreeFloating(dst, token.Start, src.GetNode().Tokens[token.Start])
delete(src.GetNode().Tokens, token.Start)
}
func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, strings []token.Token) {
if p.withTokens == false {
return
}
if len(strings) == 0 {
return
}
dstCollection := &dst.GetNode().Tokens
if *dstCollection == nil {
*dstCollection = make(token.Collection)
}
(*dstCollection)[pos] = strings
(*dstCollection)[token.Start] = src.GetNode().Tokens[token.Start]
delete(src.GetNode().Tokens, token.Start)
}
func (p *Parser) GetFreeFloatingToken(t *scanner.Token) []token.Token {
if p.withTokens == false {
return []token.Token{}
func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []token.Token) {
if len(tokens) == 0 {
return
}
return []token.Token{
{
ID: token.ID(t.ID),
Value: t.Value,
},
dstCollection := &dst.GetNode().Tokens
if *dstCollection == nil {
*dstCollection = make(token.Collection)
}
l := len(tokens)
for _, v := range tokens[0 : l-1] {
(*dstCollection)[pos] = append((*dstCollection)[pos], v)
}
}
func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []token.Token) {
if len(tokens) == 0 {
return
}
dstCollection := &dst.GetNode().Tokens
if *dstCollection == nil {
*dstCollection = make(token.Collection)
}
(*dstCollection)[pos] = make([]token.Token, 0)
for _, v := range tokens {
(*dstCollection)[pos] = append((*dstCollection)[pos], v)
}
}
func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []token.Token) {
if len(tokens) == 0 {
return
}
dstCollection := &dst.GetNode().Tokens
if *dstCollection == nil {
*dstCollection = make(token.Collection)
}
l := len(tokens)
(*dstCollection)[pos] = append((*dstCollection)[pos], tokens[l-1])
}
func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.Vertex) {
if p.withTokens == false {
if _, ok := prevNode.GetNode().Tokens[token.SemiColon]; !ok {
return
}
@@ -117,7 +137,7 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.
}
if semiColon[0].Value[0] == ';' {
p.setFreeFloating(prevNode, token.SemiColon, []token.Token{
p.setFreeFloatingTokens(prevNode, token.SemiColon, []token.Token{
{
ID: token.ID(';'),
Value: semiColon[0].Value[0:1],
@@ -144,7 +164,7 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.
Value: semiColon[0].Value[vlen-tlen:],
})
p.setFreeFloating(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...))
p.setFreeFloatingTokens(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...))
}
func (p *Parser) returnTokenToPool(yyDollar []yySymType, yyVAL *yySymType) {

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -383,7 +383,7 @@ CAD;
for n := 0; n < b.N; n++ {
lexer := scanner.NewLexer([]byte(src), "7.4", false, nil)
php7parser := php7.NewParser(lexer, false, nil)
php7parser := php7.NewParser(lexer, nil)
php7parser.Parse()
}
}


@@ -19635,7 +19635,7 @@ func TestPhp7(t *testing.T) {
}
lexer := scanner.NewLexer([]byte(src), "7.4", false, nil)
php7parser := php7.NewParser(lexer, false, nil)
php7parser := php7.NewParser(lexer, nil)
php7parser.Parse()
actual := php7parser.GetRootNode()
assert.DeepEqual(t, expected, actual)
@@ -19773,7 +19773,7 @@ func TestPhp5Strings(t *testing.T) {
}
lexer := scanner.NewLexer([]byte(src), "7.4", false, nil)
php7parser := php7.NewParser(lexer, false, nil)
php7parser := php7.NewParser(lexer, nil)
php7parser.Parse()
actual := php7parser.GetRootNode()
assert.DeepEqual(t, expected, actual)
@@ -20000,7 +20000,7 @@ CAD;
}
lexer := scanner.NewLexer([]byte(src), "7.4", false, nil)
php7parser := php7.NewParser(lexer, false, nil)
php7parser := php7.NewParser(lexer, nil)
php7parser.Parse()
actual := php7parser.GetRootNode()
assert.DeepEqual(t, expected, actual)
@@ -20026,7 +20026,7 @@ func TestPhp7ControlCharsErrors(t *testing.T) {
}
lexer := scanner.NewLexer([]byte(src), "7.4", false, errorHandlerFunc)
php7parser := php7.NewParser(lexer, false, errorHandlerFunc)
php7parser := php7.NewParser(lexer, errorHandlerFunc)
php7parser.Parse()
assert.DeepEqual(t, expected, parserErrors)
}


@@ -11,10 +11,10 @@ import (
)
type Lexer struct {
data []byte
phpVersion string
withHiddenTokens bool
errHandlerFunc func(*errors.Error)
data []byte
phpVersion string
withTokens bool
errHandlerFunc func(*errors.Error)
p, pe, cs int
ts, te, act int
@@ -23,16 +23,15 @@ type Lexer struct {
heredocLabel []byte
tokenPool *TokenPool
hiddenTokens []token.Token
newLines NewLines
}
func NewLexer(data []byte, phpVersion string, withHiddenTokens bool, errHandlerFunc func(*errors.Error)) *Lexer {
func NewLexer(data []byte, phpVersion string, withTokens bool, errHandlerFunc func(*errors.Error)) *Lexer {
lex := &Lexer{
data: data,
phpVersion: phpVersion,
withHiddenTokens: withHiddenTokens,
errHandlerFunc: errHandlerFunc,
data: data,
phpVersion: phpVersion,
withTokens: withTokens,
errHandlerFunc: errHandlerFunc,
pe: len(data),
stack: make([]int, 0),
@@ -57,12 +56,12 @@ func (lex *Lexer) setTokenPosition(token *Token) {
token.Position.EndPos = lex.te
}
func (lex *Lexer) addHiddenToken(id TokenID, ps, pe int) {
if !lex.withHiddenTokens {
func (lex *Lexer) addHiddenToken(t *Token, id TokenID, ps, pe int) {
if !lex.withTokens {
return
}
lex.hiddenTokens = append(lex.hiddenTokens, token.Token{
t.Tokens = append(t.Tokens, token.Token{
ID: token.ID(id),
Value: lex.data[ps:pe],
})
@@ -236,9 +235,9 @@ func (lex *Lexer) error(msg string) {
}
func isValidVarNameStart(r byte) bool {
return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || r == '_' || (r >= 0x80 && r <= 0xff)
return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || r == '_' || r >= 0x80
}
func isValidVarName(r byte) bool {
return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '_' || (r >= 0x80 && r <= 0xff)
return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '_' || r >= 0x80
}
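Two things happen in the scanner besides the rename from withHiddenTokens to withTokens: addHiddenToken now appends into the target token's own Tokens slice rather than a lexer-level buffer, and the variable-name byte checks drop their "&& r <= 0xff" clause. The latter is behavior-neutral, since r is a byte and can never exceed 0xff; a quick exhaustive check (standalone, for illustration) confirms the predicates agree:

package main

import "fmt"

func main() {
	same := true
	for i := 0; i <= 0xff; i++ {
		r := byte(i)
		// Old predicate vs new predicate from isValidVarNameStart / isValidVarName.
		if (r >= 0x80 && r <= 0xff) != (r >= 0x80) {
			same = false
		}
	}
	fmt.Println(same) // true: the upper bound was redundant for a byte
}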

File diff suppressed because it is too large


@@ -19,12 +19,11 @@ func initLexer(lex *Lexer) {
}
func (lex *Lexer) Lex() *Token {
lex.hiddenTokens = nil
eof := lex.pe
var tok TokenID
token := lex.tokenPool.Get()
token.Hidden = nil
token.Tokens = token.Tokens[:0]
token.Value = lex.data[0:0]
lblStart := 0
@@ -125,7 +124,7 @@ func (lex *Lexer) Lex() *Token {
main := |*
"#!" any* :>> newline => {
lex.addHiddenToken(T_COMMENT, lex.ts, lex.te)
lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te)
};
any => {
fnext html;
@@ -141,12 +140,12 @@ func (lex *Lexer) Lex() *Token {
fbreak;
};
'<?' => {
lex.addHiddenToken(T_OPEN_TAG, lex.ts, lex.te)
lex.addHiddenToken(token, T_OPEN_TAG, lex.ts, lex.te)
fnext php;
};
'<?php'i ( [ \t] | newline ) => {
lex.ungetCnt(lex.te - lex.ts - 5)
lex.addHiddenToken(T_OPEN_TAG, lex.ts, lex.ts+5)
lex.addHiddenToken(token, T_OPEN_TAG, lex.ts, lex.ts+5)
fnext php;
};
'<?='i => {
@@ -158,7 +157,7 @@ func (lex *Lexer) Lex() *Token {
*|;
php := |*
whitespace_line* => {lex.addHiddenToken(T_WHITESPACE, lex.ts, lex.te)};
whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
'?>' newline? => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext html; fbreak;};
';' whitespace_line* '?>' newline? => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext html; fbreak;};
@@ -318,7 +317,7 @@ func (lex *Lexer) Lex() *Token {
('#' | '//') any_line* when is_not_comment_end => {
lex.ungetStr("?>")
lex.addHiddenToken(T_COMMENT, lex.ts, lex.te)
lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te)
};
'/*' any_line* :>> '*/' {
isDocComment := false;
@@ -327,9 +326,9 @@ func (lex *Lexer) Lex() *Token {
}
if isDocComment {
lex.addHiddenToken(T_DOC_COMMENT, lex.ts, lex.te)
lex.addHiddenToken(token, T_DOC_COMMENT, lex.ts, lex.te)
} else {
lex.addHiddenToken(T_COMMENT, lex.ts, lex.te)
lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te)
}
};
@@ -378,7 +377,7 @@ func (lex *Lexer) Lex() *Token {
*|;
property := |*
whitespace_line* => {lex.addHiddenToken(T_WHITESPACE, lex.ts, lex.te)};
whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
"->" => {lex.setTokenPosition(token); tok = T_OBJECT_OPERATOR; fbreak;};
varname => {lex.setTokenPosition(token); tok = T_STRING; fnext php; fbreak;};
any => {lex.ungetCnt(1); fgoto php;};
@@ -474,33 +473,33 @@ func (lex *Lexer) Lex() *Token {
*|;
halt_compiller_open_parenthesis := |*
whitespace_line* => {lex.addHiddenToken(T_WHITESPACE, lex.ts, lex.te)};
whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
"(" => {lex.setTokenPosition(token); tok = TokenID(int('(')); fnext halt_compiller_close_parenthesis; fbreak;};
any => {lex.ungetCnt(1); fnext php;};
*|;
halt_compiller_close_parenthesis := |*
whitespace_line* => {lex.addHiddenToken(T_WHITESPACE, lex.ts, lex.te)};
whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
")" => {lex.setTokenPosition(token); tok = TokenID(int(')')); fnext halt_compiller_close_semicolon; fbreak;};
any => {lex.ungetCnt(1); fnext php;};
*|;
halt_compiller_close_semicolon := |*
whitespace_line* => {lex.addHiddenToken(T_WHITESPACE, lex.ts, lex.te)};
whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
";" => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext halt_compiller_end; fbreak;};
any => {lex.ungetCnt(1); fnext php;};
*|;
halt_compiller_end := |*
any_line* => { lex.addHiddenToken(T_HALT_COMPILER, lex.ts, lex.te); };
any_line* => { lex.addHiddenToken(token, T_HALT_COMPILER, lex.ts, lex.te); };
*|;
write exec;
}%%
token.Hidden = lex.hiddenTokens
token.Value = lex.data[lex.ts:lex.te]
token.ID = tok
lex.addHiddenToken(token, tok, lex.ts, lex.te);
return token
}
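This final hunk is the heart of the lexer change: instead of copying lex.hiddenTokens into token.Hidden, Lex now calls addHiddenToken once more for the lexed token itself, so it arrives as the last element of token.Tokens with its hidden tokens in front of it. Consumers read hidden tokens as Tokens[:len-1], exactly the pattern the updated tests below use. A usage sketch (the scanner import path is assumed):

lexer := scanner.NewLexer([]byte("<?php $a;"), "7.4", true, nil)
tkn := lexer.Lex()
l := len(tkn.Tokens)
for _, hidden := range tkn.Tokens[:l-1] {
	fmt.Printf("%v %q\n", hidden.ID, hidden.Value) // open tag, whitespace, ...
}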


@@ -354,7 +354,7 @@ func TestTokens(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
actual := []string{}
for {
@@ -382,13 +382,14 @@ func TestShebang(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
actual := []string{}
tkn := lexer.Lex()
assert.Equal(t, tkn.ID, T_DNUMBER)
for _, tt := range tkn.Hidden {
l := len(tkn.Tokens)
for _, tt := range tkn.Tokens[:l-1] {
actual = append(actual, string(tt.Value))
}
@@ -402,11 +403,11 @@ func TestShebangHtml(t *testing.T) {
`
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
tkn := lexer.Lex()
assert.Equal(t, tkn.ID, T_INLINE_HTML)
assert.Equal(t, string(tkn.Hidden[0].Value), "#!/usr/bin/env php\n")
assert.Equal(t, string(tkn.Tokens[0].Value), "#!/usr/bin/env php\n")
tkn = lexer.Lex()
assert.Equal(t, tkn.ID, T_DNUMBER)
@@ -452,7 +453,7 @@ func TestNumberTokens(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
actual := []string{}
for {
@@ -509,7 +510,7 @@ func TestConstantStrings(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
actual := []string{}
for {
@@ -643,7 +644,7 @@ func TestTeplateStringTokens(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
actual := []string{}
for {
@@ -728,7 +729,7 @@ func TestBackquoteStringTokens(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
actual := []string{}
for {
@@ -822,7 +823,7 @@ CAT;
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
actual := []string{}
for {
@@ -895,7 +896,7 @@ CAT
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
actual := []string{}
for {
@@ -934,7 +935,7 @@ CAT;
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
actual := []string{}
for {
@@ -965,7 +966,7 @@ func TestHereDocTokens73(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
actual := []string{}
for {
@@ -996,7 +997,7 @@ CAT;`
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.phpVersion = "7.2"
lexer.withHiddenTokens = true
lexer.withTokens = true
actual := []string{}
for {
@@ -1028,7 +1029,7 @@ func TestInlineHtmlNopTokens(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
actual := []string{}
for {
@@ -1133,11 +1134,12 @@ func TestCommentEnd(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
lexer.Lex()
tkn := lexer.Lex()
actual := lexer.hiddenTokens
l := len(tkn.Tokens)
actual := tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
}
@@ -1162,11 +1164,12 @@ func TestCommentNewLine(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
tkn := lexer.Lex()
actual := tkn.Hidden
l := len(tkn.Tokens)
actual := tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
}
@@ -1191,11 +1194,12 @@ func TestCommentNewLine1(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
tkn := lexer.Lex()
actual := tkn.Hidden
l := len(tkn.Tokens)
actual := tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
}
@@ -1220,11 +1224,12 @@ func TestCommentNewLine2(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
tkn := lexer.Lex()
actual := tkn.Hidden
l := len(tkn.Tokens)
actual := tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
}
@@ -1250,11 +1255,12 @@ func TestCommentWithPhpEndTag(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
tkn := lexer.Lex()
actual := tkn.Hidden
l := len(tkn.Tokens)
actual := tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
}
@@ -1280,11 +1286,12 @@ func TestInlineComment(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
tkn := lexer.Lex()
actual := tkn.Hidden
l := len(tkn.Tokens)
actual := tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
}
@@ -1310,11 +1317,12 @@ func TestInlineComment2(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
lexer.Lex()
tkn := lexer.Lex()
actual := lexer.hiddenTokens
l := len(tkn.Tokens)
actual := tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
}
@@ -1344,11 +1352,12 @@ func TestEmptyInlineComment(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
lexer.Lex()
tkn := lexer.Lex()
actual := lexer.hiddenTokens
l := len(tkn.Tokens)
actual := tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
}
@@ -1374,11 +1383,12 @@ func TestEmptyInlineComment2(t *testing.T) {
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
tkn := lexer.Lex()
actual := tkn.Hidden
l := len(tkn.Tokens)
actual := tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
}
@@ -1388,7 +1398,7 @@ func TestMethodCallTokens(t *testing.T) {
$a -> bar ( '' ) ;`
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
expected := []token.Token{
{
@@ -1401,7 +1411,8 @@ func TestMethodCallTokens(t *testing.T) {
},
}
tkn := lexer.Lex()
actual := tkn.Hidden
l := len(tkn.Tokens)
actual := tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
expected = []token.Token{
@@ -1411,7 +1422,8 @@ func TestMethodCallTokens(t *testing.T) {
},
}
tkn = lexer.Lex()
actual = tkn.Hidden
l = len(tkn.Tokens)
actual = tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
expected = []token.Token{
@@ -1421,7 +1433,8 @@ func TestMethodCallTokens(t *testing.T) {
},
}
tkn = lexer.Lex()
actual = tkn.Hidden
l = len(tkn.Tokens)
actual = tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
expected = []token.Token{
@@ -1431,7 +1444,8 @@ func TestMethodCallTokens(t *testing.T) {
},
}
tkn = lexer.Lex()
actual = tkn.Hidden
l = len(tkn.Tokens)
actual = tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
expected = []token.Token{
@@ -1441,7 +1455,8 @@ func TestMethodCallTokens(t *testing.T) {
},
}
tkn = lexer.Lex()
actual = tkn.Hidden
l = len(tkn.Tokens)
actual = tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
expected = []token.Token{
@@ -1451,7 +1466,8 @@ func TestMethodCallTokens(t *testing.T) {
},
}
tkn = lexer.Lex()
actual = tkn.Hidden
l = len(tkn.Tokens)
actual = tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
expected = []token.Token{
@@ -1461,7 +1477,8 @@ func TestMethodCallTokens(t *testing.T) {
},
}
tkn = lexer.Lex()
actual = tkn.Hidden
l = len(tkn.Tokens)
actual = tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
}
@@ -1470,7 +1487,7 @@ func TestYieldFromTokens(t *testing.T) {
yield from $a`
lexer := NewLexer([]byte(src), "7.4", false, nil)
lexer.withHiddenTokens = true
lexer.withTokens = true
expected := []token.Token{
{
@@ -1483,7 +1500,8 @@ func TestYieldFromTokens(t *testing.T) {
},
}
tkn := lexer.Lex()
actual := tkn.Hidden
l := len(tkn.Tokens)
actual := tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
expected = []token.Token{
@@ -1493,7 +1511,8 @@ func TestYieldFromTokens(t *testing.T) {
},
}
tkn = lexer.Lex()
actual = tkn.Hidden
l = len(tkn.Tokens)
actual = tkn.Tokens[:l-1]
assert.DeepEqual(t, expected, actual)
}


@@ -9,6 +9,6 @@ import (
type Token struct {
ID TokenID
Value []byte
Hidden []token.Token
Tokens []token.Token
Position position.Position
}


@@ -32,9 +32,9 @@ func Parse(src []byte, ver string, cfg Config) (ast.Vertex, error) {
lexer := scanner.NewLexer(src, ver, cfg.WithTokens, cfg.ErrorHandlerFunc)
if r == -1 {
parser = php5.NewParser(lexer, cfg.WithTokens, cfg.ErrorHandlerFunc)
parser = php5.NewParser(lexer, cfg.ErrorHandlerFunc)
} else {
parser = php7.NewParser(lexer, cfg.WithTokens, cfg.ErrorHandlerFunc)
parser = php7.NewParser(lexer, cfg.ErrorHandlerFunc)
}
parser.Parse()
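For users of the top-level API nothing changes: cfg.WithTokens still exists, it now just flows only into the lexer. Roughly, with the package name parser and the errors import assumed from context:

cfg := parser.Config{
	WithTokens:       true,
	ErrorHandlerFunc: func(e *errors.Error) { fmt.Println(e) },
}
root, err := parser.Parse([]byte("<?php echo 1;"), "7.4", cfg)
if err != nil {
	// handle the error
}
_ = root // ast.Vertex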


@@ -13,7 +13,7 @@ import (
func parsePhp5(src string) ast.Vertex {
lexer := scanner.NewLexer([]byte(src), "5.6", true, nil)
php5parser := php5.NewParser(lexer, true, nil)
php5parser := php5.NewParser(lexer, nil)
php5parser.Parse()
return php5parser.GetRootNode()
@@ -832,7 +832,8 @@ func TestParseAndPrintPhp5Break(t *testing.T) {
break ( 2 ) ;
`
actual := printPhp5(parsePhp5(src))
root := parsePhp5(src)
actual := printPhp5(root)
if src != actual {
t.Errorf("\nexpected: %s\ngot: %s\n", src, actual)


@@ -30,7 +30,7 @@ abstract class Bar extends Baz
// parse
lexer := scanner.NewLexer([]byte(src), "7.4", true, nil)
php7parser := php7.NewParser(lexer, true, nil)
php7parser := php7.NewParser(lexer, nil)
php7parser.Parse()
rootNode := php7parser.GetRootNode()
@@ -62,7 +62,7 @@ abstract class Bar extends Baz
func parse(src string) ast.Vertex {
lexer := scanner.NewLexer([]byte(src), "7.4", true, nil)
php7parser := php7.NewParser(lexer, true, nil)
php7parser := php7.NewParser(lexer, nil)
php7parser.Parse()
return php7parser.GetRootNode()