diff --git a/internal/php5/parser.go b/internal/php5/parser.go
index 8ceb195..fe68521 100644
--- a/internal/php5/parser.go
+++ b/internal/php5/parser.go
@@ -36,11 +36,14 @@ func NewParser(src []byte, v string) *Parser {
 	}
 }
 
-// Lex proxy to lexer Lex
+// Lex proxy to scanner Lex
 func (l *Parser) Lex(lval *yySymType) int {
-	t := l.Lexer.Lex(lval)
-	l.currentToken = lval.token
-	return t
+	t := l.Lexer.Lex()
+
+	l.currentToken = t
+	lval.token = t
+
+	return int(t.ID)
 }
 
 func (l *Parser) Error(msg string) {
diff --git a/internal/php7/parser.go b/internal/php7/parser.go
index b2f106b..b2a0cf6 100644
--- a/internal/php7/parser.go
+++ b/internal/php7/parser.go
@@ -36,9 +36,12 @@ func NewParser(src []byte, v string) *Parser {
 }
 
 func (l *Parser) Lex(lval *yySymType) int {
-	t := l.Lexer.Lex(lval)
-	l.currentToken = lval.token
-	return t
+	t := l.Lexer.Lex()
+
+	l.currentToken = t
+	lval.token = t
+
+	return int(t.ID)
 }
 
 func (l *Parser) Error(msg string) {
diff --git a/internal/scanner/lexer.go b/internal/scanner/lexer.go
index 8a68d86..d367291 100644
--- a/internal/scanner/lexer.go
+++ b/internal/scanner/lexer.go
@@ -11,7 +11,7 @@ import (
 )
 
 type Scanner interface {
-	Lex(lval Lval) int
+	Lex() *Token
 	ReturnTokenToPool(t *Token)
 	GetErrors() []*errors.Error
 	GetWithHiddenTokens() bool
@@ -20,11 +20,6 @@ type Scanner interface {
 	SetErrors(e []*errors.Error)
 }
 
-// Lval parsers yySymType must implement this interface
-type Lval interface {
-	Token(tkn *Token)
-}
-
 type Lexer struct {
 	data      []byte
 	p, pe, cs int
diff --git a/internal/scanner/scanner.go b/internal/scanner/scanner.go
index 185e7a9..95db2b2 100644
Binary files a/internal/scanner/scanner.go and b/internal/scanner/scanner.go differ
diff --git a/internal/scanner/scanner.rl b/internal/scanner/scanner.rl
index d8efac1..abd03b4 100644
--- a/internal/scanner/scanner.rl
+++ b/internal/scanner/scanner.rl
@@ -27,7 +27,7 @@ func NewLexer(data []byte) *Lexer {
 	return lex
 }
 
-func (lex *Lexer) Lex(lval Lval) int {
+func (lex *Lexer) Lex() *Token {
 	lex.HiddenTokens = nil
 	eof := lex.pe
 	var tok TokenID
@@ -511,7 +511,5 @@ func (lex *Lexer) Lex(lval Lval) int {
 	token.Value = lex.data[lex.ts:lex.te]
 	token.ID = tok
 
-	lval.Token(token)
-
-	return int(tok);
+	return token
 }
\ No newline at end of file
diff --git a/internal/scanner/scanner_test.go b/internal/scanner/scanner_test.go
index fecea9d..ec61cbc 100644
--- a/internal/scanner/scanner_test.go
+++ b/internal/scanner/scanner_test.go
@@ -7,14 +7,6 @@ import (
 	"gotest.tools/assert"
 )
 
-type lval struct {
-	Tkn *Token
-}
-
-func (lv *lval) Token(t *Token) {
-	lv.Tkn = t
-}
-
 func TestTokens(t *testing.T) {
 	src := `inline html - 
 
@@ -361,16 +353,15 @@ func TestTokens(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
 	actual := []string{}
 
 	for {
-		token := lexer.Lex(lv)
-		if token == 0 {
+		tkn := lexer.Lex()
+		if tkn.ID == 0 {
 			break
 		}
-		actual = append(actual, TokenID(token).String())
+		actual = append(actual, tkn.ID.String())
 	}
 
 	assert.DeepEqual(t, expected, actual)
@@ -390,13 +381,12 @@ func TestShebang(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
 	actual := []string{}
 
-	token := lexer.Lex(lv)
-	assert.Equal(t, token, int(T_DNUMBER))
+	tkn := lexer.Lex()
+	assert.Equal(t, tkn.ID, T_DNUMBER)
 
-	for _, tt := range lv.Tkn.Hidden {
+	for _, tt := range tkn.Hidden {
 		actual = append(actual, string(tt.Value))
 	}
@@ -411,14 +401,13 @@ func TestShebangHtml(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
-	token := lexer.Lex(lv)
-	assert.Equal(t, token, int(T_INLINE_HTML))
-	assert.Equal(t, string(lv.Tkn.Hidden[0].Value), "#!/usr/bin/env php\n")
+	tkn := lexer.Lex()
+	assert.Equal(t, tkn.ID, T_INLINE_HTML)
+	assert.Equal(t, string(tkn.Hidden[0].Value), "#!/usr/bin/env php\n")
 
-	token = lexer.Lex(lv)
-	assert.Equal(t, token, int(T_DNUMBER))
+	tkn = lexer.Lex()
+	assert.Equal(t, tkn.ID, T_DNUMBER)
 }
 
 func TestNumberTokens(t *testing.T) {
@@ -462,16 +451,15 @@ func TestNumberTokens(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
 	actual := []string{}
 
 	for {
-		token := lexer.Lex(lv)
-		if token == 0 {
+		tkn := lexer.Lex()
+		if tkn.ID == 0 {
 			break
 		}
-		actual = append(actual, TokenID(token).String())
+		actual = append(actual, tkn.ID.String())
 	}
 
 	assert.DeepEqual(t, expected, actual)
@@ -520,16 +508,15 @@ func TestConstantStrings(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
 	actual := []string{}
 
 	for {
-		token := lexer.Lex(lv)
-		if token == 0 {
+		tkn := lexer.Lex()
+		if tkn.ID == 0 {
 			break
 		}
-		actual = append(actual, TokenID(token).String())
+		actual = append(actual, tkn.ID.String())
 	}
 
 	assert.DeepEqual(t, expected, actual)
@@ -567,16 +554,15 @@ func TestSingleQuoteStringTokens(t *testing.T) {
 	}
 
 	lexer := NewLexer([]byte(src))
-	lv := &lval{}
 
 	actual := []string{}
 
 	for {
-		token := lexer.Lex(lv)
-		if token == 0 {
+		tkn := lexer.Lex()
+		if tkn.ID == 0 {
 			break
 		}
-		actual = append(actual, TokenID(token).String())
+		actual = append(actual, tkn.ID.String())
 	}
 
 	assert.DeepEqual(t, expected, actual)
@@ -656,16 +642,15 @@ func TestTeplateStringTokens(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
 	actual := []string{}
 
 	for {
-		token := lexer.Lex(lv)
-		if token == 0 {
+		tkn := lexer.Lex()
+		if tkn.ID == 0 {
 			break
 		}
-		actual = append(actual, TokenID(token).String())
+		actual = append(actual, tkn.ID.String())
 	}
 
 	assert.DeepEqual(t, expected, actual)
@@ -742,16 +727,15 @@ func TestBackquoteStringTokens(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
 	actual := []string{}
 
 	for {
-		token := lexer.Lex(lv)
-		if token == 0 {
+		tkn := lexer.Lex()
+		if tkn.ID == 0 {
 			break
 		}
-		actual = append(actual, TokenID(token).String())
+		actual = append(actual, tkn.ID.String())
 	}
 
 	assert.DeepEqual(t, expected, actual)
@@ -837,16 +821,15 @@ CAT;
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
 	actual := []string{}
 
 	for {
-		token := lexer.Lex(lv)
-		if token == 0 {
+		tkn := lexer.Lex()
+		if tkn.ID == 0 {
 			break
 		}
-		actual = append(actual, TokenID(token).String())
+		actual = append(actual, tkn.ID.String())
 	}
 
 	assert.DeepEqual(t, expected, actual)
@@ -911,16 +894,15 @@ CAT
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
 	actual := []string{}
 
 	for {
-		token := lexer.Lex(lv)
-		if token == 0 {
+		tkn := lexer.Lex()
+		if tkn.ID == 0 {
 			break
 		}
-		actual = append(actual, TokenID(token).String())
+		actual = append(actual, tkn.ID.String())
 	}
 
 	assert.DeepEqual(t, expected, actual)
@@ -951,16 +933,15 @@ CAT;
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
 	actual := []string{}
 
 	for {
-		token := lexer.Lex(lv)
-		if token == 0 {
+		tkn := lexer.Lex()
+		if tkn.ID == 0 {
 			break
 		}
-		actual = append(actual, TokenID(token).String())
+		actual = append(actual, tkn.ID.String())
 	}
 
 	assert.DeepEqual(t, expected, actual)
@@ -983,16 +964,15 @@ func TestHereDocTokens73(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
 	actual := []string{}
 
 	for {
-		token := lexer.Lex(lv)
-		if token == 0 {
+		tkn := lexer.Lex()
+		if tkn.ID == 0 {
 			break
 		}
-		actual = append(actual, TokenID(token).String())
+		actual = append(actual, tkn.ID.String())
 	}
 
 	assert.DeepEqual(t, expected, actual)
@@ -1015,16 +995,15 @@ CAT;`
 	lexer := NewLexer([]byte(src))
 	lexer.PHPVersion = "7.2"
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
 	actual := []string{}
 
 	for {
-		token := lexer.Lex(lv)
-		if token == 0 {
+		tkn := lexer.Lex()
+		if tkn.ID == 0 {
 			break
 		}
-		actual = append(actual, TokenID(token).String())
+		actual = append(actual, tkn.ID.String())
 	}
 
 	assert.DeepEqual(t, expected, actual)
@@ -1048,16 +1027,15 @@ func TestInlineHtmlNopTokens(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
 	actual := []string{}
 
 	for {
-		token := lexer.Lex(lv)
-		if token == 0 {
+		tkn := lexer.Lex()
+		if tkn.ID == 0 {
 			break
 		}
-		actual = append(actual, TokenID(token).String())
+		actual = append(actual, tkn.ID.String())
 	}
 
 	assert.DeepEqual(t, expected, actual)
@@ -1083,18 +1061,17 @@ func TestStringTokensAfterVariable(t *testing.T) {
 	}
 
 	lexer := NewLexer([]byte(src))
-	lv := &lval{}
 
 	actual := []string{}
 	actualTokens := []string{}
 
 	for {
-		token := lexer.Lex(lv)
-		if token == 0 {
+		tkn := lexer.Lex()
+		if tkn.ID == 0 {
 			break
 		}
-		actualTokens = append(actualTokens, string(lv.Tkn.Value))
-		actual = append(actual, TokenID(token).String())
+		actualTokens = append(actualTokens, string(tkn.Value))
+		actual = append(actual, tkn.ID.String())
 	}
 
 	assert.DeepEqual(t, expected, actual)
@@ -1117,18 +1094,17 @@ func TestSlashAfterVariable(t *testing.T) {
 	}
 
 	lexer := NewLexer([]byte(src))
-	lv := &lval{}
 
 	actual := []string{}
 	actualTokens := []string{}
 
 	for {
-		token := lexer.Lex(lv)
-		if token == 0 {
+		tkn := lexer.Lex()
+		if tkn.ID == 0 {
 			break
 		}
-		actualTokens = append(actualTokens, string(lv.Tkn.Value))
-		actual = append(actual, TokenID(token).String())
+		actualTokens = append(actualTokens, string(tkn.Value))
+		actual = append(actual, tkn.ID.String())
 	}
 
 	assert.DeepEqual(t, expected, actual)
@@ -1156,9 +1132,8 @@ func TestCommentEnd(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
-	lexer.Lex(lv)
+	lexer.Lex()
 
 	actual := lexer.HiddenTokens
 
@@ -1186,11 +1161,10 @@ func TestCommentNewLine(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
-	lexer.Lex(lv)
+	tkn := lexer.Lex()
 
-	actual := lv.Tkn.Hidden
+	actual := tkn.Hidden
 
 	assert.DeepEqual(t, expected, actual)
 }
@@ -1216,11 +1190,10 @@ func TestCommentNewLine1(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
-	lexer.Lex(lv)
+	tkn := lexer.Lex()
 
-	actual := lv.Tkn.Hidden
+	actual := tkn.Hidden
 
 	assert.DeepEqual(t, expected, actual)
 }
@@ -1246,11 +1219,10 @@ func TestCommentNewLine2(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
-	lexer.Lex(lv)
+	tkn := lexer.Lex()
 
-	actual := lv.Tkn.Hidden
+	actual := tkn.Hidden
 
 	assert.DeepEqual(t, expected, actual)
 }
@@ -1277,11 +1249,10 @@ func TestCommentWithPhpEndTag(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
-	lexer.Lex(lv)
+	tkn := lexer.Lex()
 
-	actual := lv.Tkn.Hidden
+	actual := tkn.Hidden
 
 	assert.DeepEqual(t, expected, actual)
 }
@@ -1308,11 +1279,10 @@ func TestInlineComment(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
-	lexer.Lex(lv)
+	tkn := lexer.Lex()
 
-	actual := lv.Tkn.Hidden
+	actual := tkn.Hidden
 
 	assert.DeepEqual(t, expected, actual)
 }
@@ -1339,9 +1309,8 @@ func TestInlineComment2(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
-	lexer.Lex(lv)
+	lexer.Lex()
 
 	actual := lexer.HiddenTokens
 
@@ -1374,9 +1343,8 @@ func TestEmptyInlineComment(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
-	lexer.Lex(lv)
+	lexer.Lex()
 
 	actual := lexer.HiddenTokens
 
@@ -1405,11 +1373,10 @@ func TestEmptyInlineComment2(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
-	lexer.Lex(lv)
+	tkn := lexer.Lex()
 
-	actual := lv.Tkn.Hidden
+	actual := tkn.Hidden
 
 	assert.DeepEqual(t, expected, actual)
 }
@@ -1420,7 +1387,6 @@ func TestMethodCallTokens(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
 	expected := []token.Token{
 		{
@@ -1432,8 +1398,8 @@ func TestMethodCallTokens(t *testing.T) {
 			Value: []byte("\n\t"),
 		},
 	}
-	lexer.Lex(lv)
-	actual := lv.Tkn.Hidden
+	tkn := lexer.Lex()
+	actual := tkn.Hidden
 	assert.DeepEqual(t, expected, actual)
 
 	expected = []token.Token{
@@ -1442,8 +1408,8 @@ func TestMethodCallTokens(t *testing.T) {
 			Value: []byte(" "),
 		},
 	}
-	lexer.Lex(lv)
-	actual = lv.Tkn.Hidden
+	tkn = lexer.Lex()
+	actual = tkn.Hidden
 	assert.DeepEqual(t, expected, actual)
 
 	expected = []token.Token{
@@ -1452,8 +1418,8 @@ func TestMethodCallTokens(t *testing.T) {
 			Value: []byte(" "),
 		},
 	}
-	lexer.Lex(lv)
-	actual = lv.Tkn.Hidden
+	tkn = lexer.Lex()
+	actual = tkn.Hidden
 	assert.DeepEqual(t, expected, actual)
 
 	expected = []token.Token{
@@ -1462,8 +1428,8 @@ func TestMethodCallTokens(t *testing.T) {
 			Value: []byte(" "),
 		},
 	}
-	lexer.Lex(lv)
-	actual = lv.Tkn.Hidden
+	tkn = lexer.Lex()
+	actual = tkn.Hidden
 	assert.DeepEqual(t, expected, actual)
 
 	expected = []token.Token{
@@ -1472,8 +1438,8 @@ func TestMethodCallTokens(t *testing.T) {
 			Value: []byte(" "),
 		},
 	}
-	lexer.Lex(lv)
-	actual = lv.Tkn.Hidden
+	tkn = lexer.Lex()
+	actual = tkn.Hidden
 	assert.DeepEqual(t, expected, actual)
 
 	expected = []token.Token{
@@ -1482,8 +1448,8 @@ func TestMethodCallTokens(t *testing.T) {
 			Value: []byte(" "),
 		},
 	}
-	lexer.Lex(lv)
-	actual = lv.Tkn.Hidden
+	tkn = lexer.Lex()
+	actual = tkn.Hidden
 	assert.DeepEqual(t, expected, actual)
 
 	expected = []token.Token{
@@ -1492,8 +1458,8 @@ func TestMethodCallTokens(t *testing.T) {
 			Value: []byte(" "),
 		},
 	}
-	lexer.Lex(lv)
-	actual = lv.Tkn.Hidden
+	tkn = lexer.Lex()
+	actual = tkn.Hidden
 	assert.DeepEqual(t, expected, actual)
 }
 
@@ -1503,7 +1469,6 @@ func TestYieldFromTokens(t *testing.T) {
 
 	lexer := NewLexer([]byte(src))
 	lexer.WithHiddenTokens = true
-	lv := &lval{}
 
 	expected := []token.Token{
 		{
@@ -1515,8 +1480,8 @@ func TestYieldFromTokens(t *testing.T) {
 			Value: []byte("\n\t"),
 		},
 	}
-	lexer.Lex(lv)
-	actual := lv.Tkn.Hidden
+	tkn := lexer.Lex()
+	actual := tkn.Hidden
 	assert.DeepEqual(t, expected, actual)
 
 	expected = []token.Token{
@@ -1525,8 +1490,8 @@ func TestYieldFromTokens(t *testing.T) {
 			Value: []byte(" "),
 		},
 	}
-	lexer.Lex(lv)
-	actual = lv.Tkn.Hidden
+	tkn = lexer.Lex()
+	actual = tkn.Hidden
 	assert.DeepEqual(t, expected, actual)
 }
 
@@ -1534,51 +1499,48 @@ func TestVarNameByteChars(t *testing.T) {
 	src := "