[refactoring] remove scanner token

parent 394092269a
commit 97747c5ac0
@@ -12,7 +12,7 @@ import (
 // Parser structure
 type Parser struct {
     Lexer          *scanner.Lexer
-    currentToken   *scanner.Token
+    currentToken   *token.Token
     rootNode       ast.Vertex
     errHandlerFunc func(*errors.Error)
 }
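For orientation: the scanner-local Token is replaced throughout by the shared type in pkg/token. Below is a minimal sketch of what that type presumably looks like, inferred only from the fields this commit touches (ID, Value, Position, SkippedTokens); the authoritative definition lives in pkg/token and may carry more fields:

```go
package token

import "github.com/z7zmey/php-parser/pkg/position"

// ID is the token kind. The named T_* constants (formerly
// scanner.TokenID) live next to it; single-character tokens are
// produced by casting the byte value, e.g. token.ID(';').
type ID int

// Token is a sketch reconstructed from this diff's usage.
type Token struct {
	ID            ID
	Value         []byte             // slice into the source buffer
	Position      *position.Position // pooled, filled in by the lexer
	SkippedTokens []*Token           // leading whitespace/comments
}
```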
@@ -40,8 +40,7 @@ func (p *Parser) Error(msg string) {
         return
     }

-    var pos = p.currentToken.Position
-    p.errHandlerFunc(errors.NewError(msg, &pos))
+    p.errHandlerFunc(errors.NewError(msg, p.currentToken.Position))
 }

 // Parse the php7 Parser entrypoint
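The copy in the old Error presumably existed because pooled tokens (and the Position values embedded in them) were recycled via ReturnTokenToPool, so a position handed to an error had to be detached first. With positions now allocated from a block pool and not reused during a parse (see the lexer changes below), the pointer can be passed through directly:

```go
// Before: detach the position from the reusable token.
//	var pos = p.currentToken.Position // value copy
//	p.errHandlerFunc(errors.NewError(msg, &pos))
//
// After: Position is a pooled *position.Position that stays valid
// for the lifetime of the parse result.
p.errHandlerFunc(errors.NewError(msg, p.currentToken.Position))
```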
@@ -82,7 +81,7 @@ func (p *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) {
     delete(src.GetNode().Tokens, token.Start)
 }

-func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []*token.Token) {
     if len(tokens) == 0 {
         return
     }
@@ -98,7 +97,7 @@ func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []token.Token) {
     }
 }

-func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []*token.Token) {
     if len(tokens) == 0 {
         return
     }
@@ -108,14 +107,14 @@ func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []token.Token) {
         *dstCollection = make(token.Collection)
     }

-    (*dstCollection)[pos] = make([]token.Token, 0)
+    (*dstCollection)[pos] = make([]*token.Token, 0)

     for _, v := range tokens {
         (*dstCollection)[pos] = append((*dstCollection)[pos], v)
     }
 }

-func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []*token.Token) {
     if len(tokens) == 0 {
         return
     }
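These setters store free-floating tokens on AST nodes keyed by a slot position (token.Start, token.SemiColon, and so on). A sketch of the assumed shape of token.Collection, inferred from make(token.Collection) and the map indexing above; the real definitions are in pkg/token:

```go
package token

// Position names a slot on an AST node where free-floating tokens
// attach (assumed enum, inferred from token.Start / token.SemiColon).
type Position int

const (
	Start Position = iota
	End
	SemiColon
	// ...
)

// Collection now holds token pointers instead of copies, matching
// the []*token.Token signatures in this commit.
type Collection map[Position][]*Token
```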
@@ -141,7 +140,7 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.Vertex) {
     }

     if semiColon[0].Value[0] == ';' {
-        p.setFreeFloatingTokens(prevNode, token.SemiColon, []token.Token{
+        p.setFreeFloatingTokens(prevNode, token.SemiColon, []*token.Token{
             {
                 ID:    token.ID(';'),
                 Value: semiColon[0].Value[0:1],
@@ -155,28 +154,18 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.Vertex) {
         tlen = 3
     }

-    phpCloseTag := []token.Token{}
+    phpCloseTag := []*token.Token{}
     if vlen-tlen > 1 {
-        phpCloseTag = append(phpCloseTag, token.Token{
+        phpCloseTag = append(phpCloseTag, &token.Token{
             ID:    token.T_WHITESPACE,
             Value: semiColon[0].Value[1 : vlen-tlen],
         })
     }

-    phpCloseTag = append(phpCloseTag, token.Token{
+    phpCloseTag = append(phpCloseTag, &token.Token{
         ID:    T_CLOSE_TAG,
         Value: semiColon[0].Value[vlen-tlen:],
     })

     p.setFreeFloatingTokens(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...))
 }
-
-func (p *Parser) returnTokenToPool(yyDollar []yySymType, yyVAL *yySymType) {
-    for i := 1; i < len(yyDollar); i++ {
-        if yyDollar[i].token != nil {
-            p.Lexer.ReturnTokenToPool(yyDollar[i].token)
-        }
-        yyDollar[i].token = nil
-    }
-    yyVAL.token = nil
-}
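A worked example of the slicing in splitSemiColonAndPhpCloseTag, under the assumption that the merged semicolon token's Value looks like `; ?>` followed by a newline (tlen is 3 when the close tag carries a trailing newline, 2 otherwise):

```go
package main

import "fmt"

func main() {
	val := []byte("; ?>\n") // hypothetical merged token value
	vlen := len(val)        // 5
	tlen := 3               // "?>" plus the trailing newline

	fmt.Printf("%q\n", val[0:1])         // ";"    -> free-floating token.ID(';')
	fmt.Printf("%q\n", val[1:vlen-tlen]) // " "    -> T_WHITESPACE (only if vlen-tlen > 1)
	fmt.Printf("%q\n", val[vlen-tlen:])  // "?>\n" -> T_CLOSE_TAG
}
```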
internal/php5/php5.go (generated): binary file not shown
internal/php5/php5.y: diff suppressed because it is too large (2308 lines changed)
@@ -414,7 +414,7 @@ CAD;
 `

     for n := 0; n < b.N; n++ {
-        lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
+        lexer := scanner.NewLexer([]byte(src), "5.6", nil)
         php5parser := php5.NewParser(lexer, nil)
         php5parser.Parse()
     }
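The NewLexer signature drops the withTokens flag; skipped tokens are now always collected. A usage sketch of the updated API (these are internal packages, so this only compiles from within the repository):

```go
package main

import (
	"fmt"

	"github.com/z7zmey/php-parser/internal/php5"
	"github.com/z7zmey/php-parser/internal/scanner"
)

func main() {
	src := []byte(`<?php echo "Hello";`)

	// Third argument (withTokens) is gone.
	lexer := scanner.NewLexer(src, "5.6", nil)
	php5parser := php5.NewParser(lexer, nil)
	php5parser.Parse()

	fmt.Printf("%+v\n", php5parser.GetRootNode())
}
```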
@@ -22212,11 +22212,12 @@ func TestPhp5(t *testing.T) {
         },
     }

-    lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
+    lexer := scanner.NewLexer([]byte(src), "5.6", nil)
     php5parser := php5.NewParser(lexer, nil)
     php5parser.Parse()
     actual := php5parser.GetRootNode()

     traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+    traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
     assert.DeepEqual(t, expected, actual)
 }
@@ -22351,11 +22352,12 @@ func TestPhp5Strings(t *testing.T) {
         },
     }

-    lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
+    lexer := scanner.NewLexer([]byte(src), "5.6", nil)
     php5parser := php5.NewParser(lexer, nil)
     php5parser.Parse()
     actual := php5parser.GetRootNode()

     traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+    traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
     assert.DeepEqual(t, expected, actual)
 }
@@ -22579,11 +22581,12 @@ CAD;
         },
     }

-    lexer := scanner.NewLexer([]byte(src), "5.6", false, nil)
+    lexer := scanner.NewLexer([]byte(src), "5.6", nil)
     php5parser := php5.NewParser(lexer, nil)
     php5parser.Parse()
     actual := php5parser.GetRootNode()

     traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+    traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
     assert.DeepEqual(t, expected, actual)
 }
@@ -22606,7 +22609,7 @@ func TestPhp5ControlCharsErrors(t *testing.T) {
         parserErrors = append(parserErrors, e)
     }

-    lexer := scanner.NewLexer([]byte(src), "5.6", false, errorHandlerFunc)
+    lexer := scanner.NewLexer([]byte(src), "5.6", errorHandlerFunc)
     php5parser := php5.NewParser(lexer, errorHandlerFunc)
     php5parser.Parse()

     assert.DeepEqual(t, expected, parserErrors)
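Since the lexer now always attaches skipped tokens, the tests add a second traversal that strips them before the structural comparison. The flow, taken from this diff (visitor.FilterTokens is assumed to clear each node's token collection):

```go
actual := php5parser.GetRootNode()

// Remove parser-internal helper nodes, then the always-collected
// free-floating tokens, so only the AST shape is compared.
traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
assert.DeepEqual(t, expected, actual)
```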
@@ -12,7 +12,7 @@ import (
 // Parser structure
 type Parser struct {
     Lexer          *scanner.Lexer
-    currentToken   *scanner.Token
+    currentToken   *token.Token
     rootNode       ast.Vertex
     errHandlerFunc func(*errors.Error)
 }
@@ -39,8 +39,7 @@ func (p *Parser) Error(msg string) {
         return
     }

-    var pos = p.currentToken.Position
-    p.errHandlerFunc(errors.NewError(msg, &pos))
+    p.errHandlerFunc(errors.NewError(msg, p.currentToken.Position))
 }

 // Parse the php7 Parser entrypoint
@@ -82,7 +81,7 @@ func (p *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) {
     delete(src.GetNode().Tokens, token.Start)
 }

-func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []*token.Token) {
     if len(tokens) == 0 {
         return
     }
@@ -98,7 +97,7 @@ func (p *Parser) setFreeFloating(dst ast.Vertex, pos token.Position, tokens []token.Token) {
     }
 }

-func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []*token.Token) {
     if len(tokens) == 0 {
         return
     }
@@ -108,14 +107,14 @@ func (p *Parser) setFreeFloatingTokens(dst ast.Vertex, pos token.Position, tokens []token.Token) {
         *dstCollection = make(token.Collection)
     }

-    (*dstCollection)[pos] = make([]token.Token, 0)
+    (*dstCollection)[pos] = make([]*token.Token, 0)

     for _, v := range tokens {
         (*dstCollection)[pos] = append((*dstCollection)[pos], v)
     }
 }

-func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []token.Token) {
+func (p *Parser) setToken(dst ast.Vertex, pos token.Position, tokens []*token.Token) {
     if len(tokens) == 0 {
         return
     }
@@ -141,7 +140,7 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.Vertex) {
     }

     if semiColon[0].Value[0] == ';' {
-        p.setFreeFloatingTokens(prevNode, token.SemiColon, []token.Token{
+        p.setFreeFloatingTokens(prevNode, token.SemiColon, []*token.Token{
             {
                 ID:    token.ID(';'),
                 Value: semiColon[0].Value[0:1],
@@ -155,28 +154,18 @@ func (p *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.Vertex) {
         tlen = 3
     }

-    phpCloseTag := []token.Token{}
+    phpCloseTag := []*token.Token{}
     if vlen-tlen > 1 {
-        phpCloseTag = append(phpCloseTag, token.Token{
+        phpCloseTag = append(phpCloseTag, &token.Token{
             ID:    token.T_WHITESPACE,
             Value: semiColon[0].Value[1 : vlen-tlen],
         })
     }

-    phpCloseTag = append(phpCloseTag, token.Token{
+    phpCloseTag = append(phpCloseTag, &token.Token{
         ID:    T_CLOSE_TAG,
         Value: semiColon[0].Value[vlen-tlen:],
     })

     p.setFreeFloatingTokens(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...))
 }
-
-func (p *Parser) returnTokenToPool(yyDollar []yySymType, yyVAL *yySymType) {
-    for i := 1; i < len(yyDollar); i++ {
-        if yyDollar[i].token != nil {
-            p.Lexer.ReturnTokenToPool(yyDollar[i].token)
-        }
-        yyDollar[i].token = nil
-    }
-    yyVAL.token = nil
-}
internal/php7/php7.go (generated): binary file not shown
internal/php7/php7.y: diff suppressed because it is too large (1903 lines changed)
@@ -382,7 +382,7 @@ CAD;
 `

     for n := 0; n < b.N; n++ {
-        lexer := scanner.NewLexer([]byte(src), "7.4", false, nil)
+        lexer := scanner.NewLexer([]byte(src), "7.4", nil)
         php7parser := php7.NewParser(lexer, nil)
         php7parser.Parse()
     }
@@ -19595,11 +19595,12 @@ func TestPhp7(t *testing.T) {
         },
     }

-    lexer := scanner.NewLexer(src, "7.4", false, nil)
+    lexer := scanner.NewLexer(src, "7.4", nil)
     php7parser := php7.NewParser(lexer, nil)
     php7parser.Parse()
     actual := php7parser.GetRootNode()

     traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+    traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
     assert.DeepEqual(t, expected, actual)
 }
@@ -19734,11 +19735,12 @@ func TestPhp5Strings(t *testing.T) {
         },
     }

-    lexer := scanner.NewLexer([]byte(src), "7.4", false, nil)
+    lexer := scanner.NewLexer([]byte(src), "7.4", nil)
     php7parser := php7.NewParser(lexer, nil)
     php7parser.Parse()
     actual := php7parser.GetRootNode()

     traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+    traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
     assert.DeepEqual(t, expected, actual)
 }
@@ -19962,11 +19964,12 @@ CAD;
         },
     }

-    lexer := scanner.NewLexer([]byte(src), "7.4", false, nil)
+    lexer := scanner.NewLexer([]byte(src), "7.4", nil)
     php7parser := php7.NewParser(lexer, nil)
     php7parser.Parse()
     actual := php7parser.GetRootNode()

     traverser.NewDFS(new(visitor.FilterParserNodes)).Traverse(actual)
+    traverser.NewDFS(new(visitor.FilterTokens)).Traverse(actual)
     assert.DeepEqual(t, expected, actual)
 }
@@ -19989,7 +19992,7 @@ func TestPhp7ControlCharsErrors(t *testing.T) {
         parserErrors = append(parserErrors, e)
     }

-    lexer := scanner.NewLexer([]byte(src), "7.4", false, errorHandlerFunc)
+    lexer := scanner.NewLexer([]byte(src), "7.4", errorHandlerFunc)
     php7parser := php7.NewParser(lexer, errorHandlerFunc)
     php7parser.Parse()

     assert.DeepEqual(t, expected, parserErrors)
@@ -1,9 +1,9 @@
 package position

 import (
-    "github.com/z7zmey/php-parser/internal/scanner"
     "github.com/z7zmey/php-parser/pkg/ast"
     "github.com/z7zmey/php-parser/pkg/position"
+    "github.com/z7zmey/php-parser/pkg/token"
 )

 type startPos struct {
@@ -95,7 +95,7 @@ func NewNodePosition(n ast.Vertex) *position.Position {
 }

 // NewTokenPosition returns new Position
-func NewTokenPosition(t *scanner.Token) *position.Position {
+func NewTokenPosition(t *token.Token) *position.Position {
     return &position.Position{
         StartLine: t.Position.StartLine,
         EndLine:   t.Position.EndLine,
@@ -105,7 +105,7 @@ func NewTokenPosition(t *scanner.Token) *position.Position {
 }

 // NewTokensPosition returns new Position
-func NewTokensPosition(startToken *scanner.Token, endToken *scanner.Token) *position.Position {
+func NewTokensPosition(startToken *token.Token, endToken *token.Token) *position.Position {
     return &position.Position{
         StartLine: startToken.Position.StartLine,
         EndLine:   endToken.Position.EndLine,
@@ -115,7 +115,7 @@ func NewTokensPosition(startToken *scanner.Token, endToken *scanner.Token) *position.Position {
 }

 // NewTokenNodePosition returns new Position
-func NewTokenNodePosition(t *scanner.Token, n ast.Vertex) *position.Position {
+func NewTokenNodePosition(t *token.Token, n ast.Vertex) *position.Position {
     return &position.Position{
         StartLine: t.Position.StartLine,
         EndLine:   getNodeEndPos(n).endLine,
@@ -125,7 +125,7 @@ func NewTokenNodePosition(t *scanner.Token, n ast.Vertex) *position.Position {
 }

 // NewNodeTokenPosition returns new Position
-func NewNodeTokenPosition(n ast.Vertex, t *scanner.Token) *position.Position {
+func NewNodeTokenPosition(n ast.Vertex, t *token.Token) *position.Position {
     return &position.Position{
         StartLine: getNodeStartPos(n).startLine,
         EndLine:   t.Position.EndLine,
@@ -145,7 +145,7 @@ func NewNodesPosition(startNode ast.Vertex, endNode ast.Vertex) *position.Position {
 }

 // NewNodeListTokenPosition returns new Position
-func NewNodeListTokenPosition(list []ast.Vertex, t *scanner.Token) *position.Position {
+func NewNodeListTokenPosition(list []ast.Vertex, t *token.Token) *position.Position {
     return &position.Position{
         StartLine: getListStartPos(list).startLine,
         EndLine:   t.Position.EndLine,
@@ -155,7 +155,7 @@ func NewNodeListTokenPosition(list []ast.Vertex, t *scanner.Token) *position.Position {
 }

 // NewTokenNodeListPosition returns new Position
-func NewTokenNodeListPosition(t *scanner.Token, list []ast.Vertex) *position.Position {
+func NewTokenNodeListPosition(t *token.Token, list []ast.Vertex) *position.Position {
     return &position.Position{
         StartLine: t.Position.StartLine,
         EndLine:   getListEndPos(list).endLine,
@@ -185,7 +185,7 @@ func NewNodeListNodePosition(list []ast.Vertex, n ast.Vertex) *position.Position {
 }

 // NewOptionalListTokensPosition returns new Position
-func NewOptionalListTokensPosition(list []ast.Vertex, t *scanner.Token, endToken *scanner.Token) *position.Position {
+func NewOptionalListTokensPosition(list []ast.Vertex, t *token.Token, endToken *token.Token) *position.Position {
     if list == nil {
         return &position.Position{
             StartLine: t.Position.StartLine,
@@ -5,15 +5,15 @@ import (
     "testing"

     builder "github.com/z7zmey/php-parser/internal/position"
-    "github.com/z7zmey/php-parser/internal/scanner"
     "github.com/z7zmey/php-parser/pkg/ast"
     "github.com/z7zmey/php-parser/pkg/position"
+    "github.com/z7zmey/php-parser/pkg/token"
 )

 func TestNewTokenPosition(t *testing.T) {
-    tkn := &scanner.Token{
+    tkn := &token.Token{
         Value: []byte(`foo`),
-        Position: position.Position{
+        Position: &position.Position{
             StartLine: 1,
             EndLine:   1,
             StartPos:  0,
@@ -29,18 +29,18 @@ func TestNewTokenPosition(t *testing.T) {
 }

 func TestNewTokensPosition(t *testing.T) {
-    token1 := &scanner.Token{
+    token1 := &token.Token{
         Value: []byte(`foo`),
-        Position: position.Position{
+        Position: &position.Position{
             StartLine: 1,
             EndLine:   1,
             StartPos:  0,
             EndPos:    3,
         },
     }
-    token2 := &scanner.Token{
+    token2 := &token.Token{
         Value: []byte(`foo`),
-        Position: position.Position{
+        Position: &position.Position{
             StartLine: 2,
             EndLine:   2,
             StartPos:  4,
@@ -71,9 +71,9 @@ func TestNewNodePosition(t *testing.T) {
 }

 func TestNewTokenNodePosition(t *testing.T) {
-    tkn := &scanner.Token{
+    tkn := &token.Token{
         Value: []byte(`foo`),
-        Position: position.Position{
+        Position: &position.Position{
             StartLine: 1,
             EndLine:   1,
             StartPos:  0,
@@ -108,9 +108,9 @@ func TestNewNodeTokenPosition(t *testing.T) {
         },
     }

-    tkn := &scanner.Token{
+    tkn := &token.Token{
         Value: []byte(`foo`),
-        Position: position.Position{
+        Position: &position.Position{
             StartLine: 2,
             EndLine:   2,
             StartPos:  10,
@@ -202,9 +202,9 @@ func TestNewNodeListTokenPosition(t *testing.T) {
         },
     }

-    tkn := &scanner.Token{
+    tkn := &token.Token{
         Value: []byte(`foo`),
-        Position: position.Position{
+        Position: &position.Position{
             StartLine: 3,
             EndLine:   3,
             StartPos:  20,
@@ -218,9 +218,9 @@ func TestNewNodeListTokenPosition(t *testing.T) {
 }

 func TestNewTokenNodeListPosition(t *testing.T) {
-    tkn := &scanner.Token{
+    tkn := &token.Token{
         Value: []byte(`foo`),
-        Position: position.Position{
+        Position: &position.Position{
             StartLine: 1,
             EndLine:   1,
             StartPos:  0,
@@ -332,18 +332,18 @@ func TestNewNodeListNodePosition(t *testing.T) {
 }

 func TestNewOptionalListTokensPosition(t *testing.T) {
-    token1 := &scanner.Token{
+    token1 := &token.Token{
         Value: []byte(`foo`),
-        Position: position.Position{
+        Position: &position.Position{
             StartLine: 1,
             EndLine:   1,
             StartPos:  0,
             EndPos:    3,
         },
     }
-    token2 := &scanner.Token{
+    token2 := &token.Token{
         Value: []byte(`foo`),
-        Position: position.Position{
+        Position: &position.Position{
             StartLine: 2,
             EndLine:   2,
             StartPos:  4,
@@ -378,18 +378,18 @@ func TestNewOptionalListTokensPosition2(t *testing.T) {
         },
     }

-    token1 := &scanner.Token{
+    token1 := &token.Token{
         Value: []byte(`foo`),
-        Position: position.Position{
+        Position: &position.Position{
             StartLine: 4,
             EndLine:   4,
             StartPos:  27,
             EndPos:    29,
         },
     }
-    token2 := &scanner.Token{
+    token2 := &token.Token{
         Value: []byte(`foo`),
-        Position: position.Position{
+        Position: &position.Position{
             StartLine: 5,
             EndLine:   5,
             StartPos:  30,
@@ -426,9 +426,9 @@ func TestNilNodeListPos(t *testing.T) {
 }

 func TestNilNodeListTokenPos(t *testing.T) {
-    token := &scanner.Token{
+    token := &token.Token{
         Value: []byte(`foo`),
-        Position: position.Position{
+        Position: &position.Position{
             StartLine: 1,
             EndLine:   1,
             StartPos:  0,
@@ -459,9 +459,9 @@ func TestEmptyNodeListPos(t *testing.T) {
 }

 func TestEmptyNodeListTokenPos(t *testing.T) {
-    token := &scanner.Token{
+    token := &token.Token{
         Value: []byte(`foo`),
-        Position: position.Position{
+        Position: &position.Position{
             StartLine: 1,
             EndLine:   1,
             StartPos:  0,
@@ -13,30 +13,31 @@ import (
 type Lexer struct {
     data           []byte
     phpVersion     string
-    withTokens     bool
    errHandlerFunc func(*errors.Error)

+    sts, ste    int
     p, pe, cs   int
     ts, te, act int
     stack       []int
     top         int

     heredocLabel []byte
-    tokenPool    *TokenPool
+    tokenPool    *token.Pool
+    positionPool *position.Pool
     newLines     NewLines
 }

-func NewLexer(data []byte, phpVersion string, withTokens bool, errHandlerFunc func(*errors.Error)) *Lexer {
+func NewLexer(data []byte, phpVersion string, errHandlerFunc func(*errors.Error)) *Lexer {
     lex := &Lexer{
         data:           data,
         phpVersion:     phpVersion,
-        withTokens:     withTokens,
         errHandlerFunc: errHandlerFunc,

         pe:    len(data),
         stack: make([]int, 0),

-        tokenPool: &TokenPool{},
+        tokenPool:    token.NewPool(position.DefaultBlockSize),
+        positionPool: position.NewPool(position.DefaultBlockSize),
         newLines: NewLines{make([]int, 0, 128)},
     }
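The ad-hoc TokenPool is replaced by block-allocating pools for both tokens and positions. A hypothetical sketch of such a pool; the real token.Pool and position.Pool live in pkg/token and pkg/position, and DefaultBlockSize's value here is an assumption:

```go
package token

// Pool hands out *Token values from preallocated blocks. There is
// deliberately no Put: unlike the old TokenPool, tokens are never
// recycled mid-parse, which is what makes it safe to keep pointers
// to them (and to their positions) inside the AST and in errors.
type Pool struct {
	block []Token
	off   int
}

func NewPool(blockSize int) *Pool {
	return &Pool{block: make([]Token, blockSize)}
}

func (p *Pool) Get() *Token {
	if p.off >= len(p.block) {
		// Current block exhausted: allocate a fresh one.
		p.block = make([]Token, len(p.block))
		p.off = 0
	}
	t := &p.block[p.off]
	p.off++
	return t
}
```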
@@ -45,26 +46,37 @@ func NewLexer(data []byte, phpVersion string, withTokens bool, errHandlerFunc func(*errors.Error)) *Lexer {
     return lex
 }

-func (lex *Lexer) ReturnTokenToPool(t *Token) {
-    lex.tokenPool.Put(t)
-}
-
-func (lex *Lexer) setTokenPosition(token *Token) {
-    token.Position.StartLine = lex.newLines.GetLine(lex.ts)
-    token.Position.EndLine = lex.newLines.GetLine(lex.te - 1)
-    token.Position.StartPos = lex.ts
-    token.Position.EndPos = lex.te
-}
-
-func (lex *Lexer) addHiddenToken(t *Token, id TokenID, ps, pe int) {
-    if !lex.withTokens {
-        return
-    }
-
-    t.Tokens = append(t.Tokens, token.Token{
-        ID:    token.ID(id),
-        Value: lex.data[ps:pe],
-    })
-}
+func (lex *Lexer) setTokenPosition(token *token.Token) {
+    pos := lex.positionPool.Get()
+
+    pos.StartLine = lex.newLines.GetLine(lex.ts)
+    pos.EndLine = lex.newLines.GetLine(lex.te - 1)
+    pos.StartPos = lex.ts
+    pos.EndPos = lex.te
+
+    token.Position = pos
+}
+
+func (lex *Lexer) addSkippedToken(t *token.Token, id token.ID, ps, pe int) {
+    if lex.sts == 0 {
+        lex.sts = lex.ts
+    }
+
+    lex.ste = lex.te
+
+    // TODO remove after parser refactoring
+
+    skippedTkn := lex.tokenPool.Get()
+    skippedTkn.ID = id
+    skippedTkn.Value = lex.data[ps:pe]
+
+    lex.setTokenPosition(skippedTkn)
+
+    if t.SkippedTokens == nil {
+        t.SkippedTokens = make([]*token.Token, 0, 2)
+    }
+
+    t.SkippedTokens = append(t.SkippedTokens, skippedTkn)
+}

 func (lex *Lexer) isNotStringVar() bool {
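addSkippedToken replaces addHiddenToken: instead of copying hidden tokens into t.Tokens behind the withTokens flag, it always pools a token and appends the pointer to t.SkippedTokens (sts/ste track the byte range of the skipped run). A hedged usage sketch, assuming Lex as shown in scanner.rl below:

```go
// Lexing hypothetical input "  // note\n$a": the T_VARIABLE token
// for "$a" carries the leading whitespace and comment.
tkn := lexer.Lex()
for _, sk := range tkn.SkippedTokens {
	fmt.Printf("%d %q\n", sk.ID, sk.Value)
}
```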
@@ -1,145 +0,0 @@
-package scanner
-
-type TokenID int
-
-//go:generate stringer -type=TokenID -output ./tokenid_string.go
-const (
-    T_INCLUDE TokenID = iota + 57346
-    T_INCLUDE_ONCE
-    T_EXIT
-    T_IF
-    T_LNUMBER
-    T_DNUMBER
-    T_STRING
-    T_STRING_VARNAME
-    T_VARIABLE
-    T_NUM_STRING
-    T_INLINE_HTML
-    T_CHARACTER
-    T_BAD_CHARACTER
-    T_ENCAPSED_AND_WHITESPACE
-    T_CONSTANT_ENCAPSED_STRING
-    T_ECHO
-    T_DO
-    T_WHILE
-    T_ENDWHILE
-    T_FOR
-    T_ENDFOR
-    T_FOREACH
-    T_ENDFOREACH
-    T_DECLARE
-    T_ENDDECLARE
-    T_AS
-    T_SWITCH
-    T_ENDSWITCH
-    T_CASE
-    T_DEFAULT
-    T_BREAK
-    T_CONTINUE
-    T_GOTO
-    T_FUNCTION
-    T_FN
-    T_CONST
-    T_RETURN
-    T_TRY
-    T_CATCH
-    T_FINALLY
-    T_THROW
-    T_USE
-    T_INSTEADOF
-    T_GLOBAL
-    T_VAR
-    T_UNSET
-    T_ISSET
-    T_EMPTY
-    T_HALT_COMPILER
-    T_CLASS
-    T_TRAIT
-    T_INTERFACE
-    T_EXTENDS
-    T_IMPLEMENTS
-    T_OBJECT_OPERATOR
-    T_DOUBLE_ARROW
-    T_LIST
-    T_ARRAY
-    T_CALLABLE
-    T_CLASS_C
-    T_TRAIT_C
-    T_METHOD_C
-    T_FUNC_C
-    T_LINE
-    T_FILE
-    T_COMMENT
-    T_DOC_COMMENT
-    T_OPEN_TAG
-    T_OPEN_TAG_WITH_ECHO
-    T_CLOSE_TAG
-    T_WHITESPACE
-    T_START_HEREDOC
-    T_END_HEREDOC
-    T_DOLLAR_OPEN_CURLY_BRACES
-    T_CURLY_OPEN
-    T_PAAMAYIM_NEKUDOTAYIM
-    T_NAMESPACE
-    T_NS_C
-    T_DIR
-    T_NS_SEPARATOR
-    T_ELLIPSIS
-    T_EVAL
-    T_REQUIRE
-    T_REQUIRE_ONCE
-    T_LOGICAL_OR
-    T_LOGICAL_XOR
-    T_LOGICAL_AND
-    T_INSTANCEOF
-    T_NEW
-    T_CLONE
-    T_ELSEIF
-    T_ELSE
-    T_ENDIF
-    T_PRINT
-    T_YIELD
-    T_STATIC
-    T_ABSTRACT
-    T_FINAL
-    T_PRIVATE
-    T_PROTECTED
-    T_PUBLIC
-    T_INC
-    T_DEC
-    T_YIELD_FROM
-    T_INT_CAST
-    T_DOUBLE_CAST
-    T_STRING_CAST
-    T_ARRAY_CAST
-    T_OBJECT_CAST
-    T_BOOL_CAST
-    T_UNSET_CAST
-    T_COALESCE
-    T_SPACESHIP
-    T_NOELSE
-    T_PLUS_EQUAL
-    T_MINUS_EQUAL
-    T_MUL_EQUAL
-    T_POW_EQUAL
-    T_DIV_EQUAL
-    T_CONCAT_EQUAL
-    T_MOD_EQUAL
-    T_AND_EQUAL
-    T_OR_EQUAL
-    T_XOR_EQUAL
-    T_SL_EQUAL
-    T_SR_EQUAL
-    T_COALESCE_EQUAL
-    T_BOOLEAN_OR
-    T_BOOLEAN_AND
-    T_POW
-    T_SL
-    T_SR
-    T_IS_IDENTICAL
-    T_IS_NOT_IDENTICAL
-    T_IS_EQUAL
-    T_IS_NOT_EQUAL
-    T_IS_SMALLER_OR_EQUAL
-    T_IS_GREATER_OR_EQUAL
-)
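The enum above is not gone, it moved: the same T_* names are now defined on token.ID in pkg/token, which is why the scanner rules below write token.T_COMMENT where they used to write a bare T_COMMENT. A small sketch of the new spelling:

```go
package main

import (
	"fmt"

	"github.com/z7zmey/php-parser/pkg/token"
)

func main() {
	var tok token.ID = token.T_VARIABLE // named token kind
	semi := token.ID(';')               // single-character token kind
	fmt.Println(tok, semi)
}
```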
internal/scanner/scanner.go (generated): binary file not shown
@@ -4,6 +4,8 @@ import (
     "fmt"
     "strconv"
     "strings"
+
+    "github.com/z7zmey/php-parser/pkg/token"
 )

 %%{
@@ -18,13 +20,14 @@ func initLexer(lex *Lexer) {
     %% write init;
 }

-func (lex *Lexer) Lex() *Token {
+func (lex *Lexer) Lex() *token.Token {
     eof := lex.pe
-    var tok TokenID
+    var tok token.ID

-    token := lex.tokenPool.Get()
-    token.Tokens = token.Tokens[:0]
-    token.Value = lex.data[0:0]
+    tkn := lex.tokenPool.Get()
+
+    lex.sts = 0
+    lex.ste = 0

     lblStart := 0
     lblEnd := 0
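With the pooled token in hand, Lex fills it in and returns it; callers own the pointer until the parse result is discarded. A hypothetical driver loop — the end-of-input condition (a zero ID here) is an assumption, not taken from this diff:

```go
lexer := scanner.NewLexer(src, "7.4", nil)

for {
	tkn := lexer.Lex()
	if tkn == nil || tkn.ID == 0 {
		break // assumed EOF convention
	}
	fmt.Printf("%v %q\n", tkn.ID, tkn.Value)
}
```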
@@ -124,7 +127,7 @@ func (lex *Lexer) Lex() *Token {

     main := |*
         "#!" any* :>> newline => {
-            lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te)
+            lex.addSkippedToken(tkn, token.T_COMMENT, lex.ts, lex.te)
         };
         any => {
             fnext html;
@@ -135,42 +138,42 @@ func (lex *Lexer) Lex() *Token {
     html := |*
         any_line+ -- '<?' => {
             lex.ungetStr("<")
-            lex.setTokenPosition(token)
-            tok = T_INLINE_HTML;
+            lex.setTokenPosition(tkn)
+            tok = token.T_INLINE_HTML;
             fbreak;
         };
         '<?' => {
-            lex.addHiddenToken(token, T_OPEN_TAG, lex.ts, lex.te)
+            lex.addSkippedToken(tkn, token.T_OPEN_TAG, lex.ts, lex.te)
             fnext php;
         };
         '<?php'i ( [ \t] | newline ) => {
             lex.ungetCnt(lex.te - lex.ts - 5)
-            lex.addHiddenToken(token, T_OPEN_TAG, lex.ts, lex.ts+5)
+            lex.addSkippedToken(tkn, token.T_OPEN_TAG, lex.ts, lex.ts+5)
             fnext php;
         };
         '<?='i => {
-            lex.setTokenPosition(token);
-            tok = T_ECHO;
+            lex.setTokenPosition(tkn);
+            tok = token.T_ECHO;
             fnext php;
             fbreak;
         };
     *|;

     php := |*
-        whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
-        '?>' newline? => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext html; fbreak;};
-        ';' whitespace_line* '?>' newline? => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext html; fbreak;};
+        whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)};
+        '?>' newline? => {lex.setTokenPosition(tkn); tok = token.ID(int(';')); fnext html; fbreak;};
+        ';' whitespace_line* '?>' newline? => {lex.setTokenPosition(tkn); tok = token.ID(int(';')); fnext html; fbreak;};

-        (dnum | exponent_dnum) => {lex.setTokenPosition(token); tok = T_DNUMBER; fbreak;};
+        (dnum | exponent_dnum) => {lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak;};
         bnum => {
             s := strings.Replace(string(lex.data[lex.ts+2:lex.te]), "_", "", -1)
             _, err := strconv.ParseInt(s, 2, 0)

             if err == nil {
-                lex.setTokenPosition(token); tok = T_LNUMBER; fbreak;
+                lex.setTokenPosition(tkn); tok = token.T_LNUMBER; fbreak;
             }

-            lex.setTokenPosition(token); tok = T_DNUMBER; fbreak;
+            lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak;
         };
         lnum => {
             base := 10
@@ -182,142 +185,142 @@ func (lex *Lexer) Lex() *Token {
             _, err := strconv.ParseInt(s, base, 0)

             if err == nil {
-                lex.setTokenPosition(token); tok = T_LNUMBER; fbreak;
+                lex.setTokenPosition(tkn); tok = token.T_LNUMBER; fbreak;
             }

-            lex.setTokenPosition(token); tok = T_DNUMBER; fbreak;
+            lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak;
         };
         hnum => {
             s := strings.Replace(string(lex.data[lex.ts+2:lex.te]), "_", "", -1)
             _, err := strconv.ParseInt(s, 16, 0)

             if err == nil {
-                lex.setTokenPosition(token); tok = T_LNUMBER; fbreak;
+                lex.setTokenPosition(tkn); tok = token.T_LNUMBER; fbreak;
             }

-            lex.setTokenPosition(token); tok = T_DNUMBER; fbreak;
+            lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak;
         };

-        'abstract'i => {lex.setTokenPosition(token); tok = T_ABSTRACT; fbreak;};
-        'array'i => {lex.setTokenPosition(token); tok = T_ARRAY; fbreak;};
-        'as'i => {lex.setTokenPosition(token); tok = T_AS; fbreak;};
-        'break'i => {lex.setTokenPosition(token); tok = T_BREAK; fbreak;};
-        'callable'i => {lex.setTokenPosition(token); tok = T_CALLABLE; fbreak;};
-        'case'i => {lex.setTokenPosition(token); tok = T_CASE; fbreak;};
-        'catch'i => {lex.setTokenPosition(token); tok = T_CATCH; fbreak;};
-        'class'i => {lex.setTokenPosition(token); tok = T_CLASS; fbreak;};
-        'clone'i => {lex.setTokenPosition(token); tok = T_CLONE; fbreak;};
-        'const'i => {lex.setTokenPosition(token); tok = T_CONST; fbreak;};
-        'continue'i => {lex.setTokenPosition(token); tok = T_CONTINUE; fbreak;};
-        'declare'i => {lex.setTokenPosition(token); tok = T_DECLARE; fbreak;};
-        'default'i => {lex.setTokenPosition(token); tok = T_DEFAULT; fbreak;};
-        'do'i => {lex.setTokenPosition(token); tok = T_DO; fbreak;};
-        'echo'i => {lex.setTokenPosition(token); tok = T_ECHO; fbreak;};
-        'else'i => {lex.setTokenPosition(token); tok = T_ELSE; fbreak;};
-        'elseif'i => {lex.setTokenPosition(token); tok = T_ELSEIF; fbreak;};
-        'empty'i => {lex.setTokenPosition(token); tok = T_EMPTY; fbreak;};
-        'enddeclare'i => {lex.setTokenPosition(token); tok = T_ENDDECLARE; fbreak;};
-        'endfor'i => {lex.setTokenPosition(token); tok = T_ENDFOR; fbreak;};
-        'endforeach'i => {lex.setTokenPosition(token); tok = T_ENDFOREACH; fbreak;};
-        'endif'i => {lex.setTokenPosition(token); tok = T_ENDIF; fbreak;};
-        'endswitch'i => {lex.setTokenPosition(token); tok = T_ENDSWITCH; fbreak;};
-        'endwhile'i => {lex.setTokenPosition(token); tok = T_ENDWHILE; fbreak;};
-        'eval'i => {lex.setTokenPosition(token); tok = T_EVAL; fbreak;};
-        'exit'i | 'die'i => {lex.setTokenPosition(token); tok = T_EXIT; fbreak;};
-        'extends'i => {lex.setTokenPosition(token); tok = T_EXTENDS; fbreak;};
-        'final'i => {lex.setTokenPosition(token); tok = T_FINAL; fbreak;};
-        'finally'i => {lex.setTokenPosition(token); tok = T_FINALLY; fbreak;};
-        'for'i => {lex.setTokenPosition(token); tok = T_FOR; fbreak;};
-        'foreach'i => {lex.setTokenPosition(token); tok = T_FOREACH; fbreak;};
-        'function'i | 'cfunction'i => {lex.setTokenPosition(token); tok = T_FUNCTION; fbreak;};
-        'fn'i => {lex.setTokenPosition(token); tok = T_FN; fbreak;};
-        'global'i => {lex.setTokenPosition(token); tok = T_GLOBAL; fbreak;};
-        'goto'i => {lex.setTokenPosition(token); tok = T_GOTO; fbreak;};
-        'if'i => {lex.setTokenPosition(token); tok = T_IF; fbreak;};
-        'isset'i => {lex.setTokenPosition(token); tok = T_ISSET; fbreak;};
-        'implements'i => {lex.setTokenPosition(token); tok = T_IMPLEMENTS; fbreak;};
-        'instanceof'i => {lex.setTokenPosition(token); tok = T_INSTANCEOF; fbreak;};
-        'insteadof'i => {lex.setTokenPosition(token); tok = T_INSTEADOF; fbreak;};
-        'interface'i => {lex.setTokenPosition(token); tok = T_INTERFACE; fbreak;};
-        'list'i => {lex.setTokenPosition(token); tok = T_LIST; fbreak;};
-        'namespace'i => {lex.setTokenPosition(token); tok = T_NAMESPACE; fbreak;};
-        'private'i => {lex.setTokenPosition(token); tok = T_PRIVATE; fbreak;};
-        'public'i => {lex.setTokenPosition(token); tok = T_PUBLIC; fbreak;};
-        'print'i => {lex.setTokenPosition(token); tok = T_PRINT; fbreak;};
-        'protected'i => {lex.setTokenPosition(token); tok = T_PROTECTED; fbreak;};
-        'return'i => {lex.setTokenPosition(token); tok = T_RETURN; fbreak;};
-        'static'i => {lex.setTokenPosition(token); tok = T_STATIC; fbreak;};
-        'switch'i => {lex.setTokenPosition(token); tok = T_SWITCH; fbreak;};
-        'throw'i => {lex.setTokenPosition(token); tok = T_THROW; fbreak;};
-        'trait'i => {lex.setTokenPosition(token); tok = T_TRAIT; fbreak;};
-        'try'i => {lex.setTokenPosition(token); tok = T_TRY; fbreak;};
-        'unset'i => {lex.setTokenPosition(token); tok = T_UNSET; fbreak;};
-        'use'i => {lex.setTokenPosition(token); tok = T_USE; fbreak;};
-        'var'i => {lex.setTokenPosition(token); tok = T_VAR; fbreak;};
-        'while'i => {lex.setTokenPosition(token); tok = T_WHILE; fbreak;};
-        'yield'i whitespace_line* 'from'i => {lex.setTokenPosition(token); tok = T_YIELD_FROM; fbreak;};
-        'yield'i => {lex.setTokenPosition(token); tok = T_YIELD; fbreak;};
-        'include'i => {lex.setTokenPosition(token); tok = T_INCLUDE; fbreak;};
-        'include_once'i => {lex.setTokenPosition(token); tok = T_INCLUDE_ONCE; fbreak;};
-        'require'i => {lex.setTokenPosition(token); tok = T_REQUIRE; fbreak;};
-        'require_once'i => {lex.setTokenPosition(token); tok = T_REQUIRE_ONCE; fbreak;};
-        '__CLASS__'i => {lex.setTokenPosition(token); tok = T_CLASS_C; fbreak;};
-        '__DIR__'i => {lex.setTokenPosition(token); tok = T_DIR; fbreak;};
-        '__FILE__'i => {lex.setTokenPosition(token); tok = T_FILE; fbreak;};
-        '__FUNCTION__'i => {lex.setTokenPosition(token); tok = T_FUNC_C; fbreak;};
-        '__LINE__'i => {lex.setTokenPosition(token); tok = T_LINE; fbreak;};
-        '__NAMESPACE__'i => {lex.setTokenPosition(token); tok = T_NS_C; fbreak;};
-        '__METHOD__'i => {lex.setTokenPosition(token); tok = T_METHOD_C; fbreak;};
-        '__TRAIT__'i => {lex.setTokenPosition(token); tok = T_TRAIT_C; fbreak;};
-        '__halt_compiler'i => {lex.setTokenPosition(token); tok = T_HALT_COMPILER; fnext halt_compiller_open_parenthesis; fbreak;};
-        'new'i => {lex.setTokenPosition(token); tok = T_NEW; fbreak;};
-        'and'i => {lex.setTokenPosition(token); tok = T_LOGICAL_AND; fbreak;};
-        'or'i => {lex.setTokenPosition(token); tok = T_LOGICAL_OR; fbreak;};
-        'xor'i => {lex.setTokenPosition(token); tok = T_LOGICAL_XOR; fbreak;};
-        '\\' => {lex.setTokenPosition(token); tok = T_NS_SEPARATOR; fbreak;};
-        '...' => {lex.setTokenPosition(token); tok = T_ELLIPSIS; fbreak;};
-        '::' => {lex.setTokenPosition(token); tok = T_PAAMAYIM_NEKUDOTAYIM; fbreak;};
-        '&&' => {lex.setTokenPosition(token); tok = T_BOOLEAN_AND; fbreak;};
-        '||' => {lex.setTokenPosition(token); tok = T_BOOLEAN_OR; fbreak;};
-        '&=' => {lex.setTokenPosition(token); tok = T_AND_EQUAL; fbreak;};
-        '|=' => {lex.setTokenPosition(token); tok = T_OR_EQUAL; fbreak;};
-        '.=' => {lex.setTokenPosition(token); tok = T_CONCAT_EQUAL; fbreak;};
-        '*=' => {lex.setTokenPosition(token); tok = T_MUL_EQUAL; fbreak;};
-        '**=' => {lex.setTokenPosition(token); tok = T_POW_EQUAL; fbreak;};
-        '/=' => {lex.setTokenPosition(token); tok = T_DIV_EQUAL; fbreak;};
-        '+=' => {lex.setTokenPosition(token); tok = T_PLUS_EQUAL; fbreak;};
-        '-=' => {lex.setTokenPosition(token); tok = T_MINUS_EQUAL; fbreak;};
-        '^=' => {lex.setTokenPosition(token); tok = T_XOR_EQUAL; fbreak;};
-        '%=' => {lex.setTokenPosition(token); tok = T_MOD_EQUAL; fbreak;};
-        '--' => {lex.setTokenPosition(token); tok = T_DEC; fbreak;};
-        '++' => {lex.setTokenPosition(token); tok = T_INC; fbreak;};
-        '=>' => {lex.setTokenPosition(token); tok = T_DOUBLE_ARROW; fbreak;};
-        '<=>' => {lex.setTokenPosition(token); tok = T_SPACESHIP; fbreak;};
-        '!=' | '<>' => {lex.setTokenPosition(token); tok = T_IS_NOT_EQUAL; fbreak;};
-        '!==' => {lex.setTokenPosition(token); tok = T_IS_NOT_IDENTICAL; fbreak;};
-        '==' => {lex.setTokenPosition(token); tok = T_IS_EQUAL; fbreak;};
-        '===' => {lex.setTokenPosition(token); tok = T_IS_IDENTICAL; fbreak;};
-        '<<=' => {lex.setTokenPosition(token); tok = T_SL_EQUAL; fbreak;};
-        '>>=' => {lex.setTokenPosition(token); tok = T_SR_EQUAL; fbreak;};
-        '>=' => {lex.setTokenPosition(token); tok = T_IS_GREATER_OR_EQUAL; fbreak;};
-        '<=' => {lex.setTokenPosition(token); tok = T_IS_SMALLER_OR_EQUAL; fbreak;};
-        '**' => {lex.setTokenPosition(token); tok = T_POW; fbreak;};
-        '<<' => {lex.setTokenPosition(token); tok = T_SL; fbreak;};
-        '>>' => {lex.setTokenPosition(token); tok = T_SR; fbreak;};
-        '??' => {lex.setTokenPosition(token); tok = T_COALESCE; fbreak;};
-        '??=' => {lex.setTokenPosition(token); tok = T_COALESCE_EQUAL; fbreak;};
+        'abstract'i => {lex.setTokenPosition(tkn); tok = token.T_ABSTRACT; fbreak;};
+        'array'i => {lex.setTokenPosition(tkn); tok = token.T_ARRAY; fbreak;};
+        'as'i => {lex.setTokenPosition(tkn); tok = token.T_AS; fbreak;};
+        'break'i => {lex.setTokenPosition(tkn); tok = token.T_BREAK; fbreak;};
+        'callable'i => {lex.setTokenPosition(tkn); tok = token.T_CALLABLE; fbreak;};
+        'case'i => {lex.setTokenPosition(tkn); tok = token.T_CASE; fbreak;};
+        'catch'i => {lex.setTokenPosition(tkn); tok = token.T_CATCH; fbreak;};
+        'class'i => {lex.setTokenPosition(tkn); tok = token.T_CLASS; fbreak;};
+        'clone'i => {lex.setTokenPosition(tkn); tok = token.T_CLONE; fbreak;};
+        'const'i => {lex.setTokenPosition(tkn); tok = token.T_CONST; fbreak;};
+        'continue'i => {lex.setTokenPosition(tkn); tok = token.T_CONTINUE; fbreak;};
+        'declare'i => {lex.setTokenPosition(tkn); tok = token.T_DECLARE; fbreak;};
+        'default'i => {lex.setTokenPosition(tkn); tok = token.T_DEFAULT; fbreak;};
+        'do'i => {lex.setTokenPosition(tkn); tok = token.T_DO; fbreak;};
+        'echo'i => {lex.setTokenPosition(tkn); tok = token.T_ECHO; fbreak;};
+        'else'i => {lex.setTokenPosition(tkn); tok = token.T_ELSE; fbreak;};
+        'elseif'i => {lex.setTokenPosition(tkn); tok = token.T_ELSEIF; fbreak;};
+        'empty'i => {lex.setTokenPosition(tkn); tok = token.T_EMPTY; fbreak;};
+        'enddeclare'i => {lex.setTokenPosition(tkn); tok = token.T_ENDDECLARE; fbreak;};
+        'endfor'i => {lex.setTokenPosition(tkn); tok = token.T_ENDFOR; fbreak;};
+        'endforeach'i => {lex.setTokenPosition(tkn); tok = token.T_ENDFOREACH; fbreak;};
+        'endif'i => {lex.setTokenPosition(tkn); tok = token.T_ENDIF; fbreak;};
+        'endswitch'i => {lex.setTokenPosition(tkn); tok = token.T_ENDSWITCH; fbreak;};
+        'endwhile'i => {lex.setTokenPosition(tkn); tok = token.T_ENDWHILE; fbreak;};
+        'eval'i => {lex.setTokenPosition(tkn); tok = token.T_EVAL; fbreak;};
+        'exit'i | 'die'i => {lex.setTokenPosition(tkn); tok = token.T_EXIT; fbreak;};
+        'extends'i => {lex.setTokenPosition(tkn); tok = token.T_EXTENDS; fbreak;};
+        'final'i => {lex.setTokenPosition(tkn); tok = token.T_FINAL; fbreak;};
+        'finally'i => {lex.setTokenPosition(tkn); tok = token.T_FINALLY; fbreak;};
+        'for'i => {lex.setTokenPosition(tkn); tok = token.T_FOR; fbreak;};
+        'foreach'i => {lex.setTokenPosition(tkn); tok = token.T_FOREACH; fbreak;};
+        'function'i | 'cfunction'i => {lex.setTokenPosition(tkn); tok = token.T_FUNCTION; fbreak;};
+        'fn'i => {lex.setTokenPosition(tkn); tok = token.T_FN; fbreak;};
+        'global'i => {lex.setTokenPosition(tkn); tok = token.T_GLOBAL; fbreak;};
+        'goto'i => {lex.setTokenPosition(tkn); tok = token.T_GOTO; fbreak;};
+        'if'i => {lex.setTokenPosition(tkn); tok = token.T_IF; fbreak;};
+        'isset'i => {lex.setTokenPosition(tkn); tok = token.T_ISSET; fbreak;};
+        'implements'i => {lex.setTokenPosition(tkn); tok = token.T_IMPLEMENTS; fbreak;};
+        'instanceof'i => {lex.setTokenPosition(tkn); tok = token.T_INSTANCEOF; fbreak;};
+        'insteadof'i => {lex.setTokenPosition(tkn); tok = token.T_INSTEADOF; fbreak;};
+        'interface'i => {lex.setTokenPosition(tkn); tok = token.T_INTERFACE; fbreak;};
+        'list'i => {lex.setTokenPosition(tkn); tok = token.T_LIST; fbreak;};
+        'namespace'i => {lex.setTokenPosition(tkn); tok = token.T_NAMESPACE; fbreak;};
+        'private'i => {lex.setTokenPosition(tkn); tok = token.T_PRIVATE; fbreak;};
+        'public'i => {lex.setTokenPosition(tkn); tok = token.T_PUBLIC; fbreak;};
+        'print'i => {lex.setTokenPosition(tkn); tok = token.T_PRINT; fbreak;};
+        'protected'i => {lex.setTokenPosition(tkn); tok = token.T_PROTECTED; fbreak;};
+        'return'i => {lex.setTokenPosition(tkn); tok = token.T_RETURN; fbreak;};
+        'static'i => {lex.setTokenPosition(tkn); tok = token.T_STATIC; fbreak;};
+        'switch'i => {lex.setTokenPosition(tkn); tok = token.T_SWITCH; fbreak;};
+        'throw'i => {lex.setTokenPosition(tkn); tok = token.T_THROW; fbreak;};
+        'trait'i => {lex.setTokenPosition(tkn); tok = token.T_TRAIT; fbreak;};
+        'try'i => {lex.setTokenPosition(tkn); tok = token.T_TRY; fbreak;};
+        'unset'i => {lex.setTokenPosition(tkn); tok = token.T_UNSET; fbreak;};
+        'use'i => {lex.setTokenPosition(tkn); tok = token.T_USE; fbreak;};
+        'var'i => {lex.setTokenPosition(tkn); tok = token.T_VAR; fbreak;};
+        'while'i => {lex.setTokenPosition(tkn); tok = token.T_WHILE; fbreak;};
+        'yield'i whitespace_line* 'from'i => {lex.setTokenPosition(tkn); tok = token.T_YIELD_FROM; fbreak;};
+        'yield'i => {lex.setTokenPosition(tkn); tok = token.T_YIELD; fbreak;};
+        'include'i => {lex.setTokenPosition(tkn); tok = token.T_INCLUDE; fbreak;};
+        'include_once'i => {lex.setTokenPosition(tkn); tok = token.T_INCLUDE_ONCE; fbreak;};
+        'require'i => {lex.setTokenPosition(tkn); tok = token.T_REQUIRE; fbreak;};
+        'require_once'i => {lex.setTokenPosition(tkn); tok = token.T_REQUIRE_ONCE; fbreak;};
+        '__CLASS__'i => {lex.setTokenPosition(tkn); tok = token.T_CLASS_C; fbreak;};
+        '__DIR__'i => {lex.setTokenPosition(tkn); tok = token.T_DIR; fbreak;};
+        '__FILE__'i => {lex.setTokenPosition(tkn); tok = token.T_FILE; fbreak;};
+        '__FUNCTION__'i => {lex.setTokenPosition(tkn); tok = token.T_FUNC_C; fbreak;};
+        '__LINE__'i => {lex.setTokenPosition(tkn); tok = token.T_LINE; fbreak;};
+        '__NAMESPACE__'i => {lex.setTokenPosition(tkn); tok = token.T_NS_C; fbreak;};
+        '__METHOD__'i => {lex.setTokenPosition(tkn); tok = token.T_METHOD_C; fbreak;};
+        '__TRAIT__'i => {lex.setTokenPosition(tkn); tok = token.T_TRAIT_C; fbreak;};
+        '__halt_compiler'i => {lex.setTokenPosition(tkn); tok = token.T_HALT_COMPILER; fnext halt_compiller_open_parenthesis; fbreak;};
+        'new'i => {lex.setTokenPosition(tkn); tok = token.T_NEW; fbreak;};
+        'and'i => {lex.setTokenPosition(tkn); tok = token.T_LOGICAL_AND; fbreak;};
+        'or'i => {lex.setTokenPosition(tkn); tok = token.T_LOGICAL_OR; fbreak;};
+        'xor'i => {lex.setTokenPosition(tkn); tok = token.T_LOGICAL_XOR; fbreak;};
+        '\\' => {lex.setTokenPosition(tkn); tok = token.T_NS_SEPARATOR; fbreak;};
+        '...' => {lex.setTokenPosition(tkn); tok = token.T_ELLIPSIS; fbreak;};
+        '::' => {lex.setTokenPosition(tkn); tok = token.T_PAAMAYIM_NEKUDOTAYIM; fbreak;};
+        '&&' => {lex.setTokenPosition(tkn); tok = token.T_BOOLEAN_AND; fbreak;};
+        '||' => {lex.setTokenPosition(tkn); tok = token.T_BOOLEAN_OR; fbreak;};
+        '&=' => {lex.setTokenPosition(tkn); tok = token.T_AND_EQUAL; fbreak;};
+        '|=' => {lex.setTokenPosition(tkn); tok = token.T_OR_EQUAL; fbreak;};
+        '.=' => {lex.setTokenPosition(tkn); tok = token.T_CONCAT_EQUAL; fbreak;};
+        '*=' => {lex.setTokenPosition(tkn); tok = token.T_MUL_EQUAL; fbreak;};
+        '**=' => {lex.setTokenPosition(tkn); tok = token.T_POW_EQUAL; fbreak;};
+        '/=' => {lex.setTokenPosition(tkn); tok = token.T_DIV_EQUAL; fbreak;};
+        '+=' => {lex.setTokenPosition(tkn); tok = token.T_PLUS_EQUAL; fbreak;};
+        '-=' => {lex.setTokenPosition(tkn); tok = token.T_MINUS_EQUAL; fbreak;};
+        '^=' => {lex.setTokenPosition(tkn); tok = token.T_XOR_EQUAL; fbreak;};
+        '%=' => {lex.setTokenPosition(tkn); tok = token.T_MOD_EQUAL; fbreak;};
+        '--' => {lex.setTokenPosition(tkn); tok = token.T_DEC; fbreak;};
+        '++' => {lex.setTokenPosition(tkn); tok = token.T_INC; fbreak;};
+        '=>' => {lex.setTokenPosition(tkn); tok = token.T_DOUBLE_ARROW; fbreak;};
+        '<=>' => {lex.setTokenPosition(tkn); tok = token.T_SPACESHIP; fbreak;};
+        '!=' | '<>' => {lex.setTokenPosition(tkn); tok = token.T_IS_NOT_EQUAL; fbreak;};
+        '!==' => {lex.setTokenPosition(tkn); tok = token.T_IS_NOT_IDENTICAL; fbreak;};
+        '==' => {lex.setTokenPosition(tkn); tok = token.T_IS_EQUAL; fbreak;};
+        '===' => {lex.setTokenPosition(tkn); tok = token.T_IS_IDENTICAL; fbreak;};
+        '<<=' => {lex.setTokenPosition(tkn); tok = token.T_SL_EQUAL; fbreak;};
+        '>>=' => {lex.setTokenPosition(tkn); tok = token.T_SR_EQUAL; fbreak;};
+        '>=' => {lex.setTokenPosition(tkn); tok = token.T_IS_GREATER_OR_EQUAL; fbreak;};
+        '<=' => {lex.setTokenPosition(tkn); tok = token.T_IS_SMALLER_OR_EQUAL; fbreak;};
+        '**' => {lex.setTokenPosition(tkn); tok = token.T_POW; fbreak;};
+        '<<' => {lex.setTokenPosition(tkn); tok = token.T_SL; fbreak;};
+        '>>' => {lex.setTokenPosition(tkn); tok = token.T_SR; fbreak;};
+        '??' => {lex.setTokenPosition(tkn); tok = token.T_COALESCE; fbreak;};
+        '??=' => {lex.setTokenPosition(tkn); tok = token.T_COALESCE_EQUAL; fbreak;};

-        '(' whitespace* 'array'i whitespace* ')' => {lex.setTokenPosition(token); tok = T_ARRAY_CAST; fbreak;};
-        '(' whitespace* ('bool'i|'boolean'i) whitespace* ')' => {lex.setTokenPosition(token); tok = T_BOOL_CAST; fbreak;};
-        '(' whitespace* ('real'i|'double'i|'float'i) whitespace* ')' => {lex.setTokenPosition(token); tok = T_DOUBLE_CAST; fbreak;};
-        '(' whitespace* ('int'i|'integer'i) whitespace* ')' => {lex.setTokenPosition(token); tok = T_INT_CAST; fbreak;};
-        '(' whitespace* 'object'i whitespace* ')' => {lex.setTokenPosition(token); tok = T_OBJECT_CAST; fbreak;};
-        '(' whitespace* ('string'i|'binary'i) whitespace* ')' => {lex.setTokenPosition(token); tok = T_STRING_CAST; fbreak;};
-        '(' whitespace* 'unset'i whitespace* ')' => {lex.setTokenPosition(token); tok = T_UNSET_CAST; fbreak;};
+        '(' whitespace* 'array'i whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_ARRAY_CAST; fbreak;};
+        '(' whitespace* ('bool'i|'boolean'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_BOOL_CAST; fbreak;};
+        '(' whitespace* ('real'i|'double'i|'float'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_DOUBLE_CAST; fbreak;};
+        '(' whitespace* ('int'i|'integer'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_INT_CAST; fbreak;};
+        '(' whitespace* 'object'i whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_OBJECT_CAST; fbreak;};
+        '(' whitespace* ('string'i|'binary'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_STRING_CAST; fbreak;};
+        '(' whitespace* 'unset'i whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_UNSET_CAST; fbreak;};

         ('#' | '//') any_line* when is_not_comment_end => {
             lex.ungetStr("?>")
-            lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te)
+            lex.addSkippedToken(tkn, token.T_COMMENT, lex.ts, lex.te)
         };
         '/*' any_line* :>> '*/' {
             isDocComment := false;
@@ -326,37 +329,35 @@ func (lex *Lexer) Lex() *Token {
             }

             if isDocComment {
-                lex.addHiddenToken(token, T_DOC_COMMENT, lex.ts, lex.te)
+                lex.addSkippedToken(tkn, token.T_DOC_COMMENT, lex.ts, lex.te)
             } else {
-                lex.addHiddenToken(token, T_COMMENT, lex.ts, lex.te)
+                lex.addSkippedToken(tkn, token.T_COMMENT, lex.ts, lex.te)
             }
         };

         operators => {
-            // rune, _ := utf8.DecodeRune(lex.data[lex.ts:lex.te]);
-            // tok = TokenID(Rune2Class(rune));
-            lex.setTokenPosition(token);
-            tok = TokenID(int(lex.data[lex.ts]));
+            lex.setTokenPosition(tkn);
+            tok = token.ID(int(lex.data[lex.ts]));
             fbreak;
         };

-        "{" => { lex.setTokenPosition(token); tok = TokenID(int('{')); lex.call(ftargs, fentry(php)); goto _out; };
-        "}" => { lex.setTokenPosition(token); tok = TokenID(int('}')); lex.ret(1); goto _out;};
-        "$" varname => { lex.setTokenPosition(token); tok = T_VARIABLE; fbreak; };
-        varname => { lex.setTokenPosition(token); tok = T_STRING; fbreak; };
+        "{" => { lex.setTokenPosition(tkn); tok = token.ID(int('{')); lex.call(ftargs, fentry(php)); goto _out; };
+        "}" => { lex.setTokenPosition(tkn); tok = token.ID(int('}')); lex.ret(1); goto _out;};
+        "$" varname => { lex.setTokenPosition(tkn); tok = token.T_VARIABLE; fbreak; };
+        varname => { lex.setTokenPosition(tkn); tok = token.T_STRING; fbreak; };

-        "->" => { lex.setTokenPosition(token); tok = T_OBJECT_OPERATOR; fnext property; fbreak; };
+        "->" => { lex.setTokenPosition(tkn); tok = token.T_OBJECT_OPERATOR; fnext property; fbreak; };

         constant_string => {
-            lex.setTokenPosition(token);
-            tok = T_CONSTANT_ENCAPSED_STRING;
+            lex.setTokenPosition(tkn);
+            tok = token.T_CONSTANT_ENCAPSED_STRING;
             fbreak;
         };

         "b"i? "<<<" [ \t]* ( heredoc_label | ("'" heredoc_label "'") | ('"' heredoc_label '"') ) newline => {
             lex.heredocLabel = lex.data[lblStart:lblEnd]
-            lex.setTokenPosition(token);
-            tok = T_START_HEREDOC;
+            lex.setTokenPosition(tkn);
+            tok = token.T_START_HEREDOC;

             if lex.isHeredocEnd(lex.p+1) {
                 fnext heredoc_end;
@@ -367,8 +368,8 @@ func (lex *Lexer) Lex() *Token {
             }
             fbreak;
         };
-        "`" => {lex.setTokenPosition(token); tok = TokenID(int('`')); fnext backqote; fbreak;};
-        '"' => {lex.setTokenPosition(token); tok = TokenID(int('"')); fnext template_string; fbreak;};
+        "`" => {lex.setTokenPosition(tkn); tok = token.ID(int('`')); fnext backqote; fbreak;};
+        '"' => {lex.setTokenPosition(tkn); tok = token.ID(int('"')); fnext template_string; fbreak;};

         any_line => {
             c := lex.data[lex.p]
@@ -377,28 +378,28 @@ func (lex *Lexer) Lex() *Token {
     *|;

     property := |*
-        whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
-        "->" => {lex.setTokenPosition(token); tok = T_OBJECT_OPERATOR; fbreak;};
-        varname => {lex.setTokenPosition(token); tok = T_STRING; fnext php; fbreak;};
+        whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)};
+        "->" => {lex.setTokenPosition(tkn); tok = token.T_OBJECT_OPERATOR; fbreak;};
+        varname => {lex.setTokenPosition(tkn); tok = token.T_STRING; fnext php; fbreak;};
         any => {lex.ungetCnt(1); fgoto php;};
     *|;

     nowdoc := |*
        any_line* when is_not_heredoc_end => {
-            lex.setTokenPosition(token);
-            tok = T_ENCAPSED_AND_WHITESPACE;
+            lex.setTokenPosition(tkn);
+            tok = token.T_ENCAPSED_AND_WHITESPACE;
             fnext heredoc_end;
             fbreak;
         };
     *|;

     heredoc := |*
-        "{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
-        "${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
+        "{$" => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
+        "${" => {lex.setTokenPosition(tkn); tok = token.T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
         "$" => {lex.ungetCnt(1); fcall string_var;};
         any_line* when is_not_heredoc_end_or_var => {
-            lex.setTokenPosition(token);
-            tok = T_ENCAPSED_AND_WHITESPACE;
+            lex.setTokenPosition(tkn);
+            tok = token.T_ENCAPSED_AND_WHITESPACE;

             if len(lex.data) > lex.p+1 && lex.data[lex.p+1] != '$' && lex.data[lex.p+1] != '{' {
                 fnext heredoc_end;
@@ -408,59 +409,59 @@ func (lex *Lexer) Lex() *Token {
     *|;

     backqote := |*
-        "{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
-        "${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
+        "{$" => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
+        "${" => {lex.setTokenPosition(tkn); tok = token.T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
         "$" varname_first => {lex.ungetCnt(2); fcall string_var;};
-        '`' => {lex.setTokenPosition(token); tok = TokenID(int('`')); fnext php; fbreak;};
+        '`' => {lex.setTokenPosition(tkn); tok = token.ID(int('`')); fnext php; fbreak;};
         any_line* when is_not_backqoute_end_or_var => {
-            lex.setTokenPosition(token);
-            tok = T_ENCAPSED_AND_WHITESPACE;
+            lex.setTokenPosition(tkn);
+            tok = token.T_ENCAPSED_AND_WHITESPACE;
             fbreak;
         };
     *|;

     template_string := |*
-        "{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
-        "${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
+        "{$" => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
+        "${" => {lex.setTokenPosition(tkn); tok = token.T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
         "$" varname_first => {lex.ungetCnt(2); fcall string_var;};
-        '"' => {lex.setTokenPosition(token); tok = TokenID(int('"')); fnext php; fbreak;};
+        '"' => {lex.setTokenPosition(tkn); tok = token.ID(int('"')); fnext php; fbreak;};
         any_line* when is_not_string_end_or_var => {
-            lex.setTokenPosition(token);
-            tok = T_ENCAPSED_AND_WHITESPACE;
+            lex.setTokenPosition(tkn);
+            tok = token.T_ENCAPSED_AND_WHITESPACE;
             fbreak;
         };
     *|;

     heredoc_end := |*
         varname -- ";" => {
-            lex.setTokenPosition(token);
-            tok = T_END_HEREDOC;
+            lex.setTokenPosition(tkn);
+            tok = token.T_END_HEREDOC;
             fnext php;
             fbreak;
         };
         varname => {
-            lex.setTokenPosition(token);
-            tok = T_END_HEREDOC;
+            lex.setTokenPosition(tkn);
+            tok = token.T_END_HEREDOC;
             fnext php;
             fbreak;
         };
     *|;

     string_var := |*
-        '$' varname => {lex.setTokenPosition(token); tok = T_VARIABLE; fbreak;};
-        '->' varname_first => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_OBJECT_OPERATOR; fbreak;};
-        varname => {lex.setTokenPosition(token); tok = T_STRING; fbreak;};
-        '[' => {lex.setTokenPosition(token); tok = TokenID(int('[')); lex.call(ftargs, fentry(string_var_index)); goto _out;};
+        '$' varname => {lex.setTokenPosition(tkn); tok = token.T_VARIABLE; fbreak;};
+        '->' varname_first => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_OBJECT_OPERATOR; fbreak;};
+        varname => {lex.setTokenPosition(tkn); tok = token.T_STRING; fbreak;};
+        '[' => {lex.setTokenPosition(tkn); tok = token.ID(int('[')); lex.call(ftargs, fentry(string_var_index)); goto _out;};
         any => {lex.ungetCnt(1); fret;};
     *|;

     string_var_index := |*
-        lnum | hnum | bnum => {lex.setTokenPosition(token); tok = T_NUM_STRING; fbreak;};
-        '$' varname => {lex.setTokenPosition(token); tok = T_VARIABLE; fbreak;};
-        varname => {lex.setTokenPosition(token); tok = T_STRING; fbreak;};
-        whitespace_line | [\\'#] => {lex.setTokenPosition(token); tok = T_ENCAPSED_AND_WHITESPACE; lex.ret(2); goto _out;};
-        operators > (svi, 1) => {lex.setTokenPosition(token); tok = TokenID(int(lex.data[lex.ts])); fbreak;};
-        ']' > (svi, 2) => {lex.setTokenPosition(token); tok = TokenID(int(']')); lex.ret(2); goto _out;};
+        lnum | hnum | bnum => {lex.setTokenPosition(tkn); tok = token.T_NUM_STRING; fbreak;};
+        '$' varname => {lex.setTokenPosition(tkn); tok = token.T_VARIABLE; fbreak;};
+        varname => {lex.setTokenPosition(tkn); tok = token.T_STRING; fbreak;};
+        whitespace_line | [\\'#] => {lex.setTokenPosition(tkn); tok = token.T_ENCAPSED_AND_WHITESPACE; lex.ret(2); goto _out;};
+        operators > (svi, 1) => {lex.setTokenPosition(tkn); tok = token.ID(int(lex.data[lex.ts])); fbreak;};
+        ']' > (svi, 2) => {lex.setTokenPosition(tkn); tok = token.ID(int(']')); lex.ret(2); goto _out;};
         any_line => {
             c := lex.data[lex.p]
             lex.error(fmt.Sprintf("WARNING: Unexpected character in input: '%c' (ASCII=%d)", c, c));
@ -468,38 +469,39 @@ func (lex *Lexer) Lex() *Token {
|
||||
*|;
|
||||
|
||||
string_var_name := |*
|
||||
varname ("[" | "}") => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_STRING_VARNAME; fnext php; fbreak;};
|
||||
varname ("[" | "}") => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_STRING_VARNAME; fnext php; fbreak;};
|
||||
any => {lex.ungetCnt(1); fnext php;};
|
||||
*|;
|
||||
|
||||
halt_compiller_open_parenthesis := |*
|
||||
whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
|
||||
"(" => {lex.setTokenPosition(token); tok = TokenID(int('(')); fnext halt_compiller_close_parenthesis; fbreak;};
|
||||
whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)};
|
||||
"(" => {lex.setTokenPosition(tkn); tok = token.ID(int('(')); fnext halt_compiller_close_parenthesis; fbreak;};
|
||||
any => {lex.ungetCnt(1); fnext php;};
|
||||
*|;
|
||||
|
||||
halt_compiller_close_parenthesis := |*
|
||||
whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
|
||||
")" => {lex.setTokenPosition(token); tok = TokenID(int(')')); fnext halt_compiller_close_semicolon; fbreak;};
|
||||
whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)};
|
||||
")" => {lex.setTokenPosition(tkn); tok = token.ID(int(')')); fnext halt_compiller_close_semicolon; fbreak;};
|
||||
any => {lex.ungetCnt(1); fnext php;};
|
||||
*|;
|
||||
|
||||
halt_compiller_close_semicolon := |*
|
||||
whitespace_line* => {lex.addHiddenToken(token, T_WHITESPACE, lex.ts, lex.te)};
|
||||
";" => {lex.setTokenPosition(token); tok = TokenID(int(';')); fnext halt_compiller_end; fbreak;};
|
||||
whitespace_line* => {lex.addSkippedToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)};
|
||||
";" => {lex.setTokenPosition(tkn); tok = token.ID(int(';')); fnext halt_compiller_end; fbreak;};
|
||||
any => {lex.ungetCnt(1); fnext php;};
|
||||
*|;
|
||||
|
||||
halt_compiller_end := |*
|
||||
any_line* => { lex.addHiddenToken(token, T_HALT_COMPILER, lex.ts, lex.te); };
|
||||
any_line* => { lex.addSkippedToken(tkn, token.T_HALT_COMPILER, lex.ts, lex.te); };
|
||||
*|;
|
||||
|
||||
write exec;
|
||||
}%%
|
||||
|
||||
token.Value = lex.data[lex.ts:lex.te]
|
||||
token.ID = tok
|
||||
lex.addHiddenToken(token, tok, lex.ts, lex.te);
|
||||
tkn.Value = lex.data[lex.ts:lex.te]
|
||||
tkn.ID = token.ID(tok)
|
||||
tkn.SkippedString = lex.data[lex.sts:lex.ste]
|
||||
lex.addSkippedToken(tkn, tok, lex.ts, lex.te);
|
||||
|
||||
return token
|
||||
return tkn
|
||||
}
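
The hunks above are the mechanical half of this refactoring: the scanner-local token value (`token` variable, TokenID type, addHiddenToken helper) gives way to the shared pkg/token package (`tkn` variable, token.ID, addSkippedToken). A hedged caller-side sketch, assuming Lex() now hands back the shared *token.Token (the input literal and version string are only for illustration, and imports are elided since the scanner package is internal):

	lexer := scanner.NewLexer([]byte("<?php echo 1;"), "7.4", nil)
	tkn := lexer.Lex()                       // *token.Token rather than the old *scanner.Token
	fmt.Printf("%v %q\n", tkn.ID, tkn.Value) // token.ID values print via the regenerated stringer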
File diff suppressed because it is too large
@ -1,14 +0,0 @@
package scanner

import (
	"github.com/z7zmey/php-parser/pkg/position"
	"github.com/z7zmey/php-parser/pkg/token"
)

// Token value returned by lexer
type Token struct {
	ID       TokenID
	Value    []byte
	Tokens   []token.Token
	Position position.Position
}
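
With this file deleted, the scanner stops defining its own token type; the exported pkg/token.Token (extended at the bottom of this diff) takes over everywhere. A rough field-by-field correspondence, for orientation only:

	// internal/scanner.Token (deleted)    pkg/token.Token (kept)
	// ID       TokenID                    ID            token.ID
	// Value    []byte                     Value         []byte
	// Tokens   []token.Token              SkippedTokens []*token.Token
	// Position position.Position          Position      *position.Position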
@ -1,22 +0,0 @@
package scanner

// TokenPool light version of sync.Pool for Token objects
type TokenPool struct {
	pool []*Token
}

// Get returns *Token from pool or creates new object
func (tp *TokenPool) Get() *Token {
	if len(tp.pool) < 1 {
		return new(Token)
	}

	t := tp.pool[len(tp.pool)-1]
	tp.pool = tp.pool[:len(tp.pool)-1]
	return t
}

// Put returns *Token to pool
func (tp *TokenPool) Put(t *Token) {
	tp.pool = append(tp.pool, t)
}
@ -1,34 +0,0 @@
package scanner_test

import (
	"reflect"
	"testing"

	"github.com/z7zmey/php-parser/internal/scanner"
)

func TestTokenPoolGetNew(t *testing.T) {
	tp := new(scanner.TokenPool)

	newToken := tp.Get()

	if newToken == nil {
		t.Errorf("*TokenPool.Get() must return new *Token object\n")
	}
}

func TestTokenPoolGetFromPool(t *testing.T) {
	tp := new(scanner.TokenPool)

	expectedToken := &scanner.Token{
		Value: []byte("test"),
	}

	tp.Put(expectedToken)

	actualToken := tp.Get()

	if !reflect.DeepEqual(expectedToken, actualToken) {
		t.Errorf("*TokenPool.Put() must return *Token object from pool\n")
	}
}
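
The free list deleted above made callers recycle tokens explicitly through Put; the pools introduced later in this diff (pkg/position/pool.go, pkg/token/pool.go) instead hand out pointers into a preallocated block and take nothing back. A minimal sketch of the new pattern, using only the API added in this commit:

	package main

	import "github.com/z7zmey/php-parser/pkg/token"

	func main() {
		// Block allocation: Get carves the next slot out of a 1024-element slab.
		p := token.NewPool(token.DefaultBlockSize)
		t := p.Get()
		t.Value = []byte("$x")
		// No Put: exhausted blocks are simply replaced, and the GC reclaims
		// a block once no returned pointer references it any longer.
	}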
@ -1,161 +0,0 @@
// Code generated by "stringer -type=TokenID -output ./tokenid_string.go"; DO NOT EDIT.

package scanner

import "strconv"

func _() {
	// An "invalid array index" compiler error signifies that the constant values have changed.
	// Re-run the stringer command to generate them again.
	var x [1]struct{}
	_ = x[T_INCLUDE-57346]
	_ = x[T_INCLUDE_ONCE-57347]
	_ = x[T_EXIT-57348]
	_ = x[T_IF-57349]
	_ = x[T_LNUMBER-57350]
	_ = x[T_DNUMBER-57351]
	_ = x[T_STRING-57352]
	_ = x[T_STRING_VARNAME-57353]
	_ = x[T_VARIABLE-57354]
	_ = x[T_NUM_STRING-57355]
	_ = x[T_INLINE_HTML-57356]
	_ = x[T_CHARACTER-57357]
	_ = x[T_BAD_CHARACTER-57358]
	_ = x[T_ENCAPSED_AND_WHITESPACE-57359]
	_ = x[T_CONSTANT_ENCAPSED_STRING-57360]
	_ = x[T_ECHO-57361]
	_ = x[T_DO-57362]
	_ = x[T_WHILE-57363]
	_ = x[T_ENDWHILE-57364]
	_ = x[T_FOR-57365]
	_ = x[T_ENDFOR-57366]
	_ = x[T_FOREACH-57367]
	_ = x[T_ENDFOREACH-57368]
	_ = x[T_DECLARE-57369]
	_ = x[T_ENDDECLARE-57370]
	_ = x[T_AS-57371]
	_ = x[T_SWITCH-57372]
	_ = x[T_ENDSWITCH-57373]
	_ = x[T_CASE-57374]
	_ = x[T_DEFAULT-57375]
	_ = x[T_BREAK-57376]
	_ = x[T_CONTINUE-57377]
	_ = x[T_GOTO-57378]
	_ = x[T_FUNCTION-57379]
	_ = x[T_FN-57380]
	_ = x[T_CONST-57381]
	_ = x[T_RETURN-57382]
	_ = x[T_TRY-57383]
	_ = x[T_CATCH-57384]
	_ = x[T_FINALLY-57385]
	_ = x[T_THROW-57386]
	_ = x[T_USE-57387]
	_ = x[T_INSTEADOF-57388]
	_ = x[T_GLOBAL-57389]
	_ = x[T_VAR-57390]
	_ = x[T_UNSET-57391]
	_ = x[T_ISSET-57392]
	_ = x[T_EMPTY-57393]
	_ = x[T_HALT_COMPILER-57394]
	_ = x[T_CLASS-57395]
	_ = x[T_TRAIT-57396]
	_ = x[T_INTERFACE-57397]
	_ = x[T_EXTENDS-57398]
	_ = x[T_IMPLEMENTS-57399]
	_ = x[T_OBJECT_OPERATOR-57400]
	_ = x[T_DOUBLE_ARROW-57401]
	_ = x[T_LIST-57402]
	_ = x[T_ARRAY-57403]
	_ = x[T_CALLABLE-57404]
	_ = x[T_CLASS_C-57405]
	_ = x[T_TRAIT_C-57406]
	_ = x[T_METHOD_C-57407]
	_ = x[T_FUNC_C-57408]
	_ = x[T_LINE-57409]
	_ = x[T_FILE-57410]
	_ = x[T_COMMENT-57411]
	_ = x[T_DOC_COMMENT-57412]
	_ = x[T_OPEN_TAG-57413]
	_ = x[T_OPEN_TAG_WITH_ECHO-57414]
	_ = x[T_CLOSE_TAG-57415]
	_ = x[T_WHITESPACE-57416]
	_ = x[T_START_HEREDOC-57417]
	_ = x[T_END_HEREDOC-57418]
	_ = x[T_DOLLAR_OPEN_CURLY_BRACES-57419]
	_ = x[T_CURLY_OPEN-57420]
	_ = x[T_PAAMAYIM_NEKUDOTAYIM-57421]
	_ = x[T_NAMESPACE-57422]
	_ = x[T_NS_C-57423]
	_ = x[T_DIR-57424]
	_ = x[T_NS_SEPARATOR-57425]
	_ = x[T_ELLIPSIS-57426]
	_ = x[T_EVAL-57427]
	_ = x[T_REQUIRE-57428]
	_ = x[T_REQUIRE_ONCE-57429]
	_ = x[T_LOGICAL_OR-57430]
	_ = x[T_LOGICAL_XOR-57431]
	_ = x[T_LOGICAL_AND-57432]
	_ = x[T_INSTANCEOF-57433]
	_ = x[T_NEW-57434]
	_ = x[T_CLONE-57435]
	_ = x[T_ELSEIF-57436]
	_ = x[T_ELSE-57437]
	_ = x[T_ENDIF-57438]
	_ = x[T_PRINT-57439]
	_ = x[T_YIELD-57440]
	_ = x[T_STATIC-57441]
	_ = x[T_ABSTRACT-57442]
	_ = x[T_FINAL-57443]
	_ = x[T_PRIVATE-57444]
	_ = x[T_PROTECTED-57445]
	_ = x[T_PUBLIC-57446]
	_ = x[T_INC-57447]
	_ = x[T_DEC-57448]
	_ = x[T_YIELD_FROM-57449]
	_ = x[T_INT_CAST-57450]
	_ = x[T_DOUBLE_CAST-57451]
	_ = x[T_STRING_CAST-57452]
	_ = x[T_ARRAY_CAST-57453]
	_ = x[T_OBJECT_CAST-57454]
	_ = x[T_BOOL_CAST-57455]
	_ = x[T_UNSET_CAST-57456]
	_ = x[T_COALESCE-57457]
	_ = x[T_SPACESHIP-57458]
	_ = x[T_NOELSE-57459]
	_ = x[T_PLUS_EQUAL-57460]
	_ = x[T_MINUS_EQUAL-57461]
	_ = x[T_MUL_EQUAL-57462]
	_ = x[T_POW_EQUAL-57463]
	_ = x[T_DIV_EQUAL-57464]
	_ = x[T_CONCAT_EQUAL-57465]
	_ = x[T_MOD_EQUAL-57466]
	_ = x[T_AND_EQUAL-57467]
	_ = x[T_OR_EQUAL-57468]
	_ = x[T_XOR_EQUAL-57469]
	_ = x[T_SL_EQUAL-57470]
	_ = x[T_SR_EQUAL-57471]
	_ = x[T_COALESCE_EQUAL-57472]
	_ = x[T_BOOLEAN_OR-57473]
	_ = x[T_BOOLEAN_AND-57474]
	_ = x[T_POW-57475]
	_ = x[T_SL-57476]
	_ = x[T_SR-57477]
	_ = x[T_IS_IDENTICAL-57478]
	_ = x[T_IS_NOT_IDENTICAL-57479]
	_ = x[T_IS_EQUAL-57480]
	_ = x[T_IS_NOT_EQUAL-57481]
	_ = x[T_IS_SMALLER_OR_EQUAL-57482]
	_ = x[T_IS_GREATER_OR_EQUAL-57483]
}

const _TokenID_name = "T_INCLUDET_INCLUDE_ONCET_EXITT_IFT_LNUMBERT_DNUMBERT_STRINGT_STRING_VARNAMET_VARIABLET_NUM_STRINGT_INLINE_HTMLT_CHARACTERT_BAD_CHARACTERT_ENCAPSED_AND_WHITESPACET_CONSTANT_ENCAPSED_STRINGT_ECHOT_DOT_WHILET_ENDWHILET_FORT_ENDFORT_FOREACHT_ENDFOREACHT_DECLARET_ENDDECLARET_AST_SWITCHT_ENDSWITCHT_CASET_DEFAULTT_BREAKT_CONTINUET_GOTOT_FUNCTIONT_FNT_CONSTT_RETURNT_TRYT_CATCHT_FINALLYT_THROWT_USET_INSTEADOFT_GLOBALT_VART_UNSETT_ISSETT_EMPTYT_HALT_COMPILERT_CLASST_TRAITT_INTERFACET_EXTENDST_IMPLEMENTST_OBJECT_OPERATORT_DOUBLE_ARROWT_LISTT_ARRAYT_CALLABLET_CLASS_CT_TRAIT_CT_METHOD_CT_FUNC_CT_LINET_FILET_COMMENTT_DOC_COMMENTT_OPEN_TAGT_OPEN_TAG_WITH_ECHOT_CLOSE_TAGT_WHITESPACET_START_HEREDOCT_END_HEREDOCT_DOLLAR_OPEN_CURLY_BRACEST_CURLY_OPENT_PAAMAYIM_NEKUDOTAYIMT_NAMESPACET_NS_CT_DIRT_NS_SEPARATORT_ELLIPSIST_EVALT_REQUIRET_REQUIRE_ONCET_LOGICAL_ORT_LOGICAL_XORT_LOGICAL_ANDT_INSTANCEOFT_NEWT_CLONET_ELSEIFT_ELSET_ENDIFT_PRINTT_YIELDT_STATICT_ABSTRACTT_FINALT_PRIVATET_PROTECTEDT_PUBLICT_INCT_DECT_YIELD_FROMT_INT_CASTT_DOUBLE_CASTT_STRING_CASTT_ARRAY_CASTT_OBJECT_CASTT_BOOL_CASTT_UNSET_CASTT_COALESCET_SPACESHIPT_NOELSET_PLUS_EQUALT_MINUS_EQUALT_MUL_EQUALT_POW_EQUALT_DIV_EQUALT_CONCAT_EQUALT_MOD_EQUALT_AND_EQUALT_OR_EQUALT_XOR_EQUALT_SL_EQUALT_SR_EQUALT_COALESCE_EQUALT_BOOLEAN_ORT_BOOLEAN_ANDT_POWT_SLT_SRT_IS_IDENTICALT_IS_NOT_IDENTICALT_IS_EQUALT_IS_NOT_EQUALT_IS_SMALLER_OR_EQUALT_IS_GREATER_OR_EQUAL"

var _TokenID_index = [...]uint16{0, 9, 23, 29, 33, 42, 51, 59, 75, 85, 97, 110, 121, 136, 161, 187, 193, 197, 204, 214, 219, 227, 236, 248, 257, 269, 273, 281, 292, 298, 307, 314, 324, 330, 340, 344, 351, 359, 364, 371, 380, 387, 392, 403, 411, 416, 423, 430, 437, 452, 459, 466, 477, 486, 498, 515, 529, 535, 542, 552, 561, 570, 580, 588, 594, 600, 609, 622, 632, 652, 663, 675, 690, 703, 729, 741, 763, 774, 780, 785, 799, 809, 815, 824, 838, 850, 863, 876, 888, 893, 900, 908, 914, 921, 928, 935, 943, 953, 960, 969, 980, 988, 993, 998, 1010, 1020, 1033, 1046, 1058, 1071, 1082, 1094, 1104, 1115, 1123, 1135, 1148, 1159, 1170, 1181, 1195, 1206, 1217, 1227, 1238, 1248, 1258, 1274, 1286, 1299, 1304, 1308, 1312, 1326, 1344, 1354, 1368, 1389, 1410}

func (i TokenID) String() string {
	i -= 57346
	if i < 0 || i >= TokenID(len(_TokenID_index)-1) {
		return "TokenID(" + strconv.FormatInt(int64(i+57346), 10) + ")"
	}
	return _TokenID_name[_TokenID_index[i]:_TokenID_index[i+1]]
}
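
This generated stringer disappears together with the TokenID type itself; its replacement lives in pkg/token, whose //go:generate stringer -type=ID -output ./token_string.go directive is visible near the end of this diff. Assuming the regenerated stringer, the same names resolve as before:

	fmt.Println(token.T_VARIABLE) // prints "T_VARIABLE"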
@ -16,6 +16,10 @@ func (n *Node) GetNode() *Node {
	return n
}

func (n *Node) GetPosition() *position.Position {
	return n.Position
}

// Root node
type Root struct {
	Node

@ -121,7 +121,7 @@ func (v *Dump) printNode(n *ast.Node) {
		key := token.Position(k)

		v.printIndent(v.indent + 2)
		v.print("token." + key.String() + ": []token.Token{\n")
		v.print("token." + key.String() + ": []*token.Token{\n")

		for _, tkn := range n.Tokens[key] {
			v.printIndent(v.indent + 3)
@ -13,7 +13,7 @@ func ExampleDump() {
	stxTree := &ast.Root{
		Node: ast.Node{
			Tokens: token.Collection{
				token.Start: []token.Token{
				token.Start: []*token.Token{
					{
						ID:    token.T_WHITESPACE,
						Value: []byte(" "),
@ -44,7 +44,7 @@ func ExampleDump() {
	//&ast.Root{
	//	Node: ast.Node{
	//		Tokens: token.Collection{
	//			token.Start: []token.Token{
	//			token.Start: []*token.Token{
	//				{
	//					ID: token.T_WHITESPACE,
	//					Value: []byte(" "),
pkg/ast/visitor/filter_tokens.go (new file, 14 lines)
@ -0,0 +1,14 @@
package visitor

import (
	"github.com/z7zmey/php-parser/pkg/ast"
)

type FilterTokens struct {
	Null
}

func (v *FilterTokens) EnterNode(n ast.Vertex) bool {
	n.GetNode().Tokens = nil
	return true
}
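
A minimal usage sketch for the new visitor: FilterTokens embeds Null and blanks a vertex's free-floating tokens on entry, so any walker that calls EnterNode once per node strips the whole tree. The single direct call below stands in for such a traversal, which this diff does not itself define:

	v := &visitor.FilterTokens{}
	v.EnterNode(stxTree) // clears stxTree.GetNode().Tokens; a traverser would repeat this per vertex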
@ -29,7 +29,7 @@ func Parse(src []byte, ver string, cfg Config) (ast.Vertex, error) {
		return nil, err
	}

	lexer := scanner.NewLexer(src, ver, cfg.WithTokens, cfg.ErrorHandlerFunc)
	lexer := scanner.NewLexer(src, ver, cfg.ErrorHandlerFunc)

	if r == -1 {
		parser = php5.NewParser(lexer, cfg.ErrorHandlerFunc)
pkg/position/pool.go (new file, 29 lines)
@ -0,0 +1,29 @@
package position

const DefaultBlockSize = 1024

type Pool struct {
	block []Position
	off   int
}

func NewPool(blockSize int) *Pool {
	return &Pool{
		block: make([]Position, blockSize),
	}
}

func (p *Pool) Get() *Position {
	if len(p.block) == 0 {
		return nil
	}

	if len(p.block) == p.off {
		p.block = make([]Position, len(p.block))
		p.off = 0
	}

	p.off++

	return &p.block[p.off-1]
}
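
One subtlety of this allocator is worth spelling out: Get returns addresses into the current block, and once the block fills up a fresh one is allocated in its place; earlier pointers remain valid because they keep the old block alive. Illustrated with a deliberately tiny block size:

	p := position.NewPool(2)
	a := p.Get() // &block[0]
	b := p.Get() // &block[1]
	c := p.Get() // block full: a new block is allocated, c points into it
	// a and b are untouched — the old array stays reachable through them.
	_, _, _ = a, b, c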
@ -12,7 +12,7 @@ import (
)

func parsePhp5(src string) ast.Vertex {
	lexer := scanner.NewLexer([]byte(src), "5.6", true, nil)
	lexer := scanner.NewLexer([]byte(src), "5.6", nil)
	php5parser := php5.NewParser(lexer, nil)
	php5parser.Parse()

@ -29,7 +29,7 @@ abstract class Bar extends Baz

	// parse

	lexer := scanner.NewLexer([]byte(src), "7.4", true, nil)
	lexer := scanner.NewLexer([]byte(src), "7.4", nil)
	php7parser := php7.NewParser(lexer, nil)
	php7parser.Parse()

@ -61,7 +61,7 @@ abstract class Bar extends Baz
}

func parse(src string) ast.Vertex {
	lexer := scanner.NewLexer([]byte(src), "7.4", true, nil)
	lexer := scanner.NewLexer([]byte(src), "7.4", nil)
	php7parser := php7.NewParser(lexer, nil)
	php7parser.Parse()

@ -75,7 +75,7 @@ func TestPrinterPrintFileInlineHtml(t *testing.T) {
			Expr: &ast.ExprVariable{
				Node: ast.Node{
					Tokens: token.Collection{
						token.Start: []token.Token{
						token.Start: []*token.Token{
							{
								ID:    token.ID('$'),
								Value: []byte("$"),
@ -93,7 +93,7 @@ func TestPrinterPrintFileInlineHtml(t *testing.T) {
			Expr: &ast.ExprVariable{
				Node: ast.Node{
					Tokens: token.Collection{
						token.Start: []token.Token{
						token.Start: []*token.Token{
							{
								ID:    token.ID('$'),
								Value: []byte("$"),
pkg/token/pool.go (new file, 29 lines)
@ -0,0 +1,29 @@
package token

const DefaultBlockSize = 1024

type Pool struct {
	block []Token
	off   int
}

func NewPool(blockSize int) *Pool {
	return &Pool{
		block: make([]Token, blockSize),
	}
}

func (p *Pool) Get() *Token {
	if len(p.block) == 0 {
		return nil
	}

	if len(p.block) == p.off {
		p.block = make([]Token, len(p.block))
		p.off = 0
	}

	p.off++

	return &p.block[p.off-1]
}
pkg/token/pool_bench_test.go (new file, 173 lines)
@ -0,0 +1,173 @@
package token

import (
	"testing"
)

const amount = 100000

func BenchmarkPlain(b *testing.B) {
	for n := 0; n < b.N; n++ {
		buf := make([]*Token, 0, amount)

		for i := 0; i < amount; i++ {
			buf = append(buf, &Token{})
		}
	}
}

func BenchmarkSlice128(b *testing.B) {
	for n := 0; n < b.N; n++ {
		buf := make([]*Token, 0, amount)
		slc := make([]Token, 0, 128)

		for i := 0; i < amount; i++ {
			slc = append(slc, Token{})
			buf = append(buf, &slc[len(slc)-1])
		}
	}
}

func BenchmarkSlice512(b *testing.B) {
	for n := 0; n < b.N; n++ {
		buf := make([]*Token, 0, amount)
		slc := make([]Token, 0, 512)

		for i := 0; i < amount; i++ {
			slc = append(slc, Token{})
			buf = append(buf, &slc[len(slc)-1])
		}
	}
}

func BenchmarkSlice1024(b *testing.B) {
	for n := 0; n < b.N; n++ {
		buf := make([]*Token, 0, amount)
		slc := make([]Token, 0, 1024)

		for i := 0; i < amount; i++ {
			slc = append(slc, Token{})
			buf = append(buf, &slc[len(slc)-1])
		}
	}
}

func BenchmarkSlice2048(b *testing.B) {
	for n := 0; n < b.N; n++ {
		buf := make([]*Token, 0, amount)
		slc := make([]Token, 0, 2048)

		for i := 0; i < amount; i++ {
			slc = append(slc, Token{})
			buf = append(buf, &slc[len(slc)-1])
		}
	}
}

func BenchmarkBlockAppend128(b *testing.B) {
	for n := 0; n < b.N; n++ {
		buf := make([]*Token, 0, amount)
		slc := make([]Token, 0, 128)

		for i := 0; i < amount; i++ {
			if len(slc) == 128 {
				slc = make([]Token, 0, 128)
			}

			slc = append(slc, Token{})
			buf = append(buf, &slc[len(slc)-1])
		}
	}
}

func BenchmarkBlockAppend512(b *testing.B) {
	for n := 0; n < b.N; n++ {
		buf := make([]*Token, 0, amount)
		slc := make([]Token, 0, 512)

		for i := 0; i < amount; i++ {
			if len(slc) == 512 {
				slc = make([]Token, 0, 512)
			}

			slc = append(slc, Token{})
			buf = append(buf, &slc[len(slc)-1])
		}
	}
}

func BenchmarkBlockAppend1024(b *testing.B) {
	for n := 0; n < b.N; n++ {
		buf := make([]*Token, 0, amount)
		slc := make([]Token, 0, 1024)

		for i := 0; i < amount; i++ {
			if len(slc) == 1024 {
				slc = make([]Token, 0, 1024)
			}

			slc = append(slc, Token{})
			buf = append(buf, &slc[len(slc)-1])
		}
	}
}

func BenchmarkBlockAppend2048(b *testing.B) {
	for n := 0; n < b.N; n++ {
		buf := make([]*Token, 0, amount)
		slc := make([]Token, 0, 2048)

		for i := 0; i < amount; i++ {
			if len(slc) == 2048 {
				slc = make([]Token, 0, 2048)
			}

			slc = append(slc, Token{})
			buf = append(buf, &slc[len(slc)-1])
		}
	}
}

func BenchmarkPool128(b *testing.B) {
	for n := 0; n < b.N; n++ {
		pool := NewPool(128)
		buf := make([]*Token, 0, amount)

		for i := 0; i < amount; i++ {
			buf = append(buf, pool.Get())
		}
	}
}

func BenchmarkPool512(b *testing.B) {
	for n := 0; n < b.N; n++ {
		pool := NewPool(512)
		buf := make([]*Token, 0, amount)

		for i := 0; i < amount; i++ {
			buf = append(buf, pool.Get())
		}
	}
}

func BenchmarkPool1024(b *testing.B) {
	for n := 0; n < b.N; n++ {
		pool := NewPool(1024)
		buf := make([]*Token, 0, amount)

		for i := 0; i < amount; i++ {
			buf = append(buf, pool.Get())
		}
	}
}

func BenchmarkPool2048(b *testing.B) {
	for n := 0; n < b.N; n++ {
		pool := NewPool(2048)
		buf := make([]*Token, 0, amount)

		for i := 0; i < amount; i++ {
			buf = append(buf, pool.Get())
		}
	}
}
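
These benchmarks pit per-token heap allocation (BenchmarkPlain) against bump-allocating out of slabs at several block sizes, with the Pool variants exercising exactly the pool.go added above; presumably they are what motivated the 1024 default. They run the usual way:

	go test -bench=. ./pkg/token/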
@ -62,7 +62,7 @@ const (
	CloseParenthesisToken
)

type Collection map[Position][]Token
type Collection map[Position][]*Token

func (c Collection) IsEmpty() bool {
	for _, v := range c {
@ -1,5 +1,7 @@
package token

import "github.com/z7zmey/php-parser/pkg/position"

//go:generate stringer -type=ID -output ./token_string.go
type ID int

@ -147,4 +149,11 @@ const (
type Token struct {
	ID            ID
	Value         []byte
	Position      *position.Position
	SkippedTokens []*Token
	Skipped       []byte
}

func (t *Token) GetPosition() *position.Position {
	return t.Position
}
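
Since the token now carries its own position and skipped-content fields, one can be built and inspected standalone; a small sketch using only what this diff defines:

	package main

	import (
		"fmt"

		"github.com/z7zmey/php-parser/pkg/token"
	)

	func main() {
		t := &token.Token{
			ID:    token.T_WHITESPACE,
			Value: []byte(" "),
		}
		// Position stays nil until the lexer sets it.
		fmt.Println(t.ID, t.GetPosition()) // T_WHITESPACE <nil>
	}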