2018-01-26 13:24:56 +00:00
|
|
|
package php5
|
|
|
|
|
|
|
|
import (
|
2020-05-13 17:18:53 +00:00
|
|
|
"bytes"
|
2020-05-17 19:56:32 +00:00
|
|
|
"fmt"
|
2020-05-12 21:16:36 +00:00
|
|
|
|
|
|
|
"github.com/z7zmey/php-parser/internal/positionbuilder"
|
2020-05-13 17:18:53 +00:00
|
|
|
"github.com/z7zmey/php-parser/internal/scanner"
|
2020-05-12 21:16:36 +00:00
|
|
|
"github.com/z7zmey/php-parser/pkg/ast"
|
|
|
|
"github.com/z7zmey/php-parser/pkg/errors"
|
|
|
|
"github.com/z7zmey/php-parser/pkg/position"
|
2020-05-13 17:18:53 +00:00
|
|
|
"github.com/z7zmey/php-parser/pkg/token"
|
2018-01-26 13:24:56 +00:00
|
|
|
)
|
|
|
|
|
2018-04-15 19:56:20 +00:00
|
|
|
// Token stores the given scanner token in the symbol value used by the
// generated yacc parser, so grammar actions can access it via $n.token.
func (lval *yySymType) Token(t *scanner.Token) {
	lval.token = t
}
|
|
|
|
|
|
|
|
// Parser structure holds the lexer and the state produced by a parse run.
type Parser struct {
	Lexer           scanner.Scanner                  // token source; also accumulates parse errors
	currentToken    *scanner.Token                   // last token delivered by Lex; used by Error for positions
	positionBuilder *positionbuilder.PositionBuilder // builds node positions; reset at the start of Parse
	rootNode        ast.Vertex                       // AST root; nil until set during Parse (presumably by grammar actions)
}
|
2018-01-26 13:24:56 +00:00
|
|
|
|
2018-04-09 21:52:38 +00:00
|
|
|
// NewParser creates and returns new Parser
|
2019-12-26 15:57:56 +00:00
|
|
|
func NewParser(src []byte, v string) *Parser {
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := scanner.NewLexer(src)
|
2019-12-26 15:57:56 +00:00
|
|
|
lexer.PHPVersion = v
|
2018-04-09 21:52:38 +00:00
|
|
|
|
|
|
|
return &Parser{
|
2018-04-10 13:19:47 +00:00
|
|
|
lexer,
|
2018-04-09 21:52:38 +00:00
|
|
|
nil,
|
|
|
|
nil,
|
|
|
|
nil,
|
|
|
|
}
|
|
|
|
}
|
2018-02-04 16:51:44 +00:00
|
|
|
|
2018-04-09 21:52:38 +00:00
|
|
|
// Lex proxy to lexer Lex
|
|
|
|
func (l *Parser) Lex(lval *yySymType) int {
|
|
|
|
t := l.Lexer.Lex(lval)
|
2018-04-15 20:04:24 +00:00
|
|
|
l.currentToken = lval.token
|
2018-04-09 21:52:38 +00:00
|
|
|
return t
|
|
|
|
}
|
|
|
|
|
|
|
|
func (l *Parser) Error(msg string) {
|
2018-11-05 15:14:09 +00:00
|
|
|
pos := &position.Position{
|
|
|
|
StartLine: l.currentToken.StartLine,
|
|
|
|
EndLine: l.currentToken.EndLine,
|
|
|
|
StartPos: l.currentToken.StartPos,
|
|
|
|
EndPos: l.currentToken.EndPos,
|
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
l.Lexer.AddError(errors.NewError(msg, pos))
|
2018-04-09 21:52:38 +00:00
|
|
|
}
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
// WithTokens enables token collection in the lexer for subsequent parsing.
func (l *Parser) WithTokens() {
	l.Lexer.SetWithTokens(true)
}
|
|
|
|
|
2018-04-09 21:52:38 +00:00
|
|
|
// Parse the php7 Parser entrypoint
|
|
|
|
func (l *Parser) Parse() int {
|
|
|
|
// init
|
2019-03-10 21:37:01 +00:00
|
|
|
l.Lexer.SetErrors(nil)
|
2018-04-09 21:52:38 +00:00
|
|
|
l.rootNode = nil
|
2019-12-26 15:57:56 +00:00
|
|
|
l.positionBuilder = &positionbuilder.PositionBuilder{}
|
2018-04-09 21:52:38 +00:00
|
|
|
|
|
|
|
// parse
|
|
|
|
|
|
|
|
return yyParse(l)
|
2018-01-26 13:24:56 +00:00
|
|
|
}
|
|
|
|
|
2018-04-09 21:52:38 +00:00
|
|
|
// GetRootNode returns the root node of the AST built by the last Parse call
// (nil if Parse has not been run or produced no tree).
func (l *Parser) GetRootNode() ast.Vertex {
	return l.rootNode
}
|
|
|
|
|
2018-04-09 21:52:38 +00:00
|
|
|
// GetErrors returns the list of errors accumulated by the lexer during the
// last parse run.
func (l *Parser) GetErrors() []*errors.Error {
	return l.Lexer.GetErrors()
}
|
|
|
|
|
2018-06-06 23:25:27 +00:00
|
|
|
// helpers
|
|
|
|
|
2020-05-13 17:18:53 +00:00
|
|
|
func lastNode(nn []ast.Vertex) ast.Vertex {
|
2018-06-26 08:57:17 +00:00
|
|
|
if len(nn) == 0 {
|
|
|
|
return nil
|
|
|
|
}
|
2018-06-06 23:25:27 +00:00
|
|
|
return nn[len(nn)-1]
|
|
|
|
}
|
|
|
|
|
2020-05-13 17:18:53 +00:00
|
|
|
func firstNode(nn []ast.Vertex) ast.Vertex {
|
2018-06-06 23:25:27 +00:00
|
|
|
return nn[0]
|
|
|
|
}
|
2018-06-10 11:53:10 +00:00
|
|
|
|
|
|
|
// isDollar reports whether r is the '$' sigil.
func isDollar(r rune) bool {
	switch r {
	case '$':
		return true
	default:
		return false
	}
}
|
2018-06-10 23:41:12 +00:00
|
|
|
|
2020-05-13 17:18:53 +00:00
|
|
|
func (l *Parser) MoveFreeFloating(src ast.Vertex, dst ast.Vertex) {
|
2019-03-10 21:37:01 +00:00
|
|
|
if l.Lexer.GetWithFreeFloating() == false {
|
2019-02-13 20:18:07 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2020-05-13 17:18:53 +00:00
|
|
|
if src.GetNode().Tokens == nil {
|
2019-02-13 20:18:07 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2020-05-13 17:18:53 +00:00
|
|
|
l.setFreeFloating(dst, token.Start, src.GetNode().Tokens[token.Start])
|
|
|
|
delete(src.GetNode().Tokens, token.Start)
|
2019-01-02 21:22:28 +00:00
|
|
|
}
|
|
|
|
|
2020-05-13 17:18:53 +00:00
|
|
|
func (l *Parser) setFreeFloating(dst ast.Vertex, p token.Position, strings []token.Token) {
|
2019-03-10 21:37:01 +00:00
|
|
|
if l.Lexer.GetWithFreeFloating() == false {
|
2018-07-29 08:44:38 +00:00
|
|
|
return
|
2018-06-29 21:51:11 +00:00
|
|
|
}
|
|
|
|
|
2019-02-13 20:18:07 +00:00
|
|
|
if len(strings) == 0 {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2020-05-13 17:18:53 +00:00
|
|
|
dstCollection := &dst.GetNode().Tokens
|
2019-02-13 20:18:07 +00:00
|
|
|
if *dstCollection == nil {
|
2020-05-13 17:18:53 +00:00
|
|
|
*dstCollection = make(token.Collection)
|
2018-07-29 08:44:38 +00:00
|
|
|
}
|
|
|
|
|
2019-02-13 20:18:07 +00:00
|
|
|
(*dstCollection)[p] = strings
|
2018-07-29 08:44:38 +00:00
|
|
|
}
|
|
|
|
|
2020-05-13 17:18:53 +00:00
|
|
|
func (l *Parser) GetFreeFloatingToken(t *scanner.Token) []token.Token {
|
2019-03-10 21:37:01 +00:00
|
|
|
if l.Lexer.GetWithFreeFloating() == false {
|
2020-05-13 17:18:53 +00:00
|
|
|
return []token.Token{}
|
2018-10-24 14:04:13 +00:00
|
|
|
}
|
|
|
|
|
2020-05-17 19:56:32 +00:00
|
|
|
return []token.Token{
|
|
|
|
{
|
|
|
|
ID: token.ID(t.ID),
|
|
|
|
Value: t.Value,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (l *Parser) addDollarToken(v ast.Vertex) {
|
|
|
|
if l.Lexer.GetWithFreeFloating() == false {
|
|
|
|
return
|
|
|
|
}
|
2018-07-29 08:44:38 +00:00
|
|
|
|
2020-05-17 19:56:32 +00:00
|
|
|
l.setFreeFloating(v, token.Dollar, []token.Token{
|
|
|
|
{
|
|
|
|
ID: token.ID('$'),
|
|
|
|
Value: []byte("$"),
|
|
|
|
},
|
|
|
|
})
|
2019-02-13 20:18:07 +00:00
|
|
|
}
|
|
|
|
|
2020-05-13 17:18:53 +00:00
|
|
|
func (l *Parser) splitSemiColonAndPhpCloseTag(htmlNode ast.Vertex, prevNode ast.Vertex) {
|
2019-03-10 21:37:01 +00:00
|
|
|
if l.Lexer.GetWithFreeFloating() == false {
|
2019-02-13 20:18:07 +00:00
|
|
|
return
|
2018-07-29 08:44:38 +00:00
|
|
|
}
|
|
|
|
|
2020-05-13 17:18:53 +00:00
|
|
|
semiColon := prevNode.GetNode().Tokens[token.SemiColon]
|
|
|
|
delete(prevNode.GetNode().Tokens, token.SemiColon)
|
2019-02-13 20:18:07 +00:00
|
|
|
if len(semiColon) == 0 {
|
|
|
|
return
|
|
|
|
}
|
2018-06-29 21:51:11 +00:00
|
|
|
|
2019-02-13 20:18:07 +00:00
|
|
|
if semiColon[0].Value[0] == ';' {
|
2020-05-13 17:18:53 +00:00
|
|
|
l.setFreeFloating(prevNode, token.SemiColon, []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-13 17:18:53 +00:00
|
|
|
ID: token.ID(';'),
|
|
|
|
Value: semiColon[0].Value[0:1],
|
2019-02-13 20:18:07 +00:00
|
|
|
},
|
|
|
|
})
|
|
|
|
}
|
2018-12-17 13:24:13 +00:00
|
|
|
|
2019-02-13 20:18:07 +00:00
|
|
|
vlen := len(semiColon[0].Value)
|
2020-05-17 19:56:32 +00:00
|
|
|
fmt.Printf("vlen: %q\n", string(semiColon[0].Value))
|
|
|
|
|
2019-02-13 20:18:07 +00:00
|
|
|
tlen := 2
|
2020-05-13 17:18:53 +00:00
|
|
|
if bytes.HasSuffix(semiColon[0].Value, []byte("?>\n")) {
|
2019-02-13 20:18:07 +00:00
|
|
|
tlen = 3
|
2018-12-17 13:24:13 +00:00
|
|
|
}
|
|
|
|
|
2020-05-13 17:18:53 +00:00
|
|
|
phpCloseTag := []token.Token{}
|
2019-02-13 20:18:07 +00:00
|
|
|
if vlen-tlen > 1 {
|
2020-05-13 17:18:53 +00:00
|
|
|
phpCloseTag = append(phpCloseTag, token.Token{
|
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: semiColon[0].Value[1 : vlen-tlen],
|
2019-02-13 20:18:07 +00:00
|
|
|
})
|
2018-12-17 13:24:13 +00:00
|
|
|
}
|
2019-02-13 20:18:07 +00:00
|
|
|
|
2020-05-13 17:18:53 +00:00
|
|
|
phpCloseTag = append(phpCloseTag, token.Token{
|
|
|
|
ID: T_CLOSE_TAG,
|
|
|
|
Value: semiColon[0].Value[vlen-tlen:],
|
2019-02-13 20:18:07 +00:00
|
|
|
})
|
|
|
|
|
2020-05-13 17:18:53 +00:00
|
|
|
l.setFreeFloating(htmlNode, token.Start, append(phpCloseTag, htmlNode.GetNode().Tokens[token.Start]...))
|
2018-12-17 13:24:13 +00:00
|
|
|
}
|
|
|
|
|
2018-06-10 23:41:12 +00:00
|
|
|
func (p *Parser) returnTokenToPool(yyDollar []yySymType, yyVAL *yySymType) {
|
|
|
|
for i := 1; i < len(yyDollar); i++ {
|
|
|
|
if yyDollar[i].token != nil {
|
2019-03-10 21:37:01 +00:00
|
|
|
p.Lexer.ReturnTokenToPool(yyDollar[i].token)
|
2018-06-10 23:41:12 +00:00
|
|
|
}
|
|
|
|
yyDollar[i].token = nil
|
|
|
|
}
|
|
|
|
yyVAL.token = nil
|
|
|
|
}
|