Merge branch 'master' into refactoring

Vadym Slizov 2020-07-04 18:41:12 +03:00
commit f12218d083
4 changed files with 22 additions and 9 deletions


@@ -38,7 +38,7 @@ func (p *Parser) Error(msg string) {
 	if p.errHandlerFunc == nil {
 		return
 	}
 
 	var pos = p.currentToken.Position
 	p.errHandlerFunc(errors.NewError(msg, &pos))
 }
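The hunk above is the whole error path: when no handler is registered the message is silently dropped, otherwise the handler receives the value built by errors.NewError(msg, &pos). Below is a minimal sketch of a collecting handler, written as if it sat in the same file as Error so the errors import is already in scope; the func(*errors.Error) shape is only inferred from that call site, and how a handler is actually attached to the Parser is not part of this diff.

// Illustration only, not part of this change. The handler type is inferred
// from the call p.errHandlerFunc(errors.NewError(msg, &pos)) above and is
// not confirmed by this diff.
var collected []*errors.Error

errHandler := func(e *errors.Error) {
	// Keep each reported error (message plus position) instead of silently
	// dropping it, which is what happens while errHandlerFunc is nil.
	collected = append(collected, e)
}

Note that Error copies the position into a local (var pos = p.currentToken.Position) before passing &pos to NewError, so every reported error keeps its own position value rather than a pointer into the parser's mutable current token.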

Binary file not shown.


@@ -408,10 +408,10 @@ func (lex *Lexer) Lex() *Token {
 	*|;
 
 	backqote := |*
 		"{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
 		"${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
-		"$" => {lex.ungetCnt(1); fcall string_var;};
+		"$" varname_first => {lex.ungetCnt(2); fcall string_var;};
 		'`' => {lex.setTokenPosition(token); tok = TokenID(int('`')); fnext php; fbreak;};
 		any_line* when is_not_backqoute_end_or_var => {
 			lex.setTokenPosition(token);
 			tok = T_ENCAPSED_AND_WHITESPACE;
@@ -420,10 +420,10 @@ func (lex *Lexer) Lex() *Token {
 	*|;
 
 	template_string := |*
 		"{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
 		"${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
-		"$" => {lex.ungetCnt(1); fcall string_var;};
+		"$" varname_first => {lex.ungetCnt(2); fcall string_var;};
 		'"' => {lex.setTokenPosition(token); tok = TokenID(int('"')); fnext php; fbreak;};
 		any_line* when is_not_string_end_or_var => {
 			lex.setTokenPosition(token);
 			tok = T_ENCAPSED_AND_WHITESPACE;
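The "$" rule change, applied identically to the backqote and template_string machines, is the functional change in this merge: the scanner now enters the string_var machine only when the dollar sign is followed by a varname_first character, and ungetCnt goes from 1 to 2 because the rule now consumes two characters that string_var has to re-read. A "$" that cannot start a variable falls through to the any_line* rule instead and is emitted as part of T_ENCAPSED_AND_WHITESPACE, which is what the new test cases below assert. A minimal sketch of how that surfaces to a caller, written as if it lived next to the tests below (so the testing import and the token constants are in scope); the test name is hypothetical, only NewLexer and the constants visible in this diff are assumed, and the loop that drains the lexer is reduced to a comment because that part of the harness is not shown here.

// Sketch only: the four tokens listed are exactly the ones the template
// string test below adds for the new "$foo$" case; tokens produced for the
// surrounding <?php tag and whitespace are left out.
func TestTrailingDollar(t *testing.T) {
	src := `<?php
"$foo$"
`

	expected := []string{
		TokenID(int('"')).String(),
		T_VARIABLE.String(),                // $foo becomes a variable token
		T_ENCAPSED_AND_WHITESPACE.String(), // the bare trailing $ stays string content
		TokenID(int('"')).String(),
	}

	lexer := NewLexer([]byte(src), "7.4", false, nil)

	// ...drain lexer.Lex() and compare the collected token IDs against
	// expected, the way the existing tests below do; that harness code is
	// outside the hunks shown in this diff.
	_ = expected
	_ = lexer
}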


@@ -586,6 +586,8 @@ func TestTeplateStringTokens(t *testing.T) {
 	"$/$foo"
 	"$0$foo"
+	"$foo$"
 	`
 
 	expected := []string{
@@ -641,6 +643,11 @@ func TestTeplateStringTokens(t *testing.T) {
 		T_ENCAPSED_AND_WHITESPACE.String(),
 		T_VARIABLE.String(),
 		TokenID(int('"')).String(),
+		TokenID(int('"')).String(),
+		T_VARIABLE.String(),
+		T_ENCAPSED_AND_WHITESPACE.String(),
+		TokenID(int('"')).String(),
 	}
 
 	lexer := NewLexer([]byte(src), "7.4", false, nil)
@ -671,6 +678,7 @@ func TestBackquoteStringTokens(t *testing.T) {
` + "`$foo/100`" + ` ` + "`$foo/100`" + `
` + "`$/$foo`" + ` ` + "`$/$foo`" + `
` + "`$0$foo`" + ` ` + "`$0$foo`" + `
` + "`$foo$`" + `
` `
expected := []string{ expected := []string{
@ -726,6 +734,11 @@ func TestBackquoteStringTokens(t *testing.T) {
T_ENCAPSED_AND_WHITESPACE.String(), T_ENCAPSED_AND_WHITESPACE.String(),
T_VARIABLE.String(), T_VARIABLE.String(),
TokenID(int('`')).String(), TokenID(int('`')).String(),
TokenID(int('`')).String(),
T_VARIABLE.String(),
T_ENCAPSED_AND_WHITESPACE.String(),
TokenID(int('`')).String(),
} }
lexer := NewLexer([]byte(src), "7.4", false, nil) lexer := NewLexer([]byte(src), "7.4", false, nil)