Merge branch 'master' into refactoring

This commit is contained in:
Vadym Slizov 2020-07-04 18:41:12 +03:00
commit f12218d083
4 changed files with 22 additions and 9 deletions

Binary file not shown.

View File

@@ -408,10 +408,10 @@ func (lex *Lexer) Lex() *Token {
*|;
backqote := |*
"{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
"${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
"$" => {lex.ungetCnt(1); fcall string_var;};
'`' => {lex.setTokenPosition(token); tok = TokenID(int('`')); fnext php; fbreak;};
"{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
"${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
"$" varname_first => {lex.ungetCnt(2); fcall string_var;};
'`' => {lex.setTokenPosition(token); tok = TokenID(int('`')); fnext php; fbreak;};
any_line* when is_not_backqoute_end_or_var => {
lex.setTokenPosition(token);
tok = T_ENCAPSED_AND_WHITESPACE;
@@ -420,10 +420,10 @@ func (lex *Lexer) Lex() *Token {
*|;
template_string := |*
"{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
"${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
"$" => {lex.ungetCnt(1); fcall string_var;};
'"' => {lex.setTokenPosition(token); tok = TokenID(int('"')); fnext php; fbreak;};
"{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
"${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
"$" varname_first => {lex.ungetCnt(2); fcall string_var;};
'"' => {lex.setTokenPosition(token); tok = TokenID(int('"')); fnext php; fbreak;};
any_line* when is_not_string_end_or_var => {
lex.setTokenPosition(token);
tok = T_ENCAPSED_AND_WHITESPACE;

View File

@@ -586,6 +586,8 @@ func TestTeplateStringTokens(t *testing.T) {
"$/$foo"
"$0$foo"
"$foo$"
`
expected := []string{
@@ -641,6 +643,11 @@ func TestTeplateStringTokens(t *testing.T) {
T_ENCAPSED_AND_WHITESPACE.String(),
T_VARIABLE.String(),
TokenID(int('"')).String(),
TokenID(int('"')).String(),
T_VARIABLE.String(),
T_ENCAPSED_AND_WHITESPACE.String(),
TokenID(int('"')).String(),
}
lexer := NewLexer([]byte(src), "7.4", false, nil)
@@ -671,6 +678,7 @@ func TestBackquoteStringTokens(t *testing.T) {
` + "`$foo/100`" + `
` + "`$/$foo`" + `
` + "`$0$foo`" + `
` + "`$foo$`" + `
`
expected := []string{
@@ -726,6 +734,11 @@ func TestBackquoteStringTokens(t *testing.T) {
T_ENCAPSED_AND_WHITESPACE.String(),
T_VARIABLE.String(),
TokenID(int('`')).String(),
TokenID(int('`')).String(),
T_VARIABLE.String(),
T_ENCAPSED_AND_WHITESPACE.String(),
TokenID(int('`')).String(),
}
lexer := NewLexer([]byte(src), "7.4", false, nil)