Merge branch 'master' into refactoring
commit f12218d083
internal/scanner/scanner.go (generated, BIN)
Binary file not shown.
@@ -410,7 +410,7 @@ func (lex *Lexer) Lex() *Token {
	backqote := |*
		"{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
		"${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
		"$" => {lex.ungetCnt(1); fcall string_var;};
		"$" varname_first => {lex.ungetCnt(2); fcall string_var;};
		'`' => {lex.setTokenPosition(token); tok = TokenID(int('`')); fnext php; fbreak;};
		any_line* when is_not_backqoute_end_or_var => {
			lex.setTokenPosition(token);
@@ -422,7 +422,7 @@ func (lex *Lexer) Lex() *Token {
	template_string := |*
		"{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
		"${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
		"$" => {lex.ungetCnt(1); fcall string_var;};
		"$" varname_first => {lex.ungetCnt(2); fcall string_var;};
		'"' => {lex.setTokenPosition(token); tok = TokenID(int('"')); fnext php; fbreak;};
		any_line* when is_not_string_end_or_var => {
			lex.setTokenPosition(token);
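
The "$" varname_first rules above only enter string_var when the "$" is followed by a character that can begin a PHP identifier, so sequences like "$/" and "$0" can remain ordinary T_ENCAPSED_AND_WHITESPACE text; the "$/$foo" and "$0$foo" test cases below exercise exactly that. A minimal standalone Go sketch of that decision, assuming varname_first covers letters, underscore, and bytes >= 0x80 (an illustration only, not the generated scanner; varnameFirst and startsVariable are hypothetical helpers):

package main

import "fmt"

// varnameFirst approximates the varname_first class for PHP identifiers:
// a letter, an underscore, or a byte >= 0x80 (assumed here).
func varnameFirst(b byte) bool {
	return b == '_' ||
		(b >= 'a' && b <= 'z') ||
		(b >= 'A' && b <= 'Z') ||
		b >= 0x80
}

// startsVariable reports whether the "$" at index i begins an interpolated
// variable rather than plain string text.
func startsVariable(s string, i int) bool {
	return s[i] == '$' && i+1 < len(s) && varnameFirst(s[i+1])
}

func main() {
	// Prints false, false, true: only "$foo" starts a variable.
	for _, s := range []string{"$/$foo", "$0$foo", "$foo$"} {
		fmt.Printf("%q: %v\n", s, startsVariable(s, 0))
	}
}
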
@@ -586,6 +586,8 @@ func TestTeplateStringTokens(t *testing.T) {

	"$/$foo"
	"$0$foo"

	"$foo$"
	`

	expected := []string{
@@ -641,6 +643,11 @@ func TestTeplateStringTokens(t *testing.T) {
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		TokenID(int('"')).String(),

		TokenID(int('"')).String(),
		T_VARIABLE.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		TokenID(int('"')).String(),
	}

	lexer := NewLexer([]byte(src), "7.4", false, nil)
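
The expected slices above are presumably matched against the names produced by lexing src with the same NewLexer arguments. A rough sketch of such a collection loop, assumed to live inside the scanner package; how end of input is signalled and that the token's identifier is an ID field on *Token are assumptions here, not something this diff shows:

// collectTokenNames lexes src the way the tests above configure the lexer
// and returns the String() form of each token identifier.
func collectTokenNames(src string) []string {
	lexer := NewLexer([]byte(src), "7.4", false, nil)

	var names []string
	for {
		tkn := lexer.Lex()
		if tkn == nil || tkn.ID == 0 { // assumed end-of-input check
			break
		}
		names = append(names, tkn.ID.String()) // ID field is an assumption
	}
	return names
}

The resulting slice would then be compared entry by entry with expected.
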
@@ -671,6 +678,7 @@ func TestBackquoteStringTokens(t *testing.T) {
	` + "`$foo/100`" + `
	` + "`$/$foo`" + `
	` + "`$0$foo`" + `
	` + "`$foo$`" + `
	`

	expected := []string{
@@ -726,6 +734,11 @@ func TestBackquoteStringTokens(t *testing.T) {
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		TokenID(int('`')).String(),

		TokenID(int('`')).String(),
		T_VARIABLE.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		TokenID(int('`')).String(),
	}

	lexer := NewLexer([]byte(src), "7.4", false, nil)