Merge branch 'master' into refactoring
commit f12218d083
internal/scanner/scanner.go (generated): binary file not shown.
@@ -408,10 +408,10 @@ func (lex *Lexer) Lex() *Token {
 	*|;
 
 	backqote := |*
 		"{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
 		"${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
-		"$" => {lex.ungetCnt(1); fcall string_var;};
+		"$" varname_first => {lex.ungetCnt(2); fcall string_var;};
 		'`' => {lex.setTokenPosition(token); tok = TokenID(int('`')); fnext php; fbreak;};
 		any_line* when is_not_backqoute_end_or_var => {
 			lex.setTokenPosition(token);
 			tok = T_ENCAPSED_AND_WHITESPACE;
@@ -420,10 +420,10 @@ func (lex *Lexer) Lex() *Token {
 	*|;
 
 	template_string := |*
 		"{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
 		"${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
-		"$" => {lex.ungetCnt(1); fcall string_var;};
+		"$" varname_first => {lex.ungetCnt(2); fcall string_var;};
 		'"' => {lex.setTokenPosition(token); tok = TokenID(int('"')); fnext php; fbreak;};
 		any_line* when is_not_string_end_or_var => {
 			lex.setTokenPosition(token);
 			tok = T_ENCAPSED_AND_WHITESPACE;
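Note on the change above (explanatory, not part of the diff): in both the backqote and template_string machines, the "$" rule now requires a varname_first look-ahead character before calling the string_var machine, and ungets two characters (the "$" and the look-ahead) instead of one. A "$" that is not followed by a character able to start a variable name, such as the trailing "$" in "$foo$", no longer enters string_var; it falls through to the any_line* rule and is emitted inside a T_ENCAPSED_AND_WHITESPACE run, which is what the new test cases below assert. A rough, hypothetical Go approximation of the character class varname_first presumably matches (the real definition lives in the Ragel grammar, not in this diff):

// Hypothetical sketch, not taken from this diff: characters that can begin a
// PHP variable name, which is roughly what the grammar's varname_first class
// is expected to match.
func isVarnameFirst(b byte) bool {
	return b == '_' ||
		(b >= 'a' && b <= 'z') ||
		(b >= 'A' && b <= 'Z') ||
		b >= 0x80 // PHP also allows high bytes in identifiers
}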
@@ -586,6 +586,8 @@ func TestTeplateStringTokens(t *testing.T) {
 
 		"$/$foo"
 		"$0$foo"
+
+		"$foo$"
 	`
 
 	expected := []string{
@@ -641,6 +643,11 @@ func TestTeplateStringTokens(t *testing.T) {
 		T_ENCAPSED_AND_WHITESPACE.String(),
 		T_VARIABLE.String(),
 		TokenID(int('"')).String(),
+
+		TokenID(int('"')).String(),
+		T_VARIABLE.String(),
+		T_ENCAPSED_AND_WHITESPACE.String(),
+		TokenID(int('"')).String(),
 	}
 
 	lexer := NewLexer([]byte(src), "7.4", false, nil)
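A hedged reading of the four token names added to expected above: only the names come from the hunk, while the mapping to input pieces is inferred. They cover the new "$foo$" line, with the trailing "$" now lexed as encapsed text rather than as the start of a variable. A standalone Go illustration:

package main

import "fmt"

// Standalone illustration (not code from this diff) of how the new expected
// entries for "$foo$" presumably line up with the input.
func main() {
	pairs := []struct{ lexeme, token string }{
		{`"`, `TokenID(int('"'))`},           // opening double quote
		{`$foo`, "T_VARIABLE"},               // "$" followed by a varname_first character
		{"$", "T_ENCAPSED_AND_WHITESPACE"},   // trailing "$" stays encapsed text under the new rule
		{`"`, `TokenID(int('"'))`},           // closing double quote
	}
	for _, p := range pairs {
		fmt.Printf("%q -> %s\n", p.lexeme, p.token)
	}
}

The backquote test below adds the same four entries, with TokenID(int('`')) in place of the double quote.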
@@ -671,6 +678,7 @@ func TestBackquoteStringTokens(t *testing.T) {
 		` + "`$foo/100`" + `
 		` + "`$/$foo`" + `
 		` + "`$0$foo`" + `
+		` + "`$foo$`" + `
 	`
 
 	expected := []string{
@@ -726,6 +734,11 @@ func TestBackquoteStringTokens(t *testing.T) {
 		T_ENCAPSED_AND_WHITESPACE.String(),
 		T_VARIABLE.String(),
 		TokenID(int('`')).String(),
+
+		TokenID(int('`')).String(),
+		T_VARIABLE.String(),
+		T_ENCAPSED_AND_WHITESPACE.String(),
+		TokenID(int('`')).String(),
 	}
 
 	lexer := NewLexer([]byte(src), "7.4", false, nil)