Merge pull request #108 from z7zmey/issue-107
Fix infinite loop for "$foo$"
commit ee48d84609
scanner/scanner.go
File diff suppressed because it is too large
@@ -421,7 +421,7 @@ func (lex *Lexer) Lex(lval Lval) int {
     backqote := |*
         "{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
         "${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
-        "$" => {lex.ungetCnt(1); fcall string_var;};
+        "$" varname_first => {lex.ungetCnt(2); fcall string_var;};
         '`' => {lex.setTokenPosition(token); tok = TokenID(int('`')); fnext php; fbreak;};
         any_line* when is_not_backqoute_end_or_var => {
             lex.setTokenPosition(token);
@@ -433,7 +433,7 @@ func (lex *Lexer) Lex(lval Lval) int {
     template_string := |*
         "{$" => {lex.ungetCnt(1); lex.setTokenPosition(token); tok = T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
         "${" => {lex.setTokenPosition(token); tok = T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
-        "$" => {lex.ungetCnt(1); fcall string_var;};
+        "$" varname_first => {lex.ungetCnt(2); fcall string_var;};
         '"' => {lex.setTokenPosition(token); tok = TokenID(int('"')); fnext php; fbreak;};
         any_line* when is_not_string_end_or_var => {
             lex.setTokenPosition(token);
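Both hunks make the same change. The old rule matched a lone "$", ungot the one character it had consumed, and called the string_var machine; when that "$" was the last character before the closing delimiter there was no variable name to consume, so the scanner kept re-matching the same "$" without ever advancing, which is the infinite loop reported for "$foo$". The fixed rule only fires when the "$" is followed by a varname_first character, and it ungets both consumed characters so string_var re-reads the whole variable. The Go sketch below is only a toy model of that behaviour, not the project's Ragel state machine; scanToy, isVarnameFirst and the character class they use are made up for illustration.

package main

import "fmt"

// isVarnameFirst stands in for the varname_first character class used in the
// fixed rule: a '$' only starts a variable when the next byte can begin an
// identifier (this exact set is an assumption, not copied from the project).
func isVarnameFirst(b byte) bool {
	return b == '_' || (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b >= 0x80
}

// scanToy tokenizes the body of an interpolated string such as `$foo$`.
// guarded=false loosely imitates the old rule: a lone '$' matches, the scanner
// ungets it and retries, so the position never advances past a trailing '$'.
// guarded=true imitates the fixed rule: '$' is only special when followed by a
// varname_first byte; otherwise it is ordinary encapsed text.
func scanToy(src string, guarded bool) []string {
	var toks []string
	for p, steps := 0, 0; p < len(src); steps++ {
		if steps > 4*len(src) {
			return append(toks, "INFINITE LOOP")
		}
		if src[p] == '$' {
			if p+1 < len(src) && isVarnameFirst(src[p+1]) {
				start := p
				p++
				for p < len(src) && isVarnameFirst(src[p]) {
					p++
				}
				toks = append(toks, "T_VARIABLE("+src[start:p]+")")
				continue
			}
			if !guarded {
				// old behaviour: consume '$', unget it and try again;
				// nothing was kept, so the same rule matches forever
				continue
			}
			// fixed behaviour: the bare '$' falls through to the text rule
		}
		start := p
		for p < len(src) {
			if src[p] == '$' && (!guarded || (p+1 < len(src) && isVarnameFirst(src[p+1]))) {
				break
			}
			p++
		}
		toks = append(toks, "T_ENCAPSED_AND_WHITESPACE("+src[start:p]+")")
	}
	return toks
}

func main() {
	fmt.Println(scanToy("$foo$", false)) // [T_VARIABLE($foo) INFINITE LOOP]
	fmt.Println(scanToy("$foo$", true))  // [T_VARIABLE($foo) T_ENCAPSED_AND_WHITESPACE($)]
}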
@@ -598,6 +598,8 @@ func TestTeplateStringTokens(t *testing.T) {
 
         "$/$foo"
         "$0$foo"
+
+        "$foo$"
     `
 
     expected := []string{
@@ -653,6 +655,11 @@ func TestTeplateStringTokens(t *testing.T) {
         T_ENCAPSED_AND_WHITESPACE.String(),
         T_VARIABLE.String(),
         TokenID(int('"')).String(),
+
+        TokenID(int('"')).String(),
+        T_VARIABLE.String(),
+        T_ENCAPSED_AND_WHITESPACE.String(),
+        TokenID(int('"')).String(),
     }
 
     lexer := NewLexer([]byte(src))
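Read together with the previous hunk: the new "$foo$" input is expected to yield the opening quote, T_VARIABLE for $foo, T_ENCAPSED_AND_WHITESPACE for the trailing bare $, and the closing quote; the backquote test further down adds the symmetric case. A small illustrative program that consolidates both expectations as plain strings (the real tests use TokenID(...) and the T_* constants from the scanner package):

package main

import "fmt"

func main() {
	// Token names the two new inputs are expected to produce, read off the
	// expected slices added in this diff.
	cases := []struct {
		src  string
		want []string
	}{
		{`"$foo$"`, []string{`"`, "T_VARIABLE", "T_ENCAPSED_AND_WHITESPACE", `"`}},
		{"`$foo$`", []string{"`", "T_VARIABLE", "T_ENCAPSED_AND_WHITESPACE", "`"}},
	}
	for _, c := range cases {
		fmt.Printf("%-9s => %v\n", c.src, c.want)
	}
}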
@@ -684,6 +691,7 @@ func TestBackquoteStringTokens(t *testing.T) {
         ` + "`$foo/100`" + `
         ` + "`$/$foo`" + `
         ` + "`$0$foo`" + `
+        ` + "`$foo$`" + `
     `
 
     expected := []string{
@@ -739,6 +747,11 @@ func TestBackquoteStringTokens(t *testing.T) {
         T_ENCAPSED_AND_WHITESPACE.String(),
         T_VARIABLE.String(),
         TokenID(int('`')).String(),
+
+        TokenID(int('`')).String(),
+        T_VARIABLE.String(),
+        T_ENCAPSED_AND_WHITESPACE.String(),
+        TokenID(int('`')).String(),
     }
 
     lexer := NewLexer([]byte(src))
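A side note on reproducing this class of regression: an infinite loop in the lexer simply hangs go test until the package-level timeout. A self-contained helper like the sketch below (not part of this repository; runWithTimeout is a made-up name) can bound a suspect call and fail fast instead:

package main

import (
	"fmt"
	"time"
)

// runWithTimeout reports whether fn returned within d. If fn loops forever
// the goroutine is leaked, but the caller gets a prompt failure signal
// instead of waiting for the whole test-binary timeout.
func runWithTimeout(d time.Duration, fn func()) bool {
	done := make(chan struct{})
	go func() {
		defer close(done)
		fn()
	}()
	select {
	case <-done:
		return true
	case <-time.After(d):
		return false
	}
}

func main() {
	finished := runWithTimeout(time.Second, func() {
		// e.g. drain every token the lexer yields for `"$foo$"` here
	})
	fmt.Println("finished in time:", finished)
}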