2019-03-10 21:37:01 +00:00
|
|
|
package scanner
|
2018-02-17 10:53:10 +00:00
|
|
|
|
|
|
|
import (
|
|
|
|
"testing"
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
"github.com/z7zmey/php-parser/pkg/token"
|
2019-02-25 16:11:28 +00:00
|
|
|
"gotest.tools/assert"
|
2018-02-17 10:53:10 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
// lval is a minimal semantic-value holder passed to Lexer.Lex in the
// tests below. The lexer hands the scanned token back through the
// Token callback, so after each Lex call the tests can inspect it here.
type lval struct {
	// Tkn is the most recently scanned token, as delivered via Token.
	Tkn *Token
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
// Token records the scanned token on the receiver. The lexer invokes it
// once per Lex call, so afterwards lv.Tkn holds the token (value and any
// attached free-floating tokens) that was just produced.
func (lv *lval) Token(t *Token) {
	lv.Tkn = t
}
|
|
|
|
|
2018-03-30 08:11:41 +00:00
|
|
|
// TestTokens runs the lexer over a snippet exercising (nearly) every token
// kind the scanner can produce — inline HTML, open/close tags, keywords,
// magic constants, operators, comments, casts and single-character tokens —
// and asserts the emitted token-ID stream matches, in order. Comments in the
// source are hidden tokens and therefore do not appear in the expected list.
func TestTokens(t *testing.T) {
	// "yield from" and "->" deliberately embed a "\t\r\n" run to check
	// that whitespace inside those compound tokens is tolerated.
	src := `inline html -
<? ?>
<?= ?>
<?php

abstract
array
as
break
callable
case
catch
class
clone
const
continue
declare
default
do
echo
else
elseif
empty
enddeclare
endfor
endforeach
endif
endswitch
endwhile
eval
exit
extends
final
finally
for
foreach
function
cfunction
global
goto
if
isset
implements
instanceof
insteadof
interface
list
namespace
private
public
print
protected
return
static
switch
throw
trait
try
unset
use
var
while
yield ` + "\t\r\n" + ` from
yield
include
include_once
require
require_once

__CLASS__
__DIR__
__FILE__
__FUNCTION__
__LINE__
__NAMESPACE__
__METHOD__
__TRAIT__
__halt_compiler

new
and
or
xor

\
...
::
&&
||
&=
|=
.=
*=
**=
/=
+=
-=
^=
%=
--
++
=>
<=>
!=
<>
!==
==
===
<<=
>>=
>=
<=
**
<<
>>
??

# inline comment
// inline comment

/*
multiline comment
*/

/**
* PHP Doc comment
*/

;
:
,
.
[
]
(
)
|
/
^
&
+
-
*
=
%
!
~
$
<
>
?
@
{
}

$var
str

-> ` + "\t\r\n" + ` ->prop

( array )
( bool )
( boolean )
( real )
( double )
( float )
( int )
( integer )
( object )
( string )
( binary )
( unset )

`

	expected := []string{
		// leading inline HTML, short tags (a `?>` emits an implicit ';'),
		// and echo tag
		T_INLINE_HTML.String(),
		TokenID(int(';')).String(),
		T_INLINE_HTML.String(),
		T_ECHO.String(),
		TokenID(int(';')).String(),
		T_INLINE_HTML.String(),

		// keywords (note: `cfunction` also scans as T_FUNCTION)
		T_ABSTRACT.String(),
		T_ARRAY.String(),
		T_AS.String(),
		T_BREAK.String(),
		T_CALLABLE.String(),
		T_CASE.String(),
		T_CATCH.String(),
		T_CLASS.String(),
		T_CLONE.String(),
		T_CONST.String(),
		T_CONTINUE.String(),
		T_DECLARE.String(),
		T_DEFAULT.String(),
		T_DO.String(),
		T_ECHO.String(),
		T_ELSE.String(),
		T_ELSEIF.String(),
		T_EMPTY.String(),
		T_ENDDECLARE.String(),
		T_ENDFOR.String(),
		T_ENDFOREACH.String(),
		T_ENDIF.String(),
		T_ENDSWITCH.String(),
		T_ENDWHILE.String(),
		T_EVAL.String(),
		T_EXIT.String(),
		T_EXTENDS.String(),
		T_FINAL.String(),
		T_FINALLY.String(),
		T_FOR.String(),
		T_FOREACH.String(),
		T_FUNCTION.String(),
		T_FUNCTION.String(),
		T_GLOBAL.String(),
		T_GOTO.String(),
		T_IF.String(),
		T_ISSET.String(),
		T_IMPLEMENTS.String(),
		T_INSTANCEOF.String(),
		T_INSTEADOF.String(),
		T_INTERFACE.String(),
		T_LIST.String(),
		T_NAMESPACE.String(),
		T_PRIVATE.String(),
		T_PUBLIC.String(),
		T_PRINT.String(),
		T_PROTECTED.String(),
		T_RETURN.String(),
		T_STATIC.String(),
		T_SWITCH.String(),
		T_THROW.String(),
		T_TRAIT.String(),
		T_TRY.String(),
		T_UNSET.String(),
		T_USE.String(),
		T_VAR.String(),
		T_WHILE.String(),
		T_YIELD_FROM.String(),
		T_YIELD.String(),
		T_INCLUDE.String(),
		T_INCLUDE_ONCE.String(),
		T_REQUIRE.String(),
		T_REQUIRE_ONCE.String(),

		// magic constants
		T_CLASS_C.String(),
		T_DIR.String(),
		T_FILE.String(),
		T_FUNC_C.String(),
		T_LINE.String(),
		T_NS_C.String(),
		T_METHOD_C.String(),
		T_TRAIT_C.String(),
		T_HALT_COMPILER.String(),

		// word-operators
		T_NEW.String(),
		T_LOGICAL_AND.String(),
		T_LOGICAL_OR.String(),
		T_LOGICAL_XOR.String(),

		// multi-character operators
		T_NS_SEPARATOR.String(),
		T_ELLIPSIS.String(),
		T_PAAMAYIM_NEKUDOTAYIM.String(),
		T_BOOLEAN_AND.String(),
		T_BOOLEAN_OR.String(),
		T_AND_EQUAL.String(),
		T_OR_EQUAL.String(),
		T_CONCAT_EQUAL.String(),
		T_MUL_EQUAL.String(),
		T_POW_EQUAL.String(),
		T_DIV_EQUAL.String(),
		T_PLUS_EQUAL.String(),
		T_MINUS_EQUAL.String(),
		T_XOR_EQUAL.String(),
		T_MOD_EQUAL.String(),
		T_DEC.String(),
		T_INC.String(),
		T_DOUBLE_ARROW.String(),
		T_SPACESHIP.String(),
		T_IS_NOT_EQUAL.String(),
		T_IS_NOT_EQUAL.String(),
		T_IS_NOT_IDENTICAL.String(),
		T_IS_EQUAL.String(),
		T_IS_IDENTICAL.String(),
		T_SL_EQUAL.String(),
		T_SR_EQUAL.String(),
		T_IS_GREATER_OR_EQUAL.String(),
		T_IS_SMALLER_OR_EQUAL.String(),
		T_POW.String(),
		T_SL.String(),
		T_SR.String(),
		T_COALESCE.String(),

		// single-character tokens (the comments in src are skipped)
		TokenID(int(';')).String(),
		TokenID(int(':')).String(),
		TokenID(int(',')).String(),
		TokenID(int('.')).String(),
		TokenID(int('[')).String(),
		TokenID(int(']')).String(),
		TokenID(int('(')).String(),
		TokenID(int(')')).String(),
		TokenID(int('|')).String(),
		TokenID(int('/')).String(),
		TokenID(int('^')).String(),
		TokenID(int('&')).String(),
		TokenID(int('+')).String(),
		TokenID(int('-')).String(),
		TokenID(int('*')).String(),
		TokenID(int('=')).String(),
		TokenID(int('%')).String(),
		TokenID(int('!')).String(),
		TokenID(int('~')).String(),
		TokenID(int('$')).String(),
		TokenID(int('<')).String(),
		TokenID(int('>')).String(),
		TokenID(int('?')).String(),
		TokenID(int('@')).String(),
		TokenID(int('{')).String(),
		TokenID(int('}')).String(),

		// identifiers
		T_VARIABLE.String(),
		T_STRING.String(),

		// object operator, with and without a following property name
		T_OBJECT_OPERATOR.String(),
		T_OBJECT_OPERATOR.String(),
		T_STRING.String(),

		// cast tokens (aliases map to the same cast token)
		T_ARRAY_CAST.String(),
		T_BOOL_CAST.String(),
		T_BOOL_CAST.String(),
		T_DOUBLE_CAST.String(),
		T_DOUBLE_CAST.String(),
		T_DOUBLE_CAST.String(),
		T_INT_CAST.String(),
		T_INT_CAST.String(),
		T_OBJECT_CAST.String(),
		T_STRING_CAST.String(),
		T_STRING_CAST.String(),
		T_UNSET_CAST.String(),
	}

	lexer := NewLexer([]byte(src))
	lexer.WithTokens = true
	lv := &lval{}
	actual := []string{}

	// Drain the lexer; Lex returns 0 at end of input.
	for {
		token := lexer.Lex(lv)
		if token == 0 {
			break
		}

		actual = append(actual, TokenID(token).String())
	}

	assert.DeepEqual(t, expected, actual)
}
|
|
|
|
|
2019-12-29 22:20:20 +00:00
|
|
|
func TestShebang(t *testing.T) {
|
|
|
|
src := `#!/usr/bin/env php
|
|
|
|
<?php
|
|
|
|
0.1
|
|
|
|
`
|
|
|
|
|
|
|
|
expected := []string{
|
|
|
|
"#!/usr/bin/env php\n",
|
|
|
|
"<?php",
|
|
|
|
"\n",
|
|
|
|
}
|
|
|
|
|
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2019-12-29 22:20:20 +00:00
|
|
|
lv := &lval{}
|
|
|
|
actual := []string{}
|
|
|
|
|
|
|
|
token := lexer.Lex(lv)
|
|
|
|
assert.Equal(t, token, int(T_DNUMBER))
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
for _, tt := range lv.Tkn.Tokens {
|
|
|
|
actual = append(actual, string(tt.Value))
|
2019-12-29 22:20:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
assert.DeepEqual(t, expected, actual)
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestShebangHtml(t *testing.T) {
|
|
|
|
src := `#!/usr/bin/env php
|
|
|
|
<br/><?php
|
|
|
|
0.1
|
|
|
|
`
|
|
|
|
|
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2019-12-29 22:20:20 +00:00
|
|
|
lv := &lval{}
|
|
|
|
|
|
|
|
token := lexer.Lex(lv)
|
|
|
|
assert.Equal(t, token, int(T_INLINE_HTML))
|
2020-05-12 21:16:36 +00:00
|
|
|
assert.Equal(t, string(lv.Tkn.Tokens[0].Value), "#!/usr/bin/env php\n")
|
2019-12-29 22:20:20 +00:00
|
|
|
|
|
|
|
token = lexer.Lex(lv)
|
|
|
|
assert.Equal(t, token, int(T_DNUMBER))
|
|
|
|
}
|
|
|
|
|
2019-12-26 21:54:44 +00:00
|
|
|
func TestNumberTokens(t *testing.T) {
|
|
|
|
src := `<?php
|
|
|
|
0.1
|
|
|
|
.1
|
|
|
|
1e10
|
|
|
|
.1e10
|
|
|
|
|
|
|
|
0b01111111_11111111_11111111_11111111_11111111_11111111_11111111_11111111
|
|
|
|
0b10111111_11111111_11111111_11111111_11111111_11111111_11111111_11111111
|
|
|
|
|
|
|
|
0x0_7FFF_FFFF_FFFF_FFFF
|
|
|
|
0x8111_1111_1111_1111
|
|
|
|
|
|
|
|
92233_72036_85477_5807
|
|
|
|
0_77777_77777_77777_77777_7
|
|
|
|
|
|
|
|
92233_72036_85477_5808
|
|
|
|
0_77777_77777_77777_77777_70
|
|
|
|
`
|
|
|
|
|
|
|
|
expected := []string{
|
|
|
|
T_DNUMBER.String(),
|
|
|
|
T_DNUMBER.String(),
|
|
|
|
T_DNUMBER.String(),
|
|
|
|
T_DNUMBER.String(),
|
|
|
|
|
|
|
|
T_LNUMBER.String(),
|
|
|
|
T_DNUMBER.String(),
|
|
|
|
|
|
|
|
T_LNUMBER.String(),
|
|
|
|
T_DNUMBER.String(),
|
|
|
|
|
|
|
|
T_LNUMBER.String(),
|
|
|
|
T_LNUMBER.String(),
|
|
|
|
|
|
|
|
T_DNUMBER.String(),
|
|
|
|
T_DNUMBER.String(),
|
|
|
|
}
|
|
|
|
|
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2019-12-26 21:54:44 +00:00
|
|
|
lv := &lval{}
|
|
|
|
actual := []string{}
|
|
|
|
|
|
|
|
for {
|
|
|
|
token := lexer.Lex(lv)
|
|
|
|
if token == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
|
|
|
|
actual = append(actual, TokenID(token).String())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.DeepEqual(t, expected, actual)
|
|
|
|
}
|
|
|
|
|
2019-06-07 06:33:35 +00:00
|
|
|
func TestConstantStrings(t *testing.T) {
|
|
|
|
src := `<?
|
|
|
|
'str'
|
|
|
|
'\''
|
|
|
|
'\\'
|
|
|
|
|
|
|
|
b"str"
|
|
|
|
"\""
|
|
|
|
"\\"
|
|
|
|
|
|
|
|
"\$var"
|
|
|
|
"$4"
|
|
|
|
"$"
|
|
|
|
"$\\"
|
|
|
|
|
|
|
|
"{"
|
|
|
|
"{a"
|
|
|
|
"\{$"
|
2020-01-08 07:31:40 +00:00
|
|
|
"{\""
|
2019-06-07 06:33:35 +00:00
|
|
|
`
|
|
|
|
|
|
|
|
expected := []string{
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
2020-01-08 07:47:23 +00:00
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
2019-06-07 06:33:35 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2019-06-07 06:33:35 +00:00
|
|
|
lv := &lval{}
|
|
|
|
actual := []string{}
|
|
|
|
|
|
|
|
for {
|
|
|
|
token := lexer.Lex(lv)
|
|
|
|
if token == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
|
|
|
|
actual = append(actual, TokenID(token).String())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.DeepEqual(t, expected, actual)
|
|
|
|
}
|
|
|
|
|
2018-07-23 16:27:06 +00:00
|
|
|
func TestSingleQuoteStringTokens(t *testing.T) {
|
|
|
|
src := `<?php
|
|
|
|
'str $var str'
|
|
|
|
|
|
|
|
'\''
|
|
|
|
|
|
|
|
'\'
|
|
|
|
'
|
|
|
|
|
|
|
|
'\
|
|
|
|
\''
|
|
|
|
|
|
|
|
'\\'
|
|
|
|
|
|
|
|
'\\
|
|
|
|
'
|
|
|
|
|
|
|
|
'\
|
|
|
|
\''
|
|
|
|
`
|
|
|
|
|
2018-07-24 19:24:32 +00:00
|
|
|
expected := []string{
|
2019-03-10 21:37:01 +00:00
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
2018-07-23 16:27:06 +00:00
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2018-03-30 08:11:41 +00:00
|
|
|
lv := &lval{}
|
2018-07-24 19:24:32 +00:00
|
|
|
actual := []string{}
|
2018-03-30 08:11:41 +00:00
|
|
|
|
|
|
|
for {
|
|
|
|
token := lexer.Lex(lv)
|
2019-03-10 21:37:01 +00:00
|
|
|
if token == 0 {
|
2018-03-30 08:11:41 +00:00
|
|
|
break
|
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
actual = append(actual, TokenID(token).String())
|
2018-03-30 08:11:41 +00:00
|
|
|
}
|
|
|
|
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-03-30 08:11:41 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// TestTeplateStringTokens verifies interpolation scanning inside
// double-quoted strings: simple $var substitution, curly {$var} and
// ${var_name} forms, and dollar signs that do NOT begin a variable
// (e.g. "$4", "$/", a bare "$").
// NOTE(review): "Teplate" looks like a typo for "Template"; renaming would
// change the public test name, so it is left as-is.
func TestTeplateStringTokens(t *testing.T) {
	src := `<?php
"foo $a"

"foo $a{$b}"

"test $var {$var} ${var_name} {s $ \$a "

"{$var}"

"$foo/"
"$foo/100;"

"$/$foo"
"$0$foo"
`

	expected := []string{
		// "foo $a"
		TokenID(int('"')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		TokenID(int('"')).String(),

		// "foo $a{$b}"
		TokenID(int('"')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		T_CURLY_OPEN.String(),
		T_VARIABLE.String(),
		TokenID(int('}')).String(),
		TokenID(int('"')).String(),

		// "test $var {$var} ${var_name} {s $ \$a "
		TokenID(int('"')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_CURLY_OPEN.String(),
		T_VARIABLE.String(),
		TokenID(int('}')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_DOLLAR_OPEN_CURLY_BRACES.String(),
		T_STRING_VARNAME.String(),
		TokenID(int('}')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		TokenID(int('"')).String(),

		// "{$var}"
		TokenID(int('"')).String(),
		T_CURLY_OPEN.String(),
		T_VARIABLE.String(),
		TokenID(int('}')).String(),
		TokenID(int('"')).String(),

		// "$foo/" — '/' cannot continue a variable name
		TokenID(int('"')).String(),
		T_VARIABLE.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		TokenID(int('"')).String(),

		// "$foo/100;"
		TokenID(int('"')).String(),
		T_VARIABLE.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		TokenID(int('"')).String(),

		// "$/$foo" — leading '$' is plain text
		TokenID(int('"')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		TokenID(int('"')).String(),

		// "$0$foo" — '$0' is not a variable
		TokenID(int('"')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		TokenID(int('"')).String(),
	}

	lexer := NewLexer([]byte(src))
	lexer.WithTokens = true
	lv := &lval{}
	actual := []string{}

	for {
		token := lexer.Lex(lv)
		if token == 0 {
			break
		}

		actual = append(actual, TokenID(token).String())
	}

	assert.DeepEqual(t, expected, actual)
}
|
|
|
|
|
|
|
|
// TestBackquoteStringTokens mirrors the template-string test for shell-exec
// backquote strings: interpolation inside `...` follows the same rules as
// inside double quotes, with '`' as the delimiter token.
func TestBackquoteStringTokens(t *testing.T) {
	src := `<?php
` + "`foo $a`" + `
` + "`foo $a{$b}`" + `

` + "`test $var {$var} ${var_name} {s $ \\$a `" + `

` + "`{$var}`" + `
` + "`$foo/`" + `
` + "`$foo/100`" + `
` + "`$/$foo`" + `
` + "`$0$foo`" + `
`

	expected := []string{
		// `foo $a`
		TokenID(int('`')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		TokenID(int('`')).String(),

		// `foo $a{$b}`
		TokenID(int('`')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		T_CURLY_OPEN.String(),
		T_VARIABLE.String(),
		TokenID(int('}')).String(),
		TokenID(int('`')).String(),

		// `test $var {$var} ${var_name} {s $ \$a `
		TokenID(int('`')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_CURLY_OPEN.String(),
		T_VARIABLE.String(),
		TokenID(int('}')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_DOLLAR_OPEN_CURLY_BRACES.String(),
		T_STRING_VARNAME.String(),
		TokenID(int('}')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		TokenID(int('`')).String(),

		// `{$var}`
		TokenID(int('`')).String(),
		T_CURLY_OPEN.String(),
		T_VARIABLE.String(),
		TokenID(int('}')).String(),
		TokenID(int('`')).String(),

		// `$foo/`
		TokenID(int('`')).String(),
		T_VARIABLE.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		TokenID(int('`')).String(),

		// `$foo/100`
		TokenID(int('`')).String(),
		T_VARIABLE.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		TokenID(int('`')).String(),

		// `$/$foo`
		TokenID(int('`')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		TokenID(int('`')).String(),

		// `$0$foo`
		TokenID(int('`')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		TokenID(int('`')).String(),
	}

	lexer := NewLexer([]byte(src))
	lexer.WithTokens = true
	lv := &lval{}
	actual := []string{}

	for {
		token := lexer.Lex(lv)
		if token == 0 {
			break
		}

		actual = append(actual, TokenID(token).String())
	}

	assert.DeepEqual(t, expected, actual)
}
|
|
|
|
|
|
|
|
// TestHereDocTokens covers the three heredoc flavors — bare <<<CAT,
// nowdoc <<<'CAT' (no interpolation), and quoted <<<"CAT" — and the full
// interpolation grammar inside the quoted form: property access, offset
// access with numeric/string/variable indices, {$var}, and ${var_name}.
func TestHereDocTokens(t *testing.T) {
	src := `<?php
<<<CAT
test
CAT;

<<<'CAT'
test
CAT;

<<<"CAT"
$var->prop
$var[1]
$var[0x1]
$var[0b1]
$var[var_name]
$var[$var]

{$var}
${var_name}
{s $ \$a
CAT;
`

	expected := []string{
		// plain heredoc
		T_START_HEREDOC.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_END_HEREDOC.String(),
		TokenID(int(';')).String(),

		// nowdoc: body is a single opaque chunk
		T_START_HEREDOC.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_END_HEREDOC.String(),
		TokenID(int(';')).String(),

		// quoted heredoc with interpolation
		T_START_HEREDOC.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		T_OBJECT_OPERATOR.String(),
		T_STRING.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		TokenID(int('[')).String(),
		T_NUM_STRING.String(),
		TokenID(int(']')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		TokenID(int('[')).String(),
		T_NUM_STRING.String(),
		TokenID(int(']')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		TokenID(int('[')).String(),
		T_NUM_STRING.String(),
		TokenID(int(']')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		TokenID(int('[')).String(),
		T_STRING.String(),
		TokenID(int(']')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		TokenID(int('[')).String(),
		T_VARIABLE.String(),
		TokenID(int(']')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_CURLY_OPEN.String(),
		T_VARIABLE.String(),
		TokenID(int('}')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_DOLLAR_OPEN_CURLY_BRACES.String(),
		T_STRING_VARNAME.String(),
		TokenID(int('}')).String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_END_HEREDOC.String(),
		TokenID(int(';')).String(),
	}

	lexer := NewLexer([]byte(src))
	lexer.WithTokens = true
	lv := &lval{}
	actual := []string{}

	for {
		token := lexer.Lex(lv)
		if token == 0 {
			break
		}

		actual = append(actual, TokenID(token).String())
	}

	assert.DeepEqual(t, expected, actual)
}
|
2018-03-29 13:46:21 +00:00
|
|
|
|
2018-04-05 21:24:00 +00:00
|
|
|
// TestHereDocTokens2 exercises dollar signs inside heredoc bodies that do
// or do not start a variable: '/' terminating a variable name, "$/" and
// "$0" scanning as plain text, and consecutive variables ("$foo$bar").
// The last heredoc has no trailing ';' to check end-of-input handling.
func TestHereDocTokens2(t *testing.T) {
	src := `<?php
<<<CAT
$foo/
CAT;

<<<CAT
$foo/100
CAT;

<<<CAT
$/$foo
CAT;

<<<CAT
$0$foo
CAT;

<<<CAT
$foo$bar\
CAT
`

	expected := []string{
		// $foo/
		T_START_HEREDOC.String(),
		T_VARIABLE.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_END_HEREDOC.String(),
		TokenID(int(';')).String(),

		// $foo/100
		T_START_HEREDOC.String(),
		T_VARIABLE.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_END_HEREDOC.String(),
		TokenID(int(';')).String(),

		// $/$foo — leading '$' is text
		T_START_HEREDOC.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_END_HEREDOC.String(),
		TokenID(int(';')).String(),

		// $0$foo — '$0' is text
		T_START_HEREDOC.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_VARIABLE.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_END_HEREDOC.String(),
		TokenID(int(';')).String(),

		// $foo$bar\ — two adjacent variables, then text
		T_START_HEREDOC.String(),
		T_VARIABLE.String(),
		T_VARIABLE.String(),
		T_ENCAPSED_AND_WHITESPACE.String(),
		T_END_HEREDOC.String(),
	}

	lexer := NewLexer([]byte(src))
	lexer.WithTokens = true
	lv := &lval{}
	actual := []string{}

	for {
		token := lexer.Lex(lv)
		if token == 0 {
			break
		}

		actual = append(actual, TokenID(token).String())
	}

	assert.DeepEqual(t, expected, actual)
}
|
|
|
|
|
|
|
|
func TestHereDocTokens3(t *testing.T) {
|
|
|
|
src := `<?php
|
|
|
|
|
|
|
|
<<<"CAT"
|
|
|
|
\\{$a['b']}
|
|
|
|
CAT;
|
|
|
|
`
|
|
|
|
|
|
|
|
expected := []string{
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
T_START_HEREDOC.String(),
|
|
|
|
T_ENCAPSED_AND_WHITESPACE.String(),
|
|
|
|
T_CURLY_OPEN.String(),
|
|
|
|
T_VARIABLE.String(),
|
|
|
|
TokenID(int('[')).String(),
|
|
|
|
T_CONSTANT_ENCAPSED_STRING.String(),
|
|
|
|
TokenID(int(']')).String(),
|
|
|
|
TokenID(int('}')).String(),
|
|
|
|
T_ENCAPSED_AND_WHITESPACE.String(),
|
|
|
|
T_END_HEREDOC.String(),
|
|
|
|
TokenID(int(';')).String(),
|
2018-07-14 15:00:48 +00:00
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2018-07-14 15:00:48 +00:00
|
|
|
lv := &lval{}
|
|
|
|
actual := []string{}
|
|
|
|
|
2019-12-26 13:41:06 +00:00
|
|
|
for {
|
|
|
|
token := lexer.Lex(lv)
|
|
|
|
if token == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
|
|
|
|
actual = append(actual, TokenID(token).String())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.DeepEqual(t, expected, actual)
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestHereDocTokens73(t *testing.T) {
|
|
|
|
src := `<?php
|
|
|
|
<<<"CAT"
|
|
|
|
text
|
|
|
|
CAT, $b`
|
|
|
|
|
|
|
|
expected := []string{
|
|
|
|
|
|
|
|
T_START_HEREDOC.String(),
|
|
|
|
T_ENCAPSED_AND_WHITESPACE.String(),
|
|
|
|
T_END_HEREDOC.String(),
|
|
|
|
TokenID(int(',')).String(),
|
|
|
|
T_VARIABLE.String(),
|
|
|
|
}
|
|
|
|
|
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2019-12-26 13:41:06 +00:00
|
|
|
lv := &lval{}
|
|
|
|
actual := []string{}
|
|
|
|
|
|
|
|
for {
|
|
|
|
token := lexer.Lex(lv)
|
|
|
|
if token == 0 {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
|
|
|
|
actual = append(actual, TokenID(token).String())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.DeepEqual(t, expected, actual)
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestHereDocTokensBefore73(t *testing.T) {
|
|
|
|
src := `<?php
|
|
|
|
<<<"CAT"
|
|
|
|
CAT
|
|
|
|
CAT;`
|
|
|
|
|
|
|
|
expected := []string{
|
|
|
|
|
|
|
|
T_START_HEREDOC.String(),
|
|
|
|
T_ENCAPSED_AND_WHITESPACE.String(),
|
|
|
|
T_END_HEREDOC.String(),
|
|
|
|
TokenID(int(';')).String(),
|
|
|
|
}
|
|
|
|
|
|
|
|
lexer := NewLexer([]byte(src))
|
|
|
|
lexer.PHPVersion = "7.2"
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2019-12-26 13:41:06 +00:00
|
|
|
lv := &lval{}
|
|
|
|
actual := []string{}
|
|
|
|
|
2018-07-14 15:00:48 +00:00
|
|
|
for {
|
|
|
|
token := lexer.Lex(lv)
|
2019-03-10 21:37:01 +00:00
|
|
|
if token == 0 {
|
2018-07-14 15:00:48 +00:00
|
|
|
break
|
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
actual = append(actual, TokenID(token).String())
|
2018-04-05 21:24:00 +00:00
|
|
|
}
|
|
|
|
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-04-05 21:24:00 +00:00
|
|
|
}
|
|
|
|
|
2018-04-01 21:02:13 +00:00
|
|
|
func TestInlineHtmlNopTokens(t *testing.T) {
|
|
|
|
src := `<?php
|
|
|
|
$a; ?> test <?php
|
|
|
|
$a ?> test
|
|
|
|
`
|
|
|
|
|
2018-07-14 15:00:48 +00:00
|
|
|
expected := []string{
|
2019-03-10 21:37:01 +00:00
|
|
|
T_VARIABLE.String(),
|
|
|
|
TokenID(int(';')).String(),
|
|
|
|
T_INLINE_HTML.String(),
|
2018-04-01 21:02:13 +00:00
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
T_VARIABLE.String(),
|
|
|
|
TokenID(int(';')).String(),
|
|
|
|
T_INLINE_HTML.String(),
|
2018-04-01 21:02:13 +00:00
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2018-04-01 21:02:13 +00:00
|
|
|
lv := &lval{}
|
2018-07-14 15:00:48 +00:00
|
|
|
actual := []string{}
|
2018-04-01 21:02:13 +00:00
|
|
|
|
|
|
|
for {
|
|
|
|
token := lexer.Lex(lv)
|
2019-03-10 21:37:01 +00:00
|
|
|
if token == 0 {
|
2018-04-01 21:02:13 +00:00
|
|
|
break
|
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
actual = append(actual, TokenID(token).String())
|
2018-04-01 21:02:13 +00:00
|
|
|
}
|
|
|
|
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-04-01 21:02:13 +00:00
|
|
|
}
|
|
|
|
|
2018-03-29 13:46:21 +00:00
|
|
|
func TestStringTokensAfterVariable(t *testing.T) {
|
|
|
|
src := `<?php "test \"$var\""`
|
|
|
|
|
2018-07-14 15:00:48 +00:00
|
|
|
expected := []string{
|
2019-03-10 21:37:01 +00:00
|
|
|
TokenID(int('"')).String(),
|
|
|
|
T_ENCAPSED_AND_WHITESPACE.String(),
|
|
|
|
T_VARIABLE.String(),
|
|
|
|
T_ENCAPSED_AND_WHITESPACE.String(),
|
|
|
|
TokenID(int('"')).String(),
|
2018-03-29 13:46:21 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
expectedTokens := []string{
|
|
|
|
"\"",
|
|
|
|
"test \\\"",
|
|
|
|
"$var",
|
|
|
|
"\\\"",
|
|
|
|
"\"",
|
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2018-03-29 13:46:21 +00:00
|
|
|
lv := &lval{}
|
2018-07-14 15:00:48 +00:00
|
|
|
actual := []string{}
|
2018-03-29 13:46:21 +00:00
|
|
|
actualTokens := []string{}
|
|
|
|
|
|
|
|
for {
|
|
|
|
token := lexer.Lex(lv)
|
2019-03-10 21:37:01 +00:00
|
|
|
if token == 0 {
|
2018-03-29 13:46:21 +00:00
|
|
|
break
|
|
|
|
}
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
actualTokens = append(actualTokens, string(lv.Tkn.Value))
|
2019-03-10 21:37:01 +00:00
|
|
|
actual = append(actual, TokenID(token).String())
|
2018-03-29 13:46:21 +00:00
|
|
|
}
|
|
|
|
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
|
|
|
assert.DeepEqual(t, expectedTokens, actualTokens)
|
2018-03-29 13:46:21 +00:00
|
|
|
}
|
2018-03-30 11:28:50 +00:00
|
|
|
|
|
|
|
func TestSlashAfterVariable(t *testing.T) {
|
|
|
|
src := `<?php $foo/3`
|
|
|
|
|
2018-07-14 15:00:48 +00:00
|
|
|
expected := []string{
|
2019-03-10 21:37:01 +00:00
|
|
|
T_VARIABLE.String(),
|
|
|
|
TokenID(int('/')).String(),
|
|
|
|
T_LNUMBER.String(),
|
2018-03-30 11:28:50 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
expectedTokens := []string{
|
|
|
|
"$foo",
|
|
|
|
"/",
|
|
|
|
"3",
|
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2018-03-30 11:28:50 +00:00
|
|
|
lv := &lval{}
|
2018-07-14 15:00:48 +00:00
|
|
|
actual := []string{}
|
2018-03-30 11:28:50 +00:00
|
|
|
actualTokens := []string{}
|
|
|
|
|
|
|
|
for {
|
|
|
|
token := lexer.Lex(lv)
|
2019-03-10 21:37:01 +00:00
|
|
|
if token == 0 {
|
2018-03-30 11:28:50 +00:00
|
|
|
break
|
|
|
|
}
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
actualTokens = append(actualTokens, string(lv.Tkn.Value))
|
2019-03-10 21:37:01 +00:00
|
|
|
actual = append(actual, TokenID(token).String())
|
2018-03-30 11:28:50 +00:00
|
|
|
}
|
|
|
|
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
|
|
|
assert.DeepEqual(t, expectedTokens, actualTokens)
|
2018-03-30 11:28:50 +00:00
|
|
|
}
|
2018-03-30 14:15:26 +00:00
|
|
|
|
|
|
|
func TestCommentEnd(t *testing.T) {
|
|
|
|
src := `<?php //test`
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected := []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_OPEN_TAG,
|
|
|
|
Value: []byte("<?php"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2020-05-12 21:16:36 +00:00
|
|
|
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte(" "),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_COMMENT,
|
|
|
|
Value: []byte("//test"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-03-30 14:15:26 +00:00
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2018-03-30 14:15:26 +00:00
|
|
|
lv := &lval{}
|
|
|
|
|
|
|
|
lexer.Lex(lv)
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
actual := lexer.Tokens
|
2018-03-30 14:15:26 +00:00
|
|
|
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-03-30 14:15:26 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func TestCommentNewLine(t *testing.T) {
|
|
|
|
src := "<?php //test\n$a"
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected := []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_OPEN_TAG,
|
|
|
|
Value: []byte("<?php"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2020-05-12 21:16:36 +00:00
|
|
|
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte(" "),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_COMMENT,
|
|
|
|
Value: []byte("//test\n"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-03-30 14:15:26 +00:00
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2018-03-30 14:15:26 +00:00
|
|
|
lv := &lval{}
|
|
|
|
|
|
|
|
lexer.Lex(lv)
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
actual := lv.Tkn.Tokens
|
2018-03-30 14:15:26 +00:00
|
|
|
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-03-30 14:15:26 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func TestCommentNewLine1(t *testing.T) {
|
|
|
|
src := "<?php //test\r$a"
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected := []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_OPEN_TAG,
|
|
|
|
Value: []byte("<?php"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2020-05-12 21:16:36 +00:00
|
|
|
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte(" "),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_COMMENT,
|
|
|
|
Value: []byte("//test\r"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-03-30 14:15:26 +00:00
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2018-03-30 14:15:26 +00:00
|
|
|
lv := &lval{}
|
|
|
|
|
|
|
|
lexer.Lex(lv)
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
actual := lv.Tkn.Tokens
|
2018-03-30 14:15:26 +00:00
|
|
|
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-03-30 14:15:26 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func TestCommentNewLine2(t *testing.T) {
|
|
|
|
src := "<?php #test\r\n$a"
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected := []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_OPEN_TAG,
|
|
|
|
Value: []byte("<?php"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2020-05-12 21:16:36 +00:00
|
|
|
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte(" "),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_COMMENT,
|
|
|
|
Value: []byte("#test\r\n"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-03-30 14:15:26 +00:00
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2018-03-30 14:15:26 +00:00
|
|
|
lv := &lval{}
|
|
|
|
|
|
|
|
lexer.Lex(lv)
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
actual := lv.Tkn.Tokens
|
2018-03-30 14:15:26 +00:00
|
|
|
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-03-30 14:15:26 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func TestCommentWithPhpEndTag(t *testing.T) {
|
|
|
|
src := `<?php
|
|
|
|
//test?> test`
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected := []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_OPEN_TAG,
|
|
|
|
Value: []byte("<?php"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2020-05-12 21:16:36 +00:00
|
|
|
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte("\n\t"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_COMMENT,
|
|
|
|
Value: []byte("//test"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-03-30 14:15:26 +00:00
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2018-03-30 14:15:26 +00:00
|
|
|
lv := &lval{}
|
|
|
|
|
|
|
|
lexer.Lex(lv)
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
actual := lv.Tkn.Tokens
|
2018-03-30 14:15:26 +00:00
|
|
|
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-03-30 14:15:26 +00:00
|
|
|
}
|
2018-04-10 21:58:57 +00:00
|
|
|
|
|
|
|
func TestInlineComment(t *testing.T) {
|
|
|
|
src := `<?php
|
|
|
|
/*test*/`
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected := []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_OPEN_TAG,
|
|
|
|
Value: []byte("<?php"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2020-05-12 21:16:36 +00:00
|
|
|
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte("\n\t"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_COMMENT,
|
|
|
|
Value: []byte("/*test*/"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-04-10 21:58:57 +00:00
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2018-04-10 21:58:57 +00:00
|
|
|
lv := &lval{}
|
|
|
|
|
|
|
|
lexer.Lex(lv)
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
actual := lv.Tkn.Tokens
|
2018-04-10 21:58:57 +00:00
|
|
|
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-04-10 21:58:57 +00:00
|
|
|
}
|
|
|
|
|
2018-07-23 17:33:45 +00:00
|
|
|
func TestInlineComment2(t *testing.T) {
|
|
|
|
src := `<?php
|
|
|
|
/*/*/`
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected := []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_OPEN_TAG,
|
|
|
|
Value: []byte("<?php"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2020-05-12 21:16:36 +00:00
|
|
|
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte("\n\t"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_COMMENT,
|
|
|
|
Value: []byte("/*/*/"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-07-23 17:33:45 +00:00
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2018-07-23 17:33:45 +00:00
|
|
|
lv := &lval{}
|
|
|
|
|
|
|
|
lexer.Lex(lv)
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
actual := lexer.Tokens
|
2018-07-23 17:33:45 +00:00
|
|
|
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-07-23 17:33:45 +00:00
|
|
|
}
|
|
|
|
|
2018-04-10 21:58:57 +00:00
|
|
|
func TestEmptyInlineComment(t *testing.T) {
|
|
|
|
src := `<?php
|
2018-07-23 17:33:45 +00:00
|
|
|
/**/ `
|
2018-04-10 21:58:57 +00:00
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected := []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_OPEN_TAG,
|
|
|
|
Value: []byte("<?php"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2020-05-12 21:16:36 +00:00
|
|
|
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte("\n\t"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_COMMENT,
|
|
|
|
Value: []byte("/**/"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte(" "),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-04-10 21:58:57 +00:00
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2018-04-10 21:58:57 +00:00
|
|
|
lv := &lval{}
|
|
|
|
|
|
|
|
lexer.Lex(lv)
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
actual := lexer.Tokens
|
2018-04-10 21:58:57 +00:00
|
|
|
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-04-10 21:58:57 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func TestEmptyInlineComment2(t *testing.T) {
|
|
|
|
src := `<?php
|
|
|
|
/***/`
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected := []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_OPEN_TAG,
|
|
|
|
Value: []byte("<?php"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2020-05-12 21:16:36 +00:00
|
|
|
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte("\n\t"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_DOC_COMMENT,
|
|
|
|
Value: []byte("/***/"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-04-10 21:58:57 +00:00
|
|
|
}
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2018-04-10 21:58:57 +00:00
|
|
|
lv := &lval{}
|
|
|
|
|
|
|
|
lexer.Lex(lv)
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
actual := lv.Tkn.Tokens
|
2018-07-02 17:48:55 +00:00
|
|
|
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-07-02 17:48:55 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func TestMethodCallTokens(t *testing.T) {
|
|
|
|
src := `<?php
|
|
|
|
$a -> bar ( '' ) ;`
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2018-07-02 17:48:55 +00:00
|
|
|
lv := &lval{}
|
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected := []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_OPEN_TAG,
|
|
|
|
Value: []byte("<?php"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte("\n\t"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-07-02 17:48:55 +00:00
|
|
|
}
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual := lv.Tkn.Tokens
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-07-02 17:48:55 +00:00
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected = []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte(" "),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-07-02 17:48:55 +00:00
|
|
|
}
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual = lv.Tkn.Tokens
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-07-02 17:48:55 +00:00
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected = []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte(" "),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-07-02 17:48:55 +00:00
|
|
|
}
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual = lv.Tkn.Tokens
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-07-02 17:48:55 +00:00
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected = []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte(" "),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-07-02 17:48:55 +00:00
|
|
|
}
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual = lv.Tkn.Tokens
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-07-02 17:48:55 +00:00
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected = []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte(" "),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-07-02 17:48:55 +00:00
|
|
|
}
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual = lv.Tkn.Tokens
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-07-02 17:48:55 +00:00
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected = []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte(" "),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-07-02 17:48:55 +00:00
|
|
|
}
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual = lv.Tkn.Tokens
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-07-02 17:48:55 +00:00
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected = []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte(" "),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-07-02 17:48:55 +00:00
|
|
|
}
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual = lv.Tkn.Tokens
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-07-02 17:48:55 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func TestYieldFromTokens(t *testing.T) {
|
|
|
|
src := `<?php
|
|
|
|
yield from $a`
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2020-05-12 21:16:36 +00:00
|
|
|
lexer.WithTokens = true
|
2018-07-02 17:48:55 +00:00
|
|
|
lv := &lval{}
|
2018-04-10 21:58:57 +00:00
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected := []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_OPEN_TAG,
|
|
|
|
Value: []byte("<?php"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte("\n\t"),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-07-02 17:48:55 +00:00
|
|
|
}
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual := lv.Tkn.Tokens
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-07-02 17:48:55 +00:00
|
|
|
|
2020-05-12 21:16:36 +00:00
|
|
|
expected = []token.Token{
|
2019-02-13 20:18:07 +00:00
|
|
|
{
|
2020-05-12 21:16:36 +00:00
|
|
|
ID: token.T_WHITESPACE,
|
|
|
|
Value: []byte(" "),
|
2018-07-29 08:44:38 +00:00
|
|
|
},
|
2018-07-02 17:48:55 +00:00
|
|
|
}
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual = lv.Tkn.Tokens
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-04-10 21:58:57 +00:00
|
|
|
}
|
2018-11-05 14:56:27 +00:00
|
|
|
|
2019-06-06 18:32:58 +00:00
|
|
|
func TestVarNameByteChars(t *testing.T) {
|
|
|
|
src := "<?php $\x80 $\xff"
|
|
|
|
|
|
|
|
lexer := NewLexer([]byte(src))
|
|
|
|
lv := &lval{}
|
|
|
|
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
assert.Equal(t, "$\x80", string(lv.Tkn.Value))
|
2019-06-06 18:32:58 +00:00
|
|
|
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
assert.Equal(t, "$\xff", string(lv.Tkn.Value))
|
2019-06-06 18:32:58 +00:00
|
|
|
}
|
|
|
|
|
2019-06-07 06:33:35 +00:00
|
|
|
func TestStringVarNameByteChars(t *testing.T) {
|
|
|
|
src := "<?php \"$\x80 $\xff\""
|
|
|
|
|
|
|
|
lexer := NewLexer([]byte(src))
|
|
|
|
lv := &lval{}
|
|
|
|
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
assert.Equal(t, "\"", string(lv.Tkn.Value))
|
2019-06-07 06:33:35 +00:00
|
|
|
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
assert.Equal(t, "$\x80", string(lv.Tkn.Value))
|
2019-06-07 06:33:35 +00:00
|
|
|
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
assert.Equal(t, " ", string(lv.Tkn.Value))
|
2019-06-07 06:33:35 +00:00
|
|
|
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
assert.Equal(t, "$\xff", string(lv.Tkn.Value))
|
2019-06-07 06:33:35 +00:00
|
|
|
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
assert.Equal(t, "\"", string(lv.Tkn.Value))
|
2019-06-07 06:33:35 +00:00
|
|
|
}
|
|
|
|
|
2018-11-05 14:56:27 +00:00
|
|
|
func TestIgnoreControllCharacters(t *testing.T) {
|
|
|
|
src := "<?php \004 echo $b;"
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2018-11-05 14:56:27 +00:00
|
|
|
lv := &lval{}
|
|
|
|
|
|
|
|
expected := "echo"
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual := string(lv.Tkn.Value)
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-11-05 14:56:27 +00:00
|
|
|
|
|
|
|
expected = "$b"
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual = string(lv.Tkn.Value)
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-11-05 14:56:27 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func TestIgnoreControllCharactersAtStringVarOffset(t *testing.T) {
|
|
|
|
src := "<?php \"$a[test\004]\";"
|
|
|
|
|
2019-03-10 21:37:01 +00:00
|
|
|
lexer := NewLexer([]byte(src))
|
2018-11-05 14:56:27 +00:00
|
|
|
lv := &lval{}
|
|
|
|
|
|
|
|
expected := "\""
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual := string(lv.Tkn.Value)
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-11-05 14:56:27 +00:00
|
|
|
|
|
|
|
expected = "$a"
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual = string(lv.Tkn.Value)
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-11-05 14:56:27 +00:00
|
|
|
|
|
|
|
expected = "["
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual = string(lv.Tkn.Value)
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-11-05 14:56:27 +00:00
|
|
|
|
|
|
|
expected = "test"
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual = string(lv.Tkn.Value)
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-11-05 14:56:27 +00:00
|
|
|
|
|
|
|
expected = "]"
|
|
|
|
lexer.Lex(lv)
|
2020-05-12 21:16:36 +00:00
|
|
|
actual = string(lv.Tkn.Value)
|
2019-02-25 16:11:28 +00:00
|
|
|
assert.DeepEqual(t, expected, actual)
|
2018-11-05 14:56:27 +00:00
|
|
|
}
|