package php7

import (
	"gotest.tools/assert"
	"testing"

	"git.maride.cc/maride/php-parser/pkg/conf"
	"git.maride.cc/maride/php-parser/pkg/errors"
	"git.maride.cc/maride/php-parser/pkg/position"
	"git.maride.cc/maride/php-parser/pkg/token"
	"git.maride.cc/maride/php-parser/pkg/version"
)
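
// TestTokens runs the lexer over one source blob covering keywords, magic
// constants, operators, casts and punctuation, and compares the resulting
// token ID stream against the expected list.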
func TestTokens(t *testing.T) {
	src := `inline html -
	<? ?>
	<?= ?>
	<?php

	abstract
	array
	as
	break
	callable
	case
	catch
	class
	clone
	const
	continue
	declare
	default
	do
	echo
	else
	elseif
	empty
	enddeclare
	endfor
	endforeach
	endif
	endswitch
	endwhile
	eval
	exit
	extends
	final
	finally
	for
	foreach
	function
	cfunction
	global
	goto
	if
	isset
	implements
	instanceof
	insteadof
	interface
	list
	namespace
	private
	public
	print
	protected
	return
	static
	switch
	throw
	trait
	try
	unset
	use
	var
	while
	yield ` + "\t\r\n" + ` from
	yield
	include
	include_once
	require
	require_once

	__CLASS__
	__DIR__
	__FILE__
	__FUNCTION__
	__LINE__
	__NAMESPACE__
	__METHOD__
	__TRAIT__
	__halt_compiler

	new
	and
	or
	xor

	\
	...
	::
	&&
	||
	&=
	|=
	.=
	*=
	**=
	/=
	+=
	-=
	^=
	%=
	--
	++
	=>
	<=>
	!=
	<>
	!==
	==
	===
	<<=
	>>=
	>=
	<=
	**
	<<
	>>
	??

	# inline comment
	// inline comment

	/*
	multiline comment
	*/

	/**
	* PHP Doc comment
	*/

	;
	:
	,
	.
	[
	]
	(
	)
	|
	/
	^
	&
	+
	-
	*
	=
	%
	!
	~
	$
	<
	>
	?
	@
	{
	}

	$var
	str

	-> ` + "\t\r\n" + ` ->prop

	( array )
	( bool )
	( boolean )
	( real )
	( double )
	( float )
	( int )
	( integer )
	( object )
	( string )
	( binary )
	( unset )

	`

	expected := []string{
		token.T_INLINE_HTML.String(),
		token.ID(int(';')).String(),
		token.T_INLINE_HTML.String(),
		token.T_ECHO.String(),
		token.ID(int(';')).String(),
		token.T_INLINE_HTML.String(),

		token.T_ABSTRACT.String(),
		token.T_ARRAY.String(),
		token.T_AS.String(),
		token.T_BREAK.String(),
		token.T_CALLABLE.String(),
		token.T_CASE.String(),
		token.T_CATCH.String(),
		token.T_CLASS.String(),
		token.T_CLONE.String(),
		token.T_CONST.String(),
		token.T_CONTINUE.String(),
		token.T_DECLARE.String(),
		token.T_DEFAULT.String(),
		token.T_DO.String(),
		token.T_ECHO.String(),
		token.T_ELSE.String(),
		token.T_ELSEIF.String(),
		token.T_EMPTY.String(),
		token.T_ENDDECLARE.String(),
		token.T_ENDFOR.String(),
		token.T_ENDFOREACH.String(),
		token.T_ENDIF.String(),
		token.T_ENDSWITCH.String(),
		token.T_ENDWHILE.String(),
		token.T_EVAL.String(),
		token.T_EXIT.String(),
		token.T_EXTENDS.String(),
		token.T_FINAL.String(),
		token.T_FINALLY.String(),
		token.T_FOR.String(),
		token.T_FOREACH.String(),
		token.T_FUNCTION.String(),
		token.T_FUNCTION.String(),
		token.T_GLOBAL.String(),
		token.T_GOTO.String(),
		token.T_IF.String(),
		token.T_ISSET.String(),
		token.T_IMPLEMENTS.String(),
		token.T_INSTANCEOF.String(),
		token.T_INSTEADOF.String(),
		token.T_INTERFACE.String(),
		token.T_LIST.String(),
		token.T_NAMESPACE.String(),
		token.T_PRIVATE.String(),
		token.T_PUBLIC.String(),
		token.T_PRINT.String(),
		token.T_PROTECTED.String(),
		token.T_RETURN.String(),
		token.T_STATIC.String(),
		token.T_SWITCH.String(),
		token.T_THROW.String(),
		token.T_TRAIT.String(),
		token.T_TRY.String(),
		token.T_UNSET.String(),
		token.T_USE.String(),
		token.T_VAR.String(),
		token.T_WHILE.String(),
		token.T_YIELD_FROM.String(),
		token.T_YIELD.String(),
		token.T_INCLUDE.String(),
		token.T_INCLUDE_ONCE.String(),
		token.T_REQUIRE.String(),
		token.T_REQUIRE_ONCE.String(),

		token.T_CLASS_C.String(),
		token.T_DIR.String(),
		token.T_FILE.String(),
		token.T_FUNC_C.String(),
		token.T_LINE.String(),
		token.T_NS_C.String(),
		token.T_METHOD_C.String(),
		token.T_TRAIT_C.String(),
		token.T_HALT_COMPILER.String(),

		token.T_NEW.String(),
		token.T_LOGICAL_AND.String(),
		token.T_LOGICAL_OR.String(),
		token.T_LOGICAL_XOR.String(),

		token.T_NS_SEPARATOR.String(),
		token.T_ELLIPSIS.String(),
		token.T_PAAMAYIM_NEKUDOTAYIM.String(),
		token.T_BOOLEAN_AND.String(),
		token.T_BOOLEAN_OR.String(),
		token.T_AND_EQUAL.String(),
		token.T_OR_EQUAL.String(),
		token.T_CONCAT_EQUAL.String(),
		token.T_MUL_EQUAL.String(),
		token.T_POW_EQUAL.String(),
		token.T_DIV_EQUAL.String(),
		token.T_PLUS_EQUAL.String(),
		token.T_MINUS_EQUAL.String(),
		token.T_XOR_EQUAL.String(),
		token.T_MOD_EQUAL.String(),
		token.T_DEC.String(),
		token.T_INC.String(),
		token.T_DOUBLE_ARROW.String(),
		token.T_SPACESHIP.String(),
		token.T_IS_NOT_EQUAL.String(),
		token.T_IS_NOT_EQUAL.String(),
		token.T_IS_NOT_IDENTICAL.String(),
		token.T_IS_EQUAL.String(),
		token.T_IS_IDENTICAL.String(),
		token.T_SL_EQUAL.String(),
		token.T_SR_EQUAL.String(),
		token.T_IS_GREATER_OR_EQUAL.String(),
		token.T_IS_SMALLER_OR_EQUAL.String(),
		token.T_POW.String(),
		token.T_SL.String(),
		token.T_SR.String(),
		token.T_COALESCE.String(),

		token.ID(int(';')).String(),
		token.ID(int(':')).String(),
		token.ID(int(',')).String(),
		token.ID(int('.')).String(),
		token.ID(int('[')).String(),
		token.ID(int(']')).String(),
		token.ID(int('(')).String(),
		token.ID(int(')')).String(),
		token.ID(int('|')).String(),
		token.ID(int('/')).String(),
		token.ID(int('^')).String(),
		token.ID(int('&')).String(),
		token.ID(int('+')).String(),
		token.ID(int('-')).String(),
		token.ID(int('*')).String(),
		token.ID(int('=')).String(),
		token.ID(int('%')).String(),
		token.ID(int('!')).String(),
		token.ID(int('~')).String(),
		token.ID(int('$')).String(),
		token.ID(int('<')).String(),
		token.ID(int('>')).String(),
		token.ID(int('?')).String(),
		token.ID(int('@')).String(),
		token.ID(int('{')).String(),
		token.ID(int('}')).String(),

		token.T_VARIABLE.String(),
		token.T_STRING.String(),

		token.T_OBJECT_OPERATOR.String(),
		token.T_OBJECT_OPERATOR.String(),
		token.T_STRING.String(),

		token.T_ARRAY_CAST.String(),
		token.T_BOOL_CAST.String(),
		token.T_BOOL_CAST.String(),
		token.T_DOUBLE_CAST.String(),
		token.T_DOUBLE_CAST.String(),
		token.T_DOUBLE_CAST.String(),
		token.T_INT_CAST.String(),
		token.T_INT_CAST.String(),
		token.T_OBJECT_CAST.String(),
		token.T_STRING_CAST.String(),
		token.T_STRING_CAST.String(),
		token.T_UNSET_CAST.String(),
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
}
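
// TestShebang verifies that a shebang line before the open tag produces no
// token of its own and is attached, together with the open tag, as
// free-floating content of the first real token.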
func TestShebang(t *testing.T) {
	src := `#!/usr/bin/env php
<?php
0.1
`

	expected := []string{
		"#!/usr/bin/env php\n",
		"<?php",
		"\n",
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}

	tkn := lexer.Lex()
	assert.Equal(t, tkn.ID, token.T_DNUMBER)

	for _, tt := range tkn.FreeFloating {
		actual = append(actual, string(tt.Value))
	}

	assert.DeepEqual(t, expected, actual)
}

func TestShebangHtml(t *testing.T) {
	src := `#!/usr/bin/env php
<br/><?php
0.1
`

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	tkn := lexer.Lex()
	assert.Equal(t, tkn.ID, token.T_INLINE_HTML)
	assert.Equal(t, string(tkn.FreeFloating[0].Value), "#!/usr/bin/env php\n")

	tkn = lexer.Lex()
	assert.Equal(t, tkn.ID, token.T_DNUMBER)
}
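
// TestNumberTokens checks integer/float classification: literals that fit
// into an int64 lex as T_LNUMBER, while floats and overflowing binary, hex,
// octal and decimal literals (written here with PHP 7.4 underscore
// separators) fall back to T_DNUMBER.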
func TestNumberTokens(t *testing.T) {
	src := `<?php
	0.1
	.1
	1e10
	.1e10

	0b01111111_11111111_11111111_11111111_11111111_11111111_11111111_11111111
	0b10111111_11111111_11111111_11111111_11111111_11111111_11111111_11111111

	0x0_7FFF_FFFF_FFFF_FFFF
	0x8111_1111_1111_1111

	92233_72036_85477_5807
	0_77777_77777_77777_77777_7

	92233_72036_85477_5808
	0_77777_77777_77777_77777_70
	`

	expected := []string{
		token.T_DNUMBER.String(),
		token.T_DNUMBER.String(),
		token.T_DNUMBER.String(),
		token.T_DNUMBER.String(),

		token.T_LNUMBER.String(),
		token.T_DNUMBER.String(),

		token.T_LNUMBER.String(),
		token.T_DNUMBER.String(),

		token.T_LNUMBER.String(),
		token.T_LNUMBER.String(),

		token.T_DNUMBER.String(),
		token.T_DNUMBER.String(),
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
}
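
// TestConstantStrings expects strings without any interpolation — including
// escaped quotes, bare dollar signs and unopened braces — to lex as a single
// T_CONSTANT_ENCAPSED_STRING each.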
func TestConstantStrings(t *testing.T) {
	src := `<?
	'str'
	'\''
	'\\'

	b"str"
	"\""
	"\\"

	"\$var"
	"$4"
	"$"
	"$\\"

	"{"
	"{a"
	"\{$"
	"{\""
	`

	expected := []string{
		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),

		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),

		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),

		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
}
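
// TestSingleQuoteStringTokens covers single-quoted strings with embedded
// (non-interpolated) variables, escaped quotes and backslashes, and literal
// newlines inside the string body.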
func TestSingleQuoteStringTokens(t *testing.T) {
	src := `<?php
	'str $var str'

	'\''

	'\'
	'

	'\
	\''

	'\\'

	'\\
	'

	'\
	\''
	`

	expected := []string{
		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
}
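
// TestTeplateStringTokens checks how double-quoted strings split into
// delimiter, T_ENCAPSED_AND_WHITESPACE, T_VARIABLE, T_CURLY_OPEN and
// T_DOLLAR_OPEN_CURLY_BRACES tokens, including dollar signs that do not
// start a variable name.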
func TestTeplateStringTokens(t *testing.T) {
	src := `<?php
	"foo $a"

	"foo $a{$b}"

	"test $var {$var} ${var_name} {s $ \$a "

	"{$var}"

	"$foo/"
	"$foo/100;"

	"$/$foo"
	"$0$foo"

	"$foo$"
	`

	expected := []string{
		token.ID(int('"')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.ID(int('"')).String(),

		token.ID(int('"')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.T_CURLY_OPEN.String(),
		token.T_VARIABLE.String(),
		token.ID(int('}')).String(),
		token.ID(int('"')).String(),

		token.ID(int('"')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_CURLY_OPEN.String(),
		token.T_VARIABLE.String(),
		token.ID(int('}')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_DOLLAR_OPEN_CURLY_BRACES.String(),
		token.T_STRING_VARNAME.String(),
		token.ID(int('}')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.ID(int('"')).String(),

		token.ID(int('"')).String(),
		token.T_CURLY_OPEN.String(),
		token.T_VARIABLE.String(),
		token.ID(int('}')).String(),
		token.ID(int('"')).String(),

		token.ID(int('"')).String(),
		token.T_VARIABLE.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.ID(int('"')).String(),

		token.ID(int('"')).String(),
		token.T_VARIABLE.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.ID(int('"')).String(),

		token.ID(int('"')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.ID(int('"')).String(),

		token.ID(int('"')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.ID(int('"')).String(),

		token.ID(int('"')).String(),
		token.T_VARIABLE.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.ID(int('"')).String(),
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
}
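
// TestBackquoteStringTokens expects backquoted (shell-exec) strings to
// follow the same interpolation tokenization as double-quoted strings.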
func TestBackquoteStringTokens(t *testing.T) {
	src := `<?php
	` + "`foo $a`" + `
	` + "`foo $a{$b}`" + `

	` + "`test $var {$var} ${var_name} {s $ \\$a `" + `

	` + "`{$var}`" + `
	` + "`$foo/`" + `
	` + "`$foo/100`" + `
	` + "`$/$foo`" + `
	` + "`$0$foo`" + `
	` + "`$foo$`" + `
	`

	expected := []string{
		token.ID(int('`')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.ID(int('`')).String(),

		token.ID(int('`')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.T_CURLY_OPEN.String(),
		token.T_VARIABLE.String(),
		token.ID(int('}')).String(),
		token.ID(int('`')).String(),

		token.ID(int('`')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_CURLY_OPEN.String(),
		token.T_VARIABLE.String(),
		token.ID(int('}')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_DOLLAR_OPEN_CURLY_BRACES.String(),
		token.T_STRING_VARNAME.String(),
		token.ID(int('}')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.ID(int('`')).String(),

		token.ID(int('`')).String(),
		token.T_CURLY_OPEN.String(),
		token.T_VARIABLE.String(),
		token.ID(int('}')).String(),
		token.ID(int('`')).String(),

		token.ID(int('`')).String(),
		token.T_VARIABLE.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.ID(int('`')).String(),

		token.ID(int('`')).String(),
		token.T_VARIABLE.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.ID(int('`')).String(),

		token.ID(int('`')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.ID(int('`')).String(),

		token.ID(int('`')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.ID(int('`')).String(),

		token.ID(int('`')).String(),
		token.T_VARIABLE.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.ID(int('`')).String(),
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
}
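
// TestHereDocTokens covers plain heredocs, nowdocs (<<<'CAT') and
// double-quoted heredocs with simple and complex variable interpolation.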
func TestHereDocTokens(t *testing.T) {
	src := `<?php
	<<<CAT
	test
	CAT;

	<<<'CAT'
	test
	CAT;

	<<<"CAT"
	$var->prop
	$var[1]
	$var[0x1]
	$var[0b1]
	$var[var_name]
	$var[$var]

	{$var}
	${var_name}
	{s $ \$a
	CAT;
	`

	expected := []string{
		token.T_START_HEREDOC.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_END_HEREDOC.String(),
		token.ID(int(';')).String(),

		token.T_START_HEREDOC.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_END_HEREDOC.String(),
		token.ID(int(';')).String(),

		token.T_START_HEREDOC.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.T_OBJECT_OPERATOR.String(),
		token.T_STRING.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.ID(int('[')).String(),
		token.T_NUM_STRING.String(),
		token.ID(int(']')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.ID(int('[')).String(),
		token.T_NUM_STRING.String(),
		token.ID(int(']')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.ID(int('[')).String(),
		token.T_NUM_STRING.String(),
		token.ID(int(']')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.ID(int('[')).String(),
		token.T_STRING.String(),
		token.ID(int(']')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.ID(int('[')).String(),
		token.T_VARIABLE.String(),
		token.ID(int(']')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_CURLY_OPEN.String(),
		token.T_VARIABLE.String(),
		token.ID(int('}')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_DOLLAR_OPEN_CURLY_BRACES.String(),
		token.T_STRING_VARNAME.String(),
		token.ID(int('}')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_END_HEREDOC.String(),
		token.ID(int(';')).String(),
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
}
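
// TestHereDocTokens2 checks where T_VARIABLE ends inside a heredoc body when
// the variable name is followed by characters that cannot be part of it.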
func TestHereDocTokens2(t *testing.T) {
	src := `<?php
	<<<CAT
	$foo/
	CAT;

	<<<CAT
	$foo/100
	CAT;

	<<<CAT
	$/$foo
	CAT;

	<<<CAT
	$0$foo
	CAT;

	<<<CAT
	$foo$bar\
	CAT
	`

	expected := []string{
		token.T_START_HEREDOC.String(),
		token.T_VARIABLE.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_END_HEREDOC.String(),
		token.ID(int(';')).String(),

		token.T_START_HEREDOC.String(),
		token.T_VARIABLE.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_END_HEREDOC.String(),
		token.ID(int(';')).String(),

		token.T_START_HEREDOC.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_END_HEREDOC.String(),
		token.ID(int(';')).String(),

		token.T_START_HEREDOC.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_END_HEREDOC.String(),
		token.ID(int(';')).String(),

		token.T_START_HEREDOC.String(),
		token.T_VARIABLE.String(),
		token.T_VARIABLE.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_END_HEREDOC.String(),
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
}

func TestHereDocTokens3(t *testing.T) {
	src := `<?php

	<<<"CAT"
	\\{$a['b']}
	CAT;
	`

	expected := []string{
		token.T_START_HEREDOC.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_CURLY_OPEN.String(),
		token.T_VARIABLE.String(),
		token.ID(int('[')).String(),
		token.T_CONSTANT_ENCAPSED_STRING.String(),
		token.ID(int(']')).String(),
		token.ID(int('}')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_END_HEREDOC.String(),
		token.ID(int(';')).String(),
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
}
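
// TestHereDocTokens73 relies on PHP 7.3+ flexible heredoc syntax: the
// closing marker may be indented and may be followed by a comma.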
func TestHereDocTokens73(t *testing.T) {
	src := `<?php
	<<<"CAT"
	text
	CAT, $b`

	expected := []string{
		token.T_START_HEREDOC.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_END_HEREDOC.String(),
		token.ID(int(',')).String(),
		token.T_VARIABLE.String(),
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
}
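
// TestHereDocTokensBefore73 targets PHP 7.2, where only a closing marker at
// the start of a line ends the heredoc, so the indented CAT line below stays
// part of the body.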
func TestHereDocTokensBefore73(t *testing.T) {
	src := `<?php
	<<<"CAT"
	CAT
CAT;`

	expected := []string{
		token.T_START_HEREDOC.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_END_HEREDOC.String(),
		token.ID(int(';')).String(),
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 2,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
}
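
// TestHereDocUnclosed makes sure an unterminated heredoc still yields its
// T_START_HEREDOC token and nothing else.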
func TestHereDocUnclosed(t *testing.T) {
	src := "<?<<<'S'\n"

	expected := []string{
		token.T_START_HEREDOC.String(),
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
}
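
// TestInlineHtmlNopTokens checks that a closing ?> tag implies a ';' token
// even when the statement before it has no explicit semicolon.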
func TestInlineHtmlNopTokens(t *testing.T) {
	src := `<?php
	$a; ?> test <?php
	$a ?> test
	`

	expected := []string{
		token.T_VARIABLE.String(),
		token.ID(int(';')).String(),
		token.T_INLINE_HTML.String(),

		token.T_VARIABLE.String(),
		token.ID(int(';')).String(),
		token.T_INLINE_HTML.String(),
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
}
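
// TestStringTokensAfterVariable compares both token IDs and raw token values
// for a double-quoted string in which escaped quotes follow a variable.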
func TestStringTokensAfterVariable(t *testing.T) {
	src := `<?php "test \"$var\""`

	expected := []string{
		token.ID(int('"')).String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.T_VARIABLE.String(),
		token.T_ENCAPSED_AND_WHITESPACE.String(),
		token.ID(int('"')).String(),
	}

	expectedTokens := []string{
		"\"",
		"test \\\"",
		"$var",
		"\\\"",
		"\"",
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}
	actualTokens := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actualTokens = append(actualTokens, string(tkn.Value))
		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
	assert.DeepEqual(t, expectedTokens, actualTokens)
}

func TestSlashAfterVariable(t *testing.T) {
	src := `<?php $foo/3`

	expected := []string{
		token.T_VARIABLE.String(),
		token.ID(int('/')).String(),
		token.T_LNUMBER.String(),
	}

	expectedTokens := []string{
		"$foo",
		"/",
		"3",
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)
	actual := []string{}
	actualTokens := []string{}

	for {
		tkn := lexer.Lex()
		if tkn.ID == 0 {
			break
		}

		actualTokens = append(actualTokens, string(tkn.Value))
		actual = append(actual, tkn.ID.String())
	}

	assert.DeepEqual(t, expected, actual)
	assert.DeepEqual(t, expectedTokens, actualTokens)
}
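
// TestCommentEnd and the comment tests that follow inspect the FreeFloating
// list of the first lexed token: the open tag, whitespace and comments are
// attached there rather than emitted as tokens. Positions are cleared before
// comparison; the variants differ only in how the comment is terminated.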
func TestCommentEnd(t *testing.T) {
	src := `<?php //test`

	expected := []*token.Token{
		{
			ID:    token.T_OPEN_TAG,
			Value: []byte("<?php"),
		},
		{
			ID:    token.T_WHITESPACE,
			Value: []byte(" "),
		},
		{
			ID:    token.T_COMMENT,
			Value: []byte("//test"),
		},
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	tkn := lexer.Lex()

	actual := tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}

	assert.DeepEqual(t, expected, actual)
}

func TestCommentNewLine(t *testing.T) {
	src := "<?php //test\n$a"

	expected := []*token.Token{
		{
			ID:    token.T_OPEN_TAG,
			Value: []byte("<?php"),
		},
		{
			ID:    token.T_WHITESPACE,
			Value: []byte(" "),
		},
		{
			ID:    token.T_COMMENT,
			Value: []byte("//test\n"),
		},
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	tkn := lexer.Lex()

	actual := tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}

	assert.DeepEqual(t, expected, actual)
}

func TestCommentNewLine1(t *testing.T) {
	src := "<?php //test\r$a"

	expected := []*token.Token{
		{
			ID:    token.T_OPEN_TAG,
			Value: []byte("<?php"),
		},
		{
			ID:    token.T_WHITESPACE,
			Value: []byte(" "),
		},
		{
			ID:    token.T_COMMENT,
			Value: []byte("//test\r"),
		},
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	tkn := lexer.Lex()

	actual := tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}

	assert.DeepEqual(t, expected, actual)
}

func TestCommentNewLine2(t *testing.T) {
	src := "<?php #test\r\n$a"

	expected := []*token.Token{
		{
			ID:    token.T_OPEN_TAG,
			Value: []byte("<?php"),
		},
		{
			ID:    token.T_WHITESPACE,
			Value: []byte(" "),
		},
		{
			ID:    token.T_COMMENT,
			Value: []byte("#test\r\n"),
		},
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	tkn := lexer.Lex()

	actual := tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}

	assert.DeepEqual(t, expected, actual)
}

func TestCommentWithPhpEndTag(t *testing.T) {
	src := `<?php
	//test?> test`

	expected := []*token.Token{
		{
			ID:    token.T_OPEN_TAG,
			Value: []byte("<?php"),
		},
		{
			ID:    token.T_WHITESPACE,
			Value: []byte("\n\t"),
		},
		{
			ID:    token.T_COMMENT,
			Value: []byte("//test"),
		},
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	tkn := lexer.Lex()

	actual := tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}

	assert.DeepEqual(t, expected, actual)
}

func TestInlineComment(t *testing.T) {
	src := `<?php
	/*test*/`

	expected := []*token.Token{
		{
			ID:    token.T_OPEN_TAG,
			Value: []byte("<?php"),
		},
		{
			ID:    token.T_WHITESPACE,
			Value: []byte("\n\t"),
		},
		{
			ID:    token.T_COMMENT,
			Value: []byte("/*test*/"),
		},
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	tkn := lexer.Lex()

	actual := tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}

	assert.DeepEqual(t, expected, actual)
}

func TestInlineComment2(t *testing.T) {
	src := `<?php
	/*/*/`

	expected := []*token.Token{
		{
			ID:    token.T_OPEN_TAG,
			Value: []byte("<?php"),
		},
		{
			ID:    token.T_WHITESPACE,
			Value: []byte("\n\t"),
		},
		{
			ID:    token.T_COMMENT,
			Value: []byte("/*/*/"),
		},
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	tkn := lexer.Lex()

	actual := tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}

	assert.DeepEqual(t, expected, actual)
}

func TestEmptyInlineComment(t *testing.T) {
	src := `<?php
	/**/ `

	expected := []*token.Token{
		{
			ID:    token.T_OPEN_TAG,
			Value: []byte("<?php"),
		},
		{
			ID:    token.T_WHITESPACE,
			Value: []byte("\n\t"),
		},
		{
			ID:    token.T_COMMENT,
			Value: []byte("/**/"),
		},
		{
			ID:    token.T_WHITESPACE,
			Value: []byte(" "),
		},
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	tkn := lexer.Lex()

	actual := tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}

	assert.DeepEqual(t, expected, actual)
}
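
// TestEmptyInlineComment2 checks the edge case that /***/ lexes as a
// T_DOC_COMMENT, while the shorter /**/ above is a plain T_COMMENT.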
func TestEmptyInlineComment2(t *testing.T) {
	src := `<?php
	/***/`

	expected := []*token.Token{
		{
			ID:    token.T_OPEN_TAG,
			Value: []byte("<?php"),
		},
		{
			ID:    token.T_WHITESPACE,
			Value: []byte("\n\t"),
		},
		{
			ID:    token.T_DOC_COMMENT,
			Value: []byte("/***/"),
		},
	}

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	tkn := lexer.Lex()

	actual := tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}

	assert.DeepEqual(t, expected, actual)
}
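
// TestMethodCallTokens walks token by token through a spaced-out method call
// and expects every token after the first to carry the single preceding
// space as free-floating T_WHITESPACE.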
func TestMethodCallTokens(t *testing.T) {
	src := `<?php
	$a -> bar ( '' ) ;`

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	expected := []*token.Token{
		{
			ID:    token.T_OPEN_TAG,
			Value: []byte("<?php"),
		},
		{
			ID:    token.T_WHITESPACE,
			Value: []byte("\n\t"),
		},
	}
	tkn := lexer.Lex()
	actual := tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}
	assert.DeepEqual(t, expected, actual)

	expected = []*token.Token{
		{
			ID:    token.T_WHITESPACE,
			Value: []byte(" "),
		},
	}
	tkn = lexer.Lex()
	actual = tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}
	assert.DeepEqual(t, expected, actual)

	expected = []*token.Token{
		{
			ID:    token.T_WHITESPACE,
			Value: []byte(" "),
		},
	}
	tkn = lexer.Lex()
	actual = tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}
	assert.DeepEqual(t, expected, actual)

	expected = []*token.Token{
		{
			ID:    token.T_WHITESPACE,
			Value: []byte(" "),
		},
	}
	tkn = lexer.Lex()
	actual = tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}
	assert.DeepEqual(t, expected, actual)

	expected = []*token.Token{
		{
			ID:    token.T_WHITESPACE,
			Value: []byte(" "),
		},
	}
	tkn = lexer.Lex()
	actual = tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}
	assert.DeepEqual(t, expected, actual)

	expected = []*token.Token{
		{
			ID:    token.T_WHITESPACE,
			Value: []byte(" "),
		},
	}
	tkn = lexer.Lex()
	actual = tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}
	assert.DeepEqual(t, expected, actual)

	expected = []*token.Token{
		{
			ID:    token.T_WHITESPACE,
			Value: []byte(" "),
		},
	}
	tkn = lexer.Lex()
	actual = tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}
	assert.DeepEqual(t, expected, actual)
}
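
// TestYieldFromTokens expects `yield from` to lex as a single token; the
// following variable carries the separating space as free-floating content.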
func TestYieldFromTokens(t *testing.T) {
	src := `<?php
	yield from $a`

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	expected := []*token.Token{
		{
			ID:    token.T_OPEN_TAG,
			Value: []byte("<?php"),
		},
		{
			ID:    token.T_WHITESPACE,
			Value: []byte("\n\t"),
		},
	}
	tkn := lexer.Lex()
	actual := tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}
	assert.DeepEqual(t, expected, actual)

	expected = []*token.Token{
		{
			ID:    token.T_WHITESPACE,
			Value: []byte(" "),
		},
	}
	tkn = lexer.Lex()
	actual = tkn.FreeFloating
	for _, v := range actual {
		v.Position = nil
	}
	assert.DeepEqual(t, expected, actual)
}

func TestVarNameByteChars(t *testing.T) {
	src := "<?php $\x80 $\xff"

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	tkn := lexer.Lex()
	assert.Equal(t, "$\x80", string(tkn.Value))

	tkn = lexer.Lex()
	assert.Equal(t, "$\xff", string(tkn.Value))
}

func TestStringVarNameByteChars(t *testing.T) {
	src := "<?php \"$\x80 $\xff\""

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	tkn := lexer.Lex()
	assert.Equal(t, "\"", string(tkn.Value))

	tkn = lexer.Lex()
	assert.Equal(t, "$\x80", string(tkn.Value))

	tkn = lexer.Lex()
	assert.Equal(t, " ", string(tkn.Value))

	tkn = lexer.Lex()
	assert.Equal(t, "$\xff", string(tkn.Value))

	tkn = lexer.Lex()
	assert.Equal(t, "\"", string(tkn.Value))
}
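
// TestIgnoreControllCharacters expects control characters to be skipped
// while lexing continues, with a warning reported through the configured
// ErrorHandlerFunc including the exact position of the offending byte.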
func TestIgnoreControllCharacters(t *testing.T) {
	src := "<?php \004 echo $b;"

	var actualErr *errors.Error
	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
		ErrorHandlerFunc: func(e *errors.Error) {
			actualErr = e
		},
	}
	lexer := NewLexer([]byte(src), config)

	expected := "echo"
	tkn := lexer.Lex()
	actual := string(tkn.Value)
	assert.DeepEqual(t, expected, actual)

	expected = "$b"
	tkn = lexer.Lex()
	actual = string(tkn.Value)
	assert.DeepEqual(t, expected, actual)

	expectedErr := &errors.Error{
		Msg: "WARNING: Unexpected character in input: '\x04' (ASCII=4)",
		Pos: &position.Position{StartLine: 1, EndLine: 1, StartPos: 6, EndPos: 7},
	}
	assert.DeepEqual(t, expectedErr, actualErr)
}

func TestIgnoreControllCharactersAtStringVarOffset(t *testing.T) {
	src := "<?php \"$a[test\004]\";"

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	expected := "\""
	tkn := lexer.Lex()
	actual := string(tkn.Value)
	assert.DeepEqual(t, expected, actual)

	expected = "$a"
	tkn = lexer.Lex()
	actual = string(tkn.Value)
	assert.DeepEqual(t, expected, actual)

	expected = "["
	tkn = lexer.Lex()
	actual = string(tkn.Value)
	assert.DeepEqual(t, expected, actual)

	expected = "test"
	tkn = lexer.Lex()
	actual = string(tkn.Value)
	assert.DeepEqual(t, expected, actual)

	expected = "]"
	tkn = lexer.Lex()
	actual = string(tkn.Value)
	assert.DeepEqual(t, expected, actual)
}
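
// TestDoubleDollar and TestTripleDollar check variable-variable syntax
// inside double-quoted strings: only the final $name lexes as T_VARIABLE,
// while the leading dollar signs stay a separate token.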
func TestDoubleDollar(t *testing.T) {
	src := `<?php "$$a";`

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	expected := "\""
	tkn := lexer.Lex()
	actual := string(tkn.Value)
	assert.DeepEqual(t, expected, actual)

	expected = "$"
	tkn = lexer.Lex()
	actual = string(tkn.Value)
	assert.DeepEqual(t, expected, actual)

	expected = "$a"
	tkn = lexer.Lex()
	actual = string(tkn.Value)
	assert.DeepEqual(t, expected, actual)
}

func TestTripleDollar(t *testing.T) {
	src := `<?php "$$$a";`

	config := conf.Config{
		Version: &version.Version{
			Major: 7,
			Minor: 4,
		},
	}
	lexer := NewLexer([]byte(src), config)

	expected := "\""
	tkn := lexer.Lex()
	actual := string(tkn.Value)
	assert.DeepEqual(t, expected, actual)

	expected = "$$"
	tkn = lexer.Lex()
	actual = string(tkn.Value)
	assert.DeepEqual(t, expected, actual)

	expected = "$a"
	tkn = lexer.Lex()
	actual = string(tkn.Value)
	assert.DeepEqual(t, expected, actual)
}