PHP 8 Update

- Nullsafe operator (?->)
- Remove the (real) cast
- Named arguments
- Remove the (unset) cast
- Remove curly brace ({}) array access
- match expression
- Union types in type hints, including the static type
- catch blocks without a variable
- Trailing comma in parameter lists
- throw can be used as an expression
- Concatenation precedence change
- Declaring properties in the constructor (constructor property promotion)
- Attributes
- Namespaced names are treated as a single token
- Trailing comma in closure use lists
- Check that ::class works on objects
- Dereferenceable changes and arbitrary expressions in new/instanceof
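
All of this is exercised through the parser's existing public API; callers only need to request the 8.0 grammar. Below is a minimal sketch (the wrapper program and the PHP snippet are illustrative; the import paths follow the `pkg/` layout shown in the README diff further down):

```go
package main

import (
	"fmt"

	"github.com/z7zmey/php-parser/pkg/conf"
	"github.com/z7zmey/php-parser/pkg/errors"
	"github.com/z7zmey/php-parser/pkg/parser"
	"github.com/z7zmey/php-parser/pkg/version"
)

func main() {
	// Illustrative PHP 8 snippet: nullsafe calls, a match expression, a named argument.
	src := []byte(`<?php
		$name  = $user?->profile?->getName(upper: true);
		$label = match ($code) { 200, 201 => "ok", default => "error" };
	`)

	var parseErrs []*errors.Error
	errorHandler := func(e *errors.Error) { parseErrs = append(parseErrs, e) }

	// Requesting version 8.0 routes parsing through the new internal/php8 lexer and grammar.
	root, err := parser.Parse(src, conf.Config{
		Version:          &version.Version{Major: 8, Minor: 0},
		ErrorHandlerFunc: errorHandler,
	})
	if err != nil {
		panic(err)
	}
	fmt.Printf("root node: %T, syntax errors: %d\n", root, len(parseErrs))
}
```
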
Makhnev Petr 2021-07-30 20:53:27 +03:00 committed by GitHub
parent 367eff9de6
commit 049ce7ddc6
40 changed files with 108535 additions and 43 deletions


@ -19,12 +19,15 @@ bench:
go test -benchmem -bench=. ./internal/php5
go test -benchmem -bench=. ./internal/php7
compile: ./internal/php5/php5.go ./internal/php7/php7.go ./internal/scanner/scanner.go
sed -i '' -e 's/yyErrorVerbose = false/yyErrorVerbose = true/g' ./internal/php7/php7.go
sed -i '' -e 's/yyErrorVerbose = false/yyErrorVerbose = true/g' ./internal/php5/php5.go
sed -i '' -e 's/\/\/line/\/\/ line/g' ./internal/php5/php5.go
sed -i '' -e 's/\/\/line/\/\/ line/g' ./internal/php7/php7.go
sed -i '' -e 's/\/\/line/\/\/ line/g' ./internal/scanner/scanner.go
compile: ./internal/php5/php5.go ./internal/php7/php7.go ./internal/php8/php8.go ./internal/php8/scanner.go ./internal/scanner/scanner.go
sed -i -e 's/yyErrorVerbose = false/yyErrorVerbose = true/g' ./internal/php5/php5.go
sed -i -e 's/yyErrorVerbose = false/yyErrorVerbose = true/g' ./internal/php7/php7.go
sed -i -e 's/yyErrorVerbose = false/yyErrorVerbose = true/g' ./internal/php8/php8.go
sed -i -e 's/\/\/line/\/\/ line/g' ./internal/php5/php5.go
sed -i -e 's/\/\/line/\/\/ line/g' ./internal/php7/php7.go
sed -i -e 's/\/\/line/\/\/ line/g' ./internal/php8/php8.go
sed -i -e 's/\/\/line/\/\/ line/g' ./internal/scanner/scanner.go
sed -i -e 's/\/\/line/\/\/ line/g' ./internal/php8/scanner.go
rm -f y.output
./internal/scanner/scanner.go: ./internal/scanner/scanner.rl
@ -36,6 +39,12 @@ compile: ./internal/php5/php5.go ./internal/php7/php7.go ./internal/scanner/scan
./internal/php7/php7.go: ./internal/php7/php7.y
goyacc -o $@ $<
./internal/php8/php8.go: ./internal/php8/php8.y
goyacc -o $@ $<
./internal/php8/scanner.go: ./internal/php8/scanner.rl
ragel -Z -G2 -o $@ $<
cpu_pprof:
go test -cpuprofile cpu.pprof -bench=. -benchtime=20s ./internal/php7
go tool pprof ./php7.test cpu.pprof


@ -1,3 +1,5 @@
> This is a fork of the [z7zmey](https://github.com/z7zmey) [parser](https://github.com/z7zmey/php-parser) that adds PHP 8 support.
PHP Parser written in Go
========================
@ -6,29 +8,25 @@ PHP Parser written in Go
[![GoDoc](https://godoc.org/github.com/z7zmey/php-parser?status.svg)](https://godoc.org/github.com/z7zmey/php-parser)
[![Build Status](https://travis-ci.org/z7zmey/php-parser.svg?branch=master)](https://travis-ci.org/z7zmey/php-parser)
[![Go Report Card](https://goreportcard.com/badge/github.com/z7zmey/php-parser)](https://goreportcard.com/report/github.com/z7zmey/php-parser)
[![Maintainability](https://api.codeclimate.com/v1/badges/950783b2e739db26e0ed/maintainability)](https://codeclimate.com/github/z7zmey/php-parser/maintainability)
[![Test Coverage](https://api.codeclimate.com/v1/badges/950783b2e739db26e0ed/test_coverage)](https://codeclimate.com/github/z7zmey/php-parser/test_coverage)
This project uses the [goyacc](https://godoc.org/golang.org/x/tools/cmd/goyacc) and [ragel](https://www.colm.net/open-source/ragel/) tools to create a PHP parser. It parses source code into an [AST](https://en.wikipedia.org/wiki/Abstract_syntax_tree) that can be used to write static analysis, refactoring, metrics, and code-style formatting tools.
#### Try it online: [demo](https://php-parser.com)
Features:
Features
---------
- Fully support PHP 5 and PHP 7 syntax
- Fully support PHP 5, PHP 7 and PHP 8.0 syntax
- Abstract syntax tree (AST) representation
- Traversing AST
- Resolving namespaced names
- Resolving namespace names
- Parsing syntax-invalid PHP files
- Saving and printing free-floating comments and whitespaces
Who Uses
--------
[VKCOM/noverify](https://github.com/VKCOM/noverify) - NoVerify is a pretty fast linter for PHP
[quasilyte/phpgrep](https://github.com/quasilyte/phpgrep) - phpgrep is a tool for syntax-aware PHP code search
- [VKCOM/noverify](https://github.com/VKCOM/noverify) pretty fast linter for PHP
- [VKCOM/nocolor](https://github.com/VKCOM/nocolor) — architecture validation tool for PHP based on the [*concept of colored functions*](https://github.com/VKCOM/nocolor/blob/master/docs/introducing_colors.md)
- [quasilyte/phpgrep](https://github.com/quasilyte/phpgrep) tool for syntax-aware PHP code search
Usage example
-------
@ -48,7 +46,7 @@ import (
)
func main() {
src := []byte(`<? echo "Hello world";`)
src := []byte(`<?php echo "Hello world";`)
// Error handler
@ -60,7 +58,7 @@ func main() {
// Parse
rootNode, err := parser.Parse(src, cfg.Config{
Version: &version.Version{Major: 5, Minor: 6},
Version: &version.Version{Major: 8, Minor: 0},
ErrorHandlerFunc: errorHandler,
})
@ -78,12 +76,6 @@ func main() {
}
```
Roadmap
-------
- Control Flow Graph (CFG)
- PHP8
Install
-------
@ -98,14 +90,14 @@ CLI
php-parser [flags] <path> ...
```
| flag | type | description |
| ------- | ------ | --------------------------------- |
| -p | bool | print filepath |
| -e | bool | print errors |
| -d | bool | dump in golang format |
| -r | bool | resolve names |
| -prof | string | start profiler: [cpu, mem, trace] |
| -phpver | string | php version (default: 7.4) |
| flag | type | description |
| ---------- | -------- | ----------------------------------- |
| `--p` | `bool` | Print file paths |
| `--e` | `bool` | Print errors |
| `--d` | `bool` | Dump AST in Golang format |
| `--r` | `bool` | Resolve names |
| `--prof` | `string` | Start profiler: `[cpu, mem, trace]` |
| `--phpver` | `string` | PHP version (default: 8.0) |
Namespace resolver
------------------
@ -113,4 +105,4 @@ Namespace resolver
The namespace resolver is a visitor that resolves each node's fully qualified name and saves it into a `map[node.Node]string` structure
- For `Class`, `Interface`, `Trait`, `Function`, `Constant` nodes it saves name with current namespace.
- For `Name`, `Relative`, `FullyQualified` nodes it resolves `use` aliases and saves a fully qualified name.

BIN
internal/php5/php5.go generated

Binary file not shown.

BIN
internal/php7/php7.go generated

Binary file not shown.

1431
internal/php8/builder.go Normal file

File diff suppressed because it is too large

251
internal/php8/lexer.go Normal file

@ -0,0 +1,251 @@
package php8
import (
"bytes"
"strings"
"github.com/z7zmey/php-parser/pkg/conf"
"github.com/z7zmey/php-parser/pkg/errors"
"github.com/z7zmey/php-parser/pkg/position"
"github.com/z7zmey/php-parser/pkg/token"
"github.com/z7zmey/php-parser/pkg/version"
)
type Lexer struct {
data []byte
phpVersion *version.Version
errHandlerFunc func(*errors.Error)
p, pe, cs int
ts, te, act int
stack []int
top int
heredocLabel []byte
tokenPool *token.Pool
positionPool *position.Pool
newLines NewLines
}
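// NewLexer creates a PHP 8 lexer over data; initLexer (generated from scanner.rl) sets up the Ragel state machine.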
func NewLexer(data []byte, config conf.Config) *Lexer {
lex := &Lexer{
data: data,
phpVersion: config.Version,
errHandlerFunc: config.ErrorHandlerFunc,
pe: len(data),
stack: make([]int, 0),
tokenPool: token.NewPool(position.DefaultBlockSize),
positionPool: position.NewPool(token.DefaultBlockSize),
newLines: NewLines{make([]int, 0, 128)},
}
initLexer(lex)
return lex
}
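// setTokenPosition fills in the token position from the current ts/te byte offsets, converting them to line numbers through the newline index.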
func (lex *Lexer) setTokenPosition(token *token.Token) {
pos := lex.positionPool.Get()
pos.StartLine = lex.newLines.GetLine(lex.ts)
pos.EndLine = lex.newLines.GetLine(lex.te - 1)
pos.StartPos = lex.ts
pos.EndPos = lex.te
token.Position = pos
}
func (lex *Lexer) addFreeFloatingToken(t *token.Token, id token.ID, ps, pe int) {
skippedTkn := lex.tokenPool.Get()
skippedTkn.ID = id
skippedTkn.Value = lex.data[ps:pe]
lex.setTokenPosition(skippedTkn)
if t.FreeFloating == nil {
t.FreeFloating = make([]*token.Token, 0, 2)
}
t.FreeFloating = append(t.FreeFloating, skippedTkn)
}
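// isNotStringVar reports whether the scanner is not at the start of an interpolated variable ($name, ${...} or {$...}); an escaped position never counts.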
func (lex *Lexer) isNotStringVar() bool {
p := lex.p
if lex.data[p-1] == '\\' && lex.data[p-2] != '\\' {
return true
}
if len(lex.data) < p+1 {
return true
}
if lex.data[p] == '$' && (lex.data[p+1] == '{' || isValidVarNameStart(lex.data[p+1])) {
return false
}
if lex.data[p] == '{' && lex.data[p+1] == '$' {
return false
}
return true
}
func (lex *Lexer) isNotStringEnd(s byte) bool {
p := lex.p
if lex.data[p-1] == '\\' && lex.data[p-2] != '\\' {
return true
}
return !(lex.data[p] == s)
}
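// isHeredocEnd picks the heredoc-end check by PHP version: 7.3+ allows the closing label to be indented (flexible heredoc syntax).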
func (lex *Lexer) isHeredocEnd(p int) bool {
o, err := version.New("7.3")
if err != nil {
panic(err)
}
if lex.phpVersion.GreaterOrEqual(o) {
return lex.isHeredocEndSince73(p)
}
return lex.isHeredocEndBefore73(p)
}
func (lex *Lexer) isHeredocEndBefore73(p int) bool {
if lex.data[p-1] != '\r' && lex.data[p-1] != '\n' {
return false
}
l := len(lex.heredocLabel)
if len(lex.data) < p+l {
return false
}
if len(lex.data) > p+l && lex.data[p+l] != ';' && lex.data[p+l] != '\r' && lex.data[p+l] != '\n' {
return false
}
if len(lex.data) > p+l+1 && lex.data[p+l] == ';' && lex.data[p+l+1] != '\r' && lex.data[p+l+1] != '\n' {
return false
}
return bytes.Equal(lex.heredocLabel, lex.data[p:p+l])
}
func (lex *Lexer) isHeredocEndSince73(p int) bool {
if lex.data[p-1] != '\r' && lex.data[p-1] != '\n' {
return false
}
if p == len(lex.data) {
return false
}
for lex.data[p] == ' ' || lex.data[p] == '\t' {
p++
}
l := len(lex.heredocLabel)
if len(lex.data) < p+l {
return false
}
if len(lex.data) > p+l && isValidVarName(lex.data[p+l]) {
return false
}
if bytes.Equal(lex.heredocLabel, lex.data[p:p+l]) {
lex.p = p
return true
}
return false
}
func (lex *Lexer) isNotHeredocEnd(p int) bool {
return !lex.isHeredocEnd(p)
}
func (lex *Lexer) growCallStack() {
if lex.top == len(lex.stack) {
lex.stack = append(lex.stack, 0)
}
}
func (lex *Lexer) isNotPhpCloseToken() bool {
if lex.p+1 == len(lex.data) {
return true
}
return lex.data[lex.p] != '?' || lex.data[lex.p+1] != '>'
}
func (lex *Lexer) isNotNewLine() bool {
if lex.data[lex.p] == '\n' && lex.data[lex.p-1] == '\r' {
return true
}
return lex.data[lex.p-1] != '\n' && lex.data[lex.p-1] != '\r'
}
func (lex *Lexer) call(state int, fnext int) {
lex.growCallStack()
lex.stack[lex.top] = state
lex.top++
lex.p++
lex.cs = fnext
}
func (lex *Lexer) ret(n int) {
lex.top = lex.top - n
if lex.top < 0 {
lex.top = 0
}
lex.cs = lex.stack[lex.top]
lex.p++
}
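// ungetStr rewinds the scanner by len(s) bytes when the current token text ends with s.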
func (lex *Lexer) ungetStr(s string) {
tokenStr := string(lex.data[lex.ts:lex.te])
if strings.HasSuffix(tokenStr, s) {
lex.ungetCnt(len(s))
}
}
func (lex *Lexer) ungetCnt(n int) {
lex.p = lex.p - n
lex.te = lex.te - n
}
func (lex *Lexer) error(msg string) {
if lex.errHandlerFunc == nil {
return
}
pos := position.NewPosition(
lex.newLines.GetLine(lex.ts),
lex.newLines.GetLine(lex.te-1),
lex.ts,
lex.te,
)
lex.errHandlerFunc(errors.NewError(msg, pos))
}
func isValidVarNameStart(r byte) bool {
return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || r == '_' || r >= 0x80
}
func isValidVarName(r byte) bool {
return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '_' || r >= 0x80
}

25
internal/php8/newline.go Normal file

@ -0,0 +1,25 @@
package php8
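// NewLines stores the byte offsets of encountered line breaks so token offsets can be translated into line numbers.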
type NewLines struct {
data []int
}
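// Append records a newline offset; only strictly increasing offsets are kept, so re-scanning the same byte does not duplicate entries.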
func (nl *NewLines) Append(p int) {
if len(nl.data) == 0 || nl.data[len(nl.data)-1] < p {
nl.data = append(nl.data, p)
}
}
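// GetLine returns the 1-based line number containing byte offset p by walking the recorded offsets from the end.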
func (nl *NewLines) GetLine(p int) int {
line := len(nl.data) + 1
for i := len(nl.data) - 1; i >= 0; i-- {
if p < nl.data[i] {
line = i + 1
} else {
break
}
}
return line
}

99
internal/php8/node.go Normal file

@ -0,0 +1,99 @@
package php8
import (
"github.com/z7zmey/php-parser/pkg/ast"
"github.com/z7zmey/php-parser/pkg/position"
"github.com/z7zmey/php-parser/pkg/token"
)
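// ParserBrackets is an internal helper node: a child expression together with its opening and closing bracket tokens.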
type ParserBrackets struct {
Position *position.Position
OpenBracketTkn *token.Token
Child ast.Vertex
CloseBracketTkn *token.Token
}
func (n *ParserBrackets) Accept(v ast.Visitor) {
// do nothing
}
func (n *ParserBrackets) GetPosition() *position.Position {
return n.Position
}
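// ParserSeparatedList is an internal helper node for comma-separated items that keeps the separator tokens alongside the items.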
type ParserSeparatedList struct {
Position *position.Position
Items []ast.Vertex
SeparatorTkns []*token.Token
}
func (n *ParserSeparatedList) Accept(v ast.Visitor) {
// do nothing
}
func (n *ParserSeparatedList) GetPosition() *position.Position {
return n.Position
}
// TraitAdaptationList node
type TraitAdaptationList struct {
Position *position.Position
OpenCurlyBracketTkn *token.Token
Adaptations []ast.Vertex
CloseCurlyBracketTkn *token.Token
}
func (n *TraitAdaptationList) Accept(v ast.Visitor) {
// do nothing
}
func (n *TraitAdaptationList) GetPosition() *position.Position {
return n.Position
}
// ArgumentList node
type ArgumentList struct {
Position *position.Position
OpenParenthesisTkn *token.Token
Arguments []ast.Vertex
SeparatorTkns []*token.Token
CloseParenthesisTkn *token.Token
}
func (n *ArgumentList) Accept(v ast.Visitor) {
// do nothing
}
func (n *ArgumentList) GetPosition() *position.Position {
return n.Position
}
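// ReturnType is an internal helper node: the colon token followed by the declared return type.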
type ReturnType struct {
Position *position.Position
ColonTkn *token.Token
Type ast.Vertex
}
func (n *ReturnType) Accept(v ast.Visitor) {
// do nothing
}
func (n *ReturnType) GetPosition() *position.Position {
return n.Position
}
// TraitMethodRef node
type TraitMethodRef struct {
Position *position.Position
Trait ast.Vertex
DoubleColonTkn *token.Token
Method ast.Vertex
}
func (n *TraitMethodRef) Accept(v ast.Visitor) {
// do nothing
}
func (n *TraitMethodRef) GetPosition() *position.Position {
return n.Position
}

66
internal/php8/parser.go Normal file

@ -0,0 +1,66 @@
package php8
import (
"github.com/z7zmey/php-parser/internal/position"
"github.com/z7zmey/php-parser/pkg/ast"
"github.com/z7zmey/php-parser/pkg/conf"
"github.com/z7zmey/php-parser/pkg/errors"
"github.com/z7zmey/php-parser/pkg/token"
)
// Parser structure
type Parser struct {
Lexer *Lexer
currentToken *token.Token
rootNode ast.Vertex
errHandlerFunc func(*errors.Error)
builder *Builder
}
// NewParser creates and returns new Parser
func NewParser(lexer *Lexer, config conf.Config) *Parser {
p := &Parser{
Lexer: lexer,
errHandlerFunc: config.ErrorHandlerFunc,
}
p.builder = NewBuilder(position.NewBuilder(), p)
return p
}
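// Lex fetches the next token from the lexer, remembers it for error reporting and hands it to the generated parser through lval.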
func (p *Parser) Lex(lval *yySymType) int {
t := p.Lexer.Lex()
p.currentToken = t
lval.token = t
return int(t.ID)
}
func (p *Parser) Error(msg string) {
if p.errHandlerFunc == nil {
return
}
p.errHandlerFunc(errors.NewError(msg, p.currentToken.Position))
}
// Parse is the php8 parser entrypoint
func (p *Parser) Parse() int {
p.rootNode = nil
return yyParse(p)
}
// GetRootNode returns root node
func (p *Parser) GetRootNode() ast.Vertex {
return p.rootNode
}
// helpers
func lastNode(nn []ast.Vertex) ast.Vertex {
if len(nn) == 0 {
return nil
}
return nn[len(nn)-1]
}

File diff suppressed because it is too large

56137
internal/php8/parser_test.go Normal file

File diff suppressed because it is too large

7192
internal/php8/php8.go Normal file

File diff suppressed because it is too large

3423
internal/php8/php8.y Normal file

File diff suppressed because it is too large


@ -0,0 +1,30 @@
package php8_test
import (
"io/ioutil"
"testing"
"github.com/z7zmey/php-parser/internal/php8"
"github.com/z7zmey/php-parser/pkg/conf"
"github.com/z7zmey/php-parser/pkg/version"
)
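// BenchmarkPhp8 lexes and parses test.php in a loop with the PHP 8 lexer and parser.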
func BenchmarkPhp8(b *testing.B) {
src, err := ioutil.ReadFile("test.php")
if err != nil {
b.Fatal("can not read test.php: " + err.Error())
}
for n := 0; n < b.N; n++ {
config := conf.Config{
Version: &version.Version{
Major: 8,
Minor: 8,
},
}
lexer := php8.NewLexer(src, config)
php8parser := php8.NewParser(lexer, config)
php8parser.Parse()
}
}

28084
internal/php8/scanner.go Normal file

File diff suppressed because it is too large

516
internal/php8/scanner.rl Normal file

@ -0,0 +1,516 @@
package php8
import (
"fmt"
"strconv"
"strings"
"github.com/z7zmey/php-parser/pkg/token"
)
%%{
machine lexer;
write data;
access lex.;
variable p lex.p;
variable pe lex.pe;
}%%
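// initLexer runs Ragel's generated init code. The "access lex." and "variable p/pe" directives
// above keep the machine state (cs, ts, te, act, p, pe, stack) on the Lexer struct, so lexing
// can resume across Lex() calls.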
func initLexer(lex *Lexer) {
%% write init;
}
func (lex *Lexer) Lex() *token.Token {
eof := lex.pe
var tok token.ID
tkn := lex.tokenPool.Get()
lblStart := 0
lblEnd := 0
_, _ = lblStart, lblEnd
%%{
action heredoc_lbl_start {lblStart = lex.p}
action heredoc_lbl_end {lblEnd = lex.p}
action new_line {
if lex.data[lex.p] == '\n' {
lex.newLines.Append(lex.p+1)
}
if lex.data[lex.p] == '\r' && lex.data[lex.p+1] != '\n' {
lex.newLines.Append(lex.p+1)
}
}
action is_not_heredoc_end { lex.isNotHeredocEnd(lex.p) }
action is_not_comment_end { lex.isNotPhpCloseToken() && lex.isNotNewLine() }
action is_not_heredoc_end_or_var { lex.isNotHeredocEnd(lex.p) && lex.isNotStringVar() }
action is_not_string_end_or_var { lex.isNotStringEnd('"') && lex.isNotStringVar() }
action is_not_backqoute_end_or_var { lex.isNotStringEnd('`') && lex.isNotStringVar() }
newline = ('\r\n' >(nl, 1) | '\r' >(nl, 0) | '\n' >(nl, 0)) $new_line %{};
any_line = any | newline;
whitespace = [\t\v\f ];
whitespace_line = [\t\v\f ] | newline;
lnum = [0-9]+('_'[0-9]+)*;
dnum = (lnum?"." lnum)|(lnum"."lnum?);
hnum = '0x'[0-9a-fA-F]+('_'[0-9a-fA-F]+)*;
bnum = '0b'[01]+('_'[01]+)*;
exponent_dnum = (lnum | dnum) ('e'|'E') ('+'|'-')? lnum;
varname_first = [a-zA-Z_] | (0x0080..0x00FF);
varname_second = varname_first | [0-9];
varname = varname_first (varname_second)*;
heredoc_label = varname >heredoc_lbl_start %heredoc_lbl_end;
operators = ';'|':'|','|'.'|'['|']'|'('|')'|'|'|'/'|'^'|'&'|'+'|'-'|'*'|'='|'%'|'!'|'~'|'$'|'<'|'>'|'?'|'@';
prepush { lex.growCallStack(); }
constant_string =
start: (
"'" -> qoute
| "b"i? '"' -> double_qoute
),
# single qoute string
qoute: (
(any - [\\'\r\n]) -> qoute
| "\r" @new_line -> qoute
| "\n" @new_line -> qoute
| "\\" -> qoute_any
| "'" -> final
),
qoute_any: (
(any - [\r\n]) -> qoute
| "\r" @new_line -> qoute
| "\n" @new_line -> qoute
),
# double qoute string
double_qoute: (
(any - [\\"${\r\n]) -> double_qoute
| "\r" @new_line -> double_qoute
| "\n" @new_line -> double_qoute
| "\\" -> double_qoute_any
| '"' -> final
| '$' -> double_qoute_nonvarname
| '{' -> double_qoute_nondollar
),
double_qoute_any: (
(any - [\r\n]) -> double_qoute
| "\r" @new_line -> double_qoute
| "\n" @new_line -> double_qoute
),
double_qoute_nondollar: (
(any - [\\$"\r\n]) -> double_qoute
| "\r" @new_line -> double_qoute
| "\n" @new_line -> double_qoute
| "\\" -> double_qoute_any
| '"' -> final
),
double_qoute_nonvarname: (
(any - [\\${"\r\n] - varname_first) -> double_qoute
| "\r" @new_line -> double_qoute
| "\n" @new_line -> double_qoute
| "\\" -> double_qoute_any
| '$' -> double_qoute_nonvarname
| '"' -> final
);
main := |*
"#!" any* :>> newline => {
lex.addFreeFloatingToken(tkn, token.T_COMMENT, lex.ts, lex.te)
};
any => {
fnext html;
lex.ungetCnt(1)
};
*|;
html := |*
any_line+ -- '<?' => {
lex.ungetStr("<")
lex.setTokenPosition(tkn)
tok = token.T_INLINE_HTML;
fbreak;
};
'<?' => {
lex.addFreeFloatingToken(tkn, token.T_OPEN_TAG, lex.ts, lex.te)
fnext php;
};
'<?php'i ( [ \t] | newline ) => {
lex.ungetCnt(lex.te - lex.ts - 5)
lex.addFreeFloatingToken(tkn, token.T_OPEN_TAG, lex.ts, lex.ts+5)
fnext php;
};
'<?='i => {
lex.setTokenPosition(tkn);
tok = token.T_ECHO;
fnext php;
fbreak;
};
*|;
php := |*
whitespace_line* => {lex.addFreeFloatingToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)};
'?>' newline? => {lex.setTokenPosition(tkn); tok = token.ID(int(';')); fnext html; fbreak;};
';' whitespace_line* '?>' newline? => {lex.setTokenPosition(tkn); tok = token.ID(int(';')); fnext html; fbreak;};
(dnum | exponent_dnum) => {lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak;};
bnum => {
s := strings.Replace(string(lex.data[lex.ts+2:lex.te]), "_", "", -1)
_, err := strconv.ParseInt(s, 2, 0)
if err == nil {
lex.setTokenPosition(tkn); tok = token.T_LNUMBER; fbreak;
}
lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak;
};
lnum => {
base := 10
if lex.data[lex.ts] == '0' {
base = 8
}
s := strings.Replace(string(lex.data[lex.ts:lex.te]), "_", "", -1)
_, err := strconv.ParseInt(s, base, 0)
if err == nil {
lex.setTokenPosition(tkn); tok = token.T_LNUMBER; fbreak;
}
lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak;
};
hnum => {
s := strings.Replace(string(lex.data[lex.ts+2:lex.te]), "_", "", -1)
_, err := strconv.ParseInt(s, 16, 0)
if err == nil {
lex.setTokenPosition(tkn); tok = token.T_LNUMBER; fbreak;
}
lex.setTokenPosition(tkn); tok = token.T_DNUMBER; fbreak;
};
'namespace'i ('\\' varname)+ => {lex.setTokenPosition(tkn); tok = token.T_NAME_RELATIVE; fbreak;};
varname ('\\' varname)+ => {lex.setTokenPosition(tkn); tok = token.T_NAME_QUALIFIED; fbreak;};
'\\' varname ('\\' varname)* => {lex.setTokenPosition(tkn); tok = token.T_NAME_FULLY_QUALIFIED; fbreak;};
'\\' => {lex.setTokenPosition(tkn); tok = token.T_NS_SEPARATOR; fbreak;};
'abstract'i => {lex.setTokenPosition(tkn); tok = token.T_ABSTRACT; fbreak;};
'array'i => {lex.setTokenPosition(tkn); tok = token.T_ARRAY; fbreak;};
'as'i => {lex.setTokenPosition(tkn); tok = token.T_AS; fbreak;};
'break'i => {lex.setTokenPosition(tkn); tok = token.T_BREAK; fbreak;};
'callable'i => {lex.setTokenPosition(tkn); tok = token.T_CALLABLE; fbreak;};
'case'i => {lex.setTokenPosition(tkn); tok = token.T_CASE; fbreak;};
'catch'i => {lex.setTokenPosition(tkn); tok = token.T_CATCH; fbreak;};
'class'i => {lex.setTokenPosition(tkn); tok = token.T_CLASS; fbreak;};
'clone'i => {lex.setTokenPosition(tkn); tok = token.T_CLONE; fbreak;};
'const'i => {lex.setTokenPosition(tkn); tok = token.T_CONST; fbreak;};
'continue'i => {lex.setTokenPosition(tkn); tok = token.T_CONTINUE; fbreak;};
'declare'i => {lex.setTokenPosition(tkn); tok = token.T_DECLARE; fbreak;};
'default'i => {lex.setTokenPosition(tkn); tok = token.T_DEFAULT; fbreak;};
'do'i => {lex.setTokenPosition(tkn); tok = token.T_DO; fbreak;};
'echo'i => {lex.setTokenPosition(tkn); tok = token.T_ECHO; fbreak;};
'else'i => {lex.setTokenPosition(tkn); tok = token.T_ELSE; fbreak;};
'elseif'i => {lex.setTokenPosition(tkn); tok = token.T_ELSEIF; fbreak;};
'empty'i => {lex.setTokenPosition(tkn); tok = token.T_EMPTY; fbreak;};
'enddeclare'i => {lex.setTokenPosition(tkn); tok = token.T_ENDDECLARE; fbreak;};
'endfor'i => {lex.setTokenPosition(tkn); tok = token.T_ENDFOR; fbreak;};
'endforeach'i => {lex.setTokenPosition(tkn); tok = token.T_ENDFOREACH; fbreak;};
'endif'i => {lex.setTokenPosition(tkn); tok = token.T_ENDIF; fbreak;};
'endswitch'i => {lex.setTokenPosition(tkn); tok = token.T_ENDSWITCH; fbreak;};
'endwhile'i => {lex.setTokenPosition(tkn); tok = token.T_ENDWHILE; fbreak;};
'eval'i => {lex.setTokenPosition(tkn); tok = token.T_EVAL; fbreak;};
'exit'i | 'die'i => {lex.setTokenPosition(tkn); tok = token.T_EXIT; fbreak;};
'extends'i => {lex.setTokenPosition(tkn); tok = token.T_EXTENDS; fbreak;};
'final'i => {lex.setTokenPosition(tkn); tok = token.T_FINAL; fbreak;};
'finally'i => {lex.setTokenPosition(tkn); tok = token.T_FINALLY; fbreak;};
'for'i => {lex.setTokenPosition(tkn); tok = token.T_FOR; fbreak;};
'foreach'i => {lex.setTokenPosition(tkn); tok = token.T_FOREACH; fbreak;};
'function'i | 'cfunction'i => {lex.setTokenPosition(tkn); tok = token.T_FUNCTION; fbreak;};
'fn'i => {lex.setTokenPosition(tkn); tok = token.T_FN; fbreak;};
'global'i => {lex.setTokenPosition(tkn); tok = token.T_GLOBAL; fbreak;};
'goto'i => {lex.setTokenPosition(tkn); tok = token.T_GOTO; fbreak;};
'if'i => {lex.setTokenPosition(tkn); tok = token.T_IF; fbreak;};
'isset'i => {lex.setTokenPosition(tkn); tok = token.T_ISSET; fbreak;};
'implements'i => {lex.setTokenPosition(tkn); tok = token.T_IMPLEMENTS; fbreak;};
'instanceof'i => {lex.setTokenPosition(tkn); tok = token.T_INSTANCEOF; fbreak;};
'insteadof'i => {lex.setTokenPosition(tkn); tok = token.T_INSTEADOF; fbreak;};
'interface'i => {lex.setTokenPosition(tkn); tok = token.T_INTERFACE; fbreak;};
'list'i => {lex.setTokenPosition(tkn); tok = token.T_LIST; fbreak;};
'namespace'i => {lex.setTokenPosition(tkn); tok = token.T_NAMESPACE; fbreak;};
'private'i => {lex.setTokenPosition(tkn); tok = token.T_PRIVATE; fbreak;};
'public'i => {lex.setTokenPosition(tkn); tok = token.T_PUBLIC; fbreak;};
'print'i => {lex.setTokenPosition(tkn); tok = token.T_PRINT; fbreak;};
'protected'i => {lex.setTokenPosition(tkn); tok = token.T_PROTECTED; fbreak;};
'return'i => {lex.setTokenPosition(tkn); tok = token.T_RETURN; fbreak;};
'static'i => {lex.setTokenPosition(tkn); tok = token.T_STATIC; fbreak;};
'switch'i => {lex.setTokenPosition(tkn); tok = token.T_SWITCH; fbreak;};
'match'i => {lex.setTokenPosition(tkn); tok = token.T_MATCH; fbreak;};
'throw'i => {lex.setTokenPosition(tkn); tok = token.T_THROW; fbreak;};
'trait'i => {lex.setTokenPosition(tkn); tok = token.T_TRAIT; fbreak;};
'try'i => {lex.setTokenPosition(tkn); tok = token.T_TRY; fbreak;};
'unset'i => {lex.setTokenPosition(tkn); tok = token.T_UNSET; fbreak;};
'use'i => {lex.setTokenPosition(tkn); tok = token.T_USE; fbreak;};
'var'i => {lex.setTokenPosition(tkn); tok = token.T_VAR; fbreak;};
'while'i => {lex.setTokenPosition(tkn); tok = token.T_WHILE; fbreak;};
'yield'i whitespace_line+ 'from'i => {lex.setTokenPosition(tkn); tok = token.T_YIELD_FROM; fbreak;};
'yield'i => {lex.setTokenPosition(tkn); tok = token.T_YIELD; fbreak;};
'include'i => {lex.setTokenPosition(tkn); tok = token.T_INCLUDE; fbreak;};
'include_once'i => {lex.setTokenPosition(tkn); tok = token.T_INCLUDE_ONCE; fbreak;};
'require'i => {lex.setTokenPosition(tkn); tok = token.T_REQUIRE; fbreak;};
'require_once'i => {lex.setTokenPosition(tkn); tok = token.T_REQUIRE_ONCE; fbreak;};
'__CLASS__'i => {lex.setTokenPosition(tkn); tok = token.T_CLASS_C; fbreak;};
'__DIR__'i => {lex.setTokenPosition(tkn); tok = token.T_DIR; fbreak;};
'__FILE__'i => {lex.setTokenPosition(tkn); tok = token.T_FILE; fbreak;};
'__FUNCTION__'i => {lex.setTokenPosition(tkn); tok = token.T_FUNC_C; fbreak;};
'__LINE__'i => {lex.setTokenPosition(tkn); tok = token.T_LINE; fbreak;};
'__NAMESPACE__'i => {lex.setTokenPosition(tkn); tok = token.T_NS_C; fbreak;};
'__METHOD__'i => {lex.setTokenPosition(tkn); tok = token.T_METHOD_C; fbreak;};
'__TRAIT__'i => {lex.setTokenPosition(tkn); tok = token.T_TRAIT_C; fbreak;};
'__halt_compiler'i => {lex.setTokenPosition(tkn); tok = token.T_HALT_COMPILER; fnext halt_compiller_open_parenthesis; fbreak;};
'new'i => {lex.setTokenPosition(tkn); tok = token.T_NEW; fbreak;};
'and'i => {lex.setTokenPosition(tkn); tok = token.T_LOGICAL_AND; fbreak;};
'or'i => {lex.setTokenPosition(tkn); tok = token.T_LOGICAL_OR; fbreak;};
'xor'i => {lex.setTokenPosition(tkn); tok = token.T_LOGICAL_XOR; fbreak;};
'#[' => {lex.setTokenPosition(tkn); tok = token.T_ATTRIBUTE; fbreak;};
'...' => {lex.setTokenPosition(tkn); tok = token.T_ELLIPSIS; fbreak;};
'::' => {lex.setTokenPosition(tkn); tok = token.T_PAAMAYIM_NEKUDOTAYIM; fbreak;};
'&&' => {lex.setTokenPosition(tkn); tok = token.T_BOOLEAN_AND; fbreak;};
'||' => {lex.setTokenPosition(tkn); tok = token.T_BOOLEAN_OR; fbreak;};
'&=' => {lex.setTokenPosition(tkn); tok = token.T_AND_EQUAL; fbreak;};
'|=' => {lex.setTokenPosition(tkn); tok = token.T_OR_EQUAL; fbreak;};
'.=' => {lex.setTokenPosition(tkn); tok = token.T_CONCAT_EQUAL; fbreak;};
'*=' => {lex.setTokenPosition(tkn); tok = token.T_MUL_EQUAL; fbreak;};
'**=' => {lex.setTokenPosition(tkn); tok = token.T_POW_EQUAL; fbreak;};
'/=' => {lex.setTokenPosition(tkn); tok = token.T_DIV_EQUAL; fbreak;};
'+=' => {lex.setTokenPosition(tkn); tok = token.T_PLUS_EQUAL; fbreak;};
'-=' => {lex.setTokenPosition(tkn); tok = token.T_MINUS_EQUAL; fbreak;};
'^=' => {lex.setTokenPosition(tkn); tok = token.T_XOR_EQUAL; fbreak;};
'%=' => {lex.setTokenPosition(tkn); tok = token.T_MOD_EQUAL; fbreak;};
'--' => {lex.setTokenPosition(tkn); tok = token.T_DEC; fbreak;};
'++' => {lex.setTokenPosition(tkn); tok = token.T_INC; fbreak;};
'=>' => {lex.setTokenPosition(tkn); tok = token.T_DOUBLE_ARROW; fbreak;};
'<=>' => {lex.setTokenPosition(tkn); tok = token.T_SPACESHIP; fbreak;};
'!=' | '<>' => {lex.setTokenPosition(tkn); tok = token.T_IS_NOT_EQUAL; fbreak;};
'!==' => {lex.setTokenPosition(tkn); tok = token.T_IS_NOT_IDENTICAL; fbreak;};
'==' => {lex.setTokenPosition(tkn); tok = token.T_IS_EQUAL; fbreak;};
'===' => {lex.setTokenPosition(tkn); tok = token.T_IS_IDENTICAL; fbreak;};
'<<=' => {lex.setTokenPosition(tkn); tok = token.T_SL_EQUAL; fbreak;};
'>>=' => {lex.setTokenPosition(tkn); tok = token.T_SR_EQUAL; fbreak;};
'>=' => {lex.setTokenPosition(tkn); tok = token.T_IS_GREATER_OR_EQUAL; fbreak;};
'<=' => {lex.setTokenPosition(tkn); tok = token.T_IS_SMALLER_OR_EQUAL; fbreak;};
'**' => {lex.setTokenPosition(tkn); tok = token.T_POW; fbreak;};
'<<' => {lex.setTokenPosition(tkn); tok = token.T_SL; fbreak;};
'>>' => {lex.setTokenPosition(tkn); tok = token.T_SR; fbreak;};
'??' => {lex.setTokenPosition(tkn); tok = token.T_COALESCE; fbreak;};
'??=' => {lex.setTokenPosition(tkn); tok = token.T_COALESCE_EQUAL; fbreak;};
'(' whitespace* 'array'i whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_ARRAY_CAST; fbreak;};
'(' whitespace* ('bool'i|'boolean'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_BOOL_CAST; fbreak;};
'(' whitespace* ('real'i) whitespace* ')' => {lex.error("The (real) cast has been removed, use (float) instead"); fbreak;};
'(' whitespace* ('double'i|'float'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_DOUBLE_CAST; fbreak;};
'(' whitespace* ('int'i|'integer'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_INT_CAST; fbreak;};
'(' whitespace* 'object'i whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_OBJECT_CAST; fbreak;};
'(' whitespace* ('string'i|'binary'i) whitespace* ')' => {lex.setTokenPosition(tkn); tok = token.T_STRING_CAST; fbreak;};
'(' whitespace* 'unset'i whitespace* ')' => {lex.error("The (unset) cast is no longer supported"); fbreak;};
(('#' ^'[') | '//') any_line* when is_not_comment_end => {
lex.ungetStr("?>")
lex.addFreeFloatingToken(tkn, token.T_COMMENT, lex.ts, lex.te)
};
'#' => {
lex.addFreeFloatingToken(tkn, token.T_COMMENT, lex.ts, lex.te)
};
'/*' any_line* :>> '*/' {
isDocComment := false;
if lex.te - lex.ts > 4 && string(lex.data[lex.ts:lex.ts+3]) == "/**" {
isDocComment = true;
}
if isDocComment {
lex.addFreeFloatingToken(tkn, token.T_DOC_COMMENT, lex.ts, lex.te)
} else {
lex.addFreeFloatingToken(tkn, token.T_COMMENT, lex.ts, lex.te)
}
};
operators => {
lex.setTokenPosition(tkn);
tok = token.ID(int(lex.data[lex.ts]));
fbreak;
};
"{" => { lex.setTokenPosition(tkn); tok = token.ID(int('{')); lex.call(ftargs, fentry(php)); goto _out; };
"}" => { lex.setTokenPosition(tkn); tok = token.ID(int('}')); lex.ret(1); goto _out;};
"$" varname => { lex.setTokenPosition(tkn); tok = token.T_VARIABLE; fbreak; };
varname => { lex.setTokenPosition(tkn); tok = token.T_STRING; fbreak; };
"->" => { lex.setTokenPosition(tkn); tok = token.T_OBJECT_OPERATOR; fnext property; fbreak; };
"?->" => { lex.setTokenPosition(tkn); tok = token.T_NULLSAFE_OBJECT_OPERATOR; fnext property; fbreak; };
constant_string => {
lex.setTokenPosition(tkn);
tok = token.T_CONSTANT_ENCAPSED_STRING;
fbreak;
};
"b"i? "<<<" [ \t]* ( heredoc_label | ("'" heredoc_label "'") | ('"' heredoc_label '"') ) newline => {
lex.heredocLabel = lex.data[lblStart:lblEnd]
lex.setTokenPosition(tkn);
tok = token.T_START_HEREDOC;
if lex.isHeredocEnd(lex.p+1) {
fnext heredoc_end;
} else if lex.data[lblStart-1] == '\'' {
fnext nowdoc;
} else {
fnext heredoc;
}
fbreak;
};
"`" => {lex.setTokenPosition(tkn); tok = token.ID(int('`')); fnext backqote; fbreak;};
'"' => {lex.setTokenPosition(tkn); tok = token.ID(int('"')); fnext template_string; fbreak;};
any_line => {
c := lex.data[lex.p]
lex.error(fmt.Sprintf("WARNING: Unexpected character in input: '%c' (ASCII=%d)", c, c));
};
*|;
property := |*
whitespace_line* => {lex.addFreeFloatingToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)};
"->" => {lex.setTokenPosition(tkn); tok = token.T_OBJECT_OPERATOR; fbreak;};
"?->" => {lex.setTokenPosition(tkn); tok = token.T_NULLSAFE_OBJECT_OPERATOR; fbreak;};
varname => {lex.setTokenPosition(tkn); tok = token.T_STRING; fnext php; fbreak;};
any => {lex.ungetCnt(1); fgoto php;};
*|;
nowdoc := |*
any_line* when is_not_heredoc_end => {
lex.setTokenPosition(tkn);
tok = token.T_ENCAPSED_AND_WHITESPACE;
fnext heredoc_end;
fbreak;
};
*|;
heredoc := |*
"{$" => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
"${" => {lex.setTokenPosition(tkn); tok = token.T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
"$" => {lex.ungetCnt(1); fcall string_var;};
any_line* when is_not_heredoc_end_or_var => {
lex.setTokenPosition(tkn);
tok = token.T_ENCAPSED_AND_WHITESPACE;
if len(lex.data) > lex.p+1 && lex.data[lex.p+1] != '$' && lex.data[lex.p+1] != '{' {
fnext heredoc_end;
}
fbreak;
};
*|;
backqote := |*
"{$" => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
"${" => {lex.setTokenPosition(tkn); tok = token.T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
"$" varname_first => {lex.ungetCnt(2); fcall string_var;};
'`' => {lex.setTokenPosition(tkn); tok = token.ID(int('`')); fnext php; fbreak;};
any_line* when is_not_backqoute_end_or_var => {
lex.setTokenPosition(tkn);
tok = token.T_ENCAPSED_AND_WHITESPACE;
fbreak;
};
*|;
template_string := |*
"{$" => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_CURLY_OPEN; lex.call(ftargs, fentry(php)); goto _out;};
"${" => {lex.setTokenPosition(tkn); tok = token.T_DOLLAR_OPEN_CURLY_BRACES; lex.call(ftargs, fentry(string_var_name)); goto _out;};
"$" varname_first => {lex.ungetCnt(2); fcall string_var;};
'"' => {lex.setTokenPosition(tkn); tok = token.ID(int('"')); fnext php; fbreak;};
any_line* when is_not_string_end_or_var => {
lex.setTokenPosition(tkn);
tok = token.T_ENCAPSED_AND_WHITESPACE;
fbreak;
};
*|;
heredoc_end := |*
varname -- ";" => {
lex.setTokenPosition(tkn);
tok = token.T_END_HEREDOC;
fnext php;
fbreak;
};
varname => {
lex.setTokenPosition(tkn);
tok = token.T_END_HEREDOC;
fnext php;
fbreak;
};
*|;
string_var := |*
'$' varname => {lex.setTokenPosition(tkn); tok = token.T_VARIABLE; fbreak;};
'->' varname_first => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_OBJECT_OPERATOR; fbreak;};
'?->' varname_first => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_NULLSAFE_OBJECT_OPERATOR; fbreak;};
varname => {lex.setTokenPosition(tkn); tok = token.T_STRING; fbreak;};
'[' => {lex.setTokenPosition(tkn); tok = token.ID(int('[')); lex.call(ftargs, fentry(string_var_index)); goto _out;};
any => {lex.ungetCnt(1); fret;};
*|;
string_var_index := |*
lnum | hnum | bnum => {lex.setTokenPosition(tkn); tok = token.T_NUM_STRING; fbreak;};
'$' varname => {lex.setTokenPosition(tkn); tok = token.T_VARIABLE; fbreak;};
varname => {lex.setTokenPosition(tkn); tok = token.T_STRING; fbreak;};
whitespace_line | [\\'#] => {lex.setTokenPosition(tkn); tok = token.T_ENCAPSED_AND_WHITESPACE; lex.ret(2); goto _out;};
operators > (svi, 1) => {lex.setTokenPosition(tkn); tok = token.ID(int(lex.data[lex.ts])); fbreak;};
']' > (svi, 2) => {lex.setTokenPosition(tkn); tok = token.ID(int(']')); lex.ret(2); goto _out;};
any_line => {
c := lex.data[lex.p]
lex.error(fmt.Sprintf("WARNING: Unexpected character in input: '%c' (ASCII=%d)", c, c));
};
*|;
string_var_name := |*
varname ("[" | "}") => {lex.ungetCnt(1); lex.setTokenPosition(tkn); tok = token.T_STRING_VARNAME; fnext php; fbreak;};
any => {lex.ungetCnt(1); fnext php;};
*|;
halt_compiller_open_parenthesis := |*
whitespace_line* => {lex.addFreeFloatingToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)};
"(" => {lex.setTokenPosition(tkn); tok = token.ID(int('(')); fnext halt_compiller_close_parenthesis; fbreak;};
any => {lex.ungetCnt(1); fnext php;};
*|;
halt_compiller_close_parenthesis := |*
whitespace_line* => {lex.addFreeFloatingToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)};
")" => {lex.setTokenPosition(tkn); tok = token.ID(int(')')); fnext halt_compiller_close_semicolon; fbreak;};
any => {lex.ungetCnt(1); fnext php;};
*|;
halt_compiller_close_semicolon := |*
whitespace_line* => {lex.addFreeFloatingToken(tkn, token.T_WHITESPACE, lex.ts, lex.te)};
";" => {lex.setTokenPosition(tkn); tok = token.ID(int(';')); fnext halt_compiller_end; fbreak;};
any => {lex.ungetCnt(1); fnext php;};
*|;
halt_compiller_end := |*
any_line* => { lex.addFreeFloatingToken(tkn, token.T_HALT_COMPILER, lex.ts, lex.te); };
*|;
write exec;
}%%
tkn.Value = lex.data[lex.ts:lex.te]
tkn.ID = token.ID(tok)
return tkn
}


@ -0,0 +1,346 @@
package php8_test
import (
"testing"
"github.com/z7zmey/php-parser/internal/php8"
"github.com/z7zmey/php-parser/internal/tester"
"github.com/z7zmey/php-parser/pkg/conf"
"github.com/z7zmey/php-parser/pkg/token"
"gotest.tools/assert"
)
func TestNullsafeMethodCallTokens(t *testing.T) {
suite := tester.NewLexerTokenStringTestSuite(t)
suite.UsePHP8()
suite.Code = "<?php $a?->foo();"
suite.Expected = []string{
"$a",
"?->",
"foo",
"(",
")",
";",
}
suite.Run()
}
func TestNullsafePropertyFetchTokens(t *testing.T) {
suite := tester.NewLexerTokenStringTestSuite(t)
suite.UsePHP8()
suite.Code = "<?php $a?->prop;"
suite.Expected = []string{
"$a",
"?->",
"prop",
";",
}
suite.Run()
}
func TestNullsafePropertyFetchInStringTokens(t *testing.T) {
suite := tester.NewLexerTokenStringTestSuite(t)
suite.UsePHP8()
suite.Code = "<?php \"$a?->prop\";"
suite.Expected = []string{
"\"",
"$a",
"?->",
"prop",
"\"",
";",
}
suite.Run()
}
func TestNullsafeMethodCallTokensFreeFloating(t *testing.T) {
suite := tester.NewLexerTokenFreeFloatingTestSuite(t)
suite.UsePHP8()
suite.Code = `<?php
$a ?-> bar ( '' ) ;`
suite.Expected = [][]*token.Token{
{
{
ID: token.T_OPEN_TAG,
Value: []byte("<?php"),
},
{
ID: token.T_WHITESPACE,
Value: []byte("\n\t"),
},
},
{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
},
},
{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
},
},
{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
},
},
{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
},
},
{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
},
},
{
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
},
},
}
suite.Run()
}
func TestMatchStringTokens(t *testing.T) {
suite := tester.NewLexerTokenStringTestSuite(t)
suite.UsePHP8()
suite.Code = "<?php match($a) {}"
suite.Expected = []string{
"match",
"(",
"$a",
")",
"{",
"}",
}
suite.Run()
}
func TestMatchWithConditionStringTokens(t *testing.T) {
suite := tester.NewLexerTokenStringTestSuite(t)
suite.UsePHP8()
suite.Code = "<?php match($a) { 10 => 100 }"
suite.Expected = []string{
"match",
"(",
"$a",
")",
"{",
"10",
"=>",
"100",
"}",
}
suite.Run()
}
func TestMatchWithDefaultStringTokens(t *testing.T) {
suite := tester.NewLexerTokenStringTestSuite(t)
suite.UsePHP8()
suite.Code = "<?php match($a) { default => 10 }"
suite.Expected = []string{
"match",
"(",
"$a",
")",
"{",
"default",
"=>",
"10",
"}",
}
suite.Run()
}
func TestAttributeTokens(t *testing.T) {
suite := tester.NewLexerTokenStringTestSuite(t)
suite.UsePHP8()
suite.Code = "<?php #[ FooAttribute]"
suite.Expected = []string{
"#[",
"FooAttribute",
"]",
}
suite.Run()
}
func TestCommentEnd2(t *testing.T) {
src := `<?php //`
expected := []*token.Token{
{
ID: token.T_OPEN_TAG,
Value: []byte("<?php"),
},
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
},
{
ID: token.T_COMMENT,
Value: []byte("//"),
},
}
lexer := php8.NewLexer([]byte(src), conf.Config{})
tkn := lexer.Lex()
actual := tkn.FreeFloating
for _, v := range actual {
v.Position = nil
}
assert.DeepEqual(t, expected, actual)
}
func TestCommentEnd3(t *testing.T) {
src := `<?php #`
expected := []*token.Token{
{
ID: token.T_OPEN_TAG,
Value: []byte("<?php"),
},
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
},
{
ID: token.T_COMMENT,
Value: []byte("#"),
},
}
lexer := php8.NewLexer([]byte(src), conf.Config{})
tkn := lexer.Lex()
actual := tkn.FreeFloating
for _, v := range actual {
v.Position = nil
}
assert.DeepEqual(t, expected, actual)
}
func TestAttribute(t *testing.T) {
src := `<?php #[ FooAttribute]`
expected := []*token.Token{
{
ID: token.T_OPEN_TAG,
Value: []byte("<?php"),
},
{
ID: token.T_WHITESPACE,
Value: []byte(" "),
},
}
lexer := php8.NewLexer([]byte(src), conf.Config{})
tkn := lexer.Lex()
actual := tkn.FreeFloating
for _, v := range actual {
v.Position = nil
}
assert.DeepEqual(t, expected, actual)
assert.DeepEqual(t, token.T_ATTRIBUTE, tkn.ID)
assert.DeepEqual(t, "#[", string(tkn.Value))
}
func TestNamespaceFullyQualifiedTokens(t *testing.T) {
suite := tester.NewLexerTokenStructTestSuite(t)
suite.UsePHP8()
suite.Code = `<?php use \Foo;`
suite.Expected = []*token.Token{
{
ID: php8.T_USE,
Value: []byte("use"),
},
{
ID: php8.T_NAME_FULLY_QUALIFIED,
Value: []byte(`\Foo`),
},
{
ID: ';',
Value: []byte(";"),
},
}
suite.Run()
}
func TestNamespaceFullyQualifiedWithKeywordsTokens(t *testing.T) {
suite := tester.NewLexerTokenStructTestSuite(t)
suite.UsePHP8()
suite.Code = `<?php use \Foo\match\fn;`
suite.Expected = []*token.Token{
{
ID: php8.T_USE,
Value: []byte("use"),
},
{
ID: php8.T_NAME_FULLY_QUALIFIED,
Value: []byte(`\Foo\match\fn`),
},
{
ID: ';',
Value: []byte(";"),
},
}
suite.Run()
}
func TestNamespaceQualifiedTokens(t *testing.T) {
suite := tester.NewLexerTokenStructTestSuite(t)
suite.UsePHP8()
suite.Code = `<?php namespace Boo\Foo;`
suite.Expected = []*token.Token{
{
ID: php8.T_NAMESPACE,
Value: []byte("namespace"),
},
{
ID: php8.T_NAME_QUALIFIED,
Value: []byte(`Boo\Foo`),
},
{
ID: ';',
Value: []byte(";"),
},
}
suite.Run()
}
func TestNamespaceRelativeTokens(t *testing.T) {
suite := tester.NewLexerTokenStructTestSuite(t)
suite.UsePHP8()
suite.Code = `<?php namespace\match;`
suite.Expected = []*token.Token{
{
ID: php8.T_NAME_RELATIVE,
Value: []byte(`namespace\match`),
},
{
ID: ';',
Value: []byte(";"),
},
}
suite.Run()
}

File diff suppressed because it is too large

356
internal/php8/test.php Normal file

@ -0,0 +1,356 @@
<?
foo($a, ...$b);
$foo($a, ...$b);
$foo->bar($a, ...$b);
foo::bar($a, ...$b);
$foo::bar($a, ...$b);
new foo($a, ...$b);
/** anonymous class */
new class ($a, ...$b) {};
new class {};
new $foo;
new $foo[1];
new $foo{$bar};
new $foo->bar;
new $foo::$bar;
new static::$bar;
function foo(?bar $bar=null, baz &...$baz) {}
class foo {public function foo(?bar $bar=null, baz &...$baz) {}}
function(?bar $bar=null, baz &...$baz) {};
static function(?bar $bar=null, baz &...$baz) {};
1234567890123456789;
12345678901234567890;
0.;
0b0111111111111111111111111111111111111111111111111111111111111111;
0b1111111111111111111111111111111111111111111111111111111111111111;
0x007111111111111111;
0x8111111111111111;
__CLASS__;
__DIR__;
__FILE__;
__FUNCTION__;
__LINE__;
__NAMESPACE__;
__METHOD__;
__TRAIT__;
"test $var";
"test $var[1]";
"test $var[-1]";
"test $var[1234567890123456789012345678901234567890]";
"test $var[-1234567890123456789012345678901234567890]";
"test $var[bar]";
"test $var[$bar]";
"$foo $bar";
"test $foo->bar()";
"test ${foo}";
"test ${foo[0]}";
"test ${$foo}";
"test {$foo->bar()}";
if ($a) :
endif;
if ($a) :
elseif ($b):
endif;
if ($a) :
else:
endif;
if ($a) :
elseif ($b):
elseif ($c):
else:
endif;
while (1) { break; }
while (1) { break 2; }
while (1) : break(3); endwhile;
class foo{ public const FOO = 1, BAR = 2; }
class foo{ const FOO = 1, BAR = 2; }
class foo{ function bar() {} }
class foo{ public static function &bar() {} }
class foo{ public static function &bar(): void {} }
abstract class foo{ }
final class foo extends bar { }
final class foo implements bar { }
final class foo implements bar, baz { }
new class() extends foo implements bar, baz { };
const FOO = 1, BAR = 2;
while (1) { continue; }
while (1) { continue 2; }
while (1) { continue(3); }
declare(ticks=1);
declare(ticks=1) {}
declare(ticks=1): enddeclare;
do {} while(1);
echo $a, 1;
echo($a);
for($i = 0; $i < 10; $i++, $i++) {}
for(; $i < 10; $i++, $i++) : endfor;
foreach ($a as $v) {}
foreach ($a as $v) : endforeach;
foreach ($a as $k => $v) {}
foreach ($a as $k => &$v) {}
foreach ($a as $k => list($v)) {}
foreach ($a as $k => [$v]) {}
function foo() {}
function foo() {return;}
function &foo() {return 1;}
function &foo(): void {}
global $a, $b;
a:
goto a;
if ($a) {}
if ($a) {} elseif ($b) {}
if ($a) {} else {}
if ($a) {} elseif ($b) {} elseif ($c) {} else {}
if ($a) {} elseif ($b) {} else if ($c) {} else {}
?> <div></div> <?
interface Foo {}
interface Foo extends Bar {}
interface Foo extends Bar, Baz {}
namespace Foo;
namespace Foo {}
namespace {}
class foo {var $a;}
class foo {public static $a, $b = 1;}
static $a, $b = 1;
switch (1) :
case 1:
default:
case 2:
endswitch;
switch (1) :;
case 1;
case 2;
endswitch;
switch (1) {
case 1: break;
case 2: break;
}
switch (1) {;
case 1; break;
case 2; break;
}
throw $e;
trait Foo {}
class Foo { use Bar; }
class Foo { use Bar, Baz {} }
class Foo { use Bar, Baz { one as include; } }
class Foo { use Bar, Baz { one as public; } }
class Foo { use Bar, Baz { one as public two; } }
class Foo { use Bar, Baz { Bar::one insteadof Baz, Quux; Baz::one as two; } }
try {}
try {} catch (Exception $e) {}
try {} catch (Exception|RuntimeException $e) {}
try {} catch (Exception $e) {} catch (RuntimeException $e) {}
try {} catch (Exception $e) {} finally {}
unset($a, $b,);
use Foo;
use \Foo;
use \Foo as Bar;
use Foo, Bar;
use Foo, Bar as Baz;
use function Foo, \Bar;
use function Foo as foo, \Bar as bar;
use const Foo, \Bar;
use const Foo as foo, \Bar as bar;
use \Foo\{Bar, Baz};
use Foo\{Bar, Baz as quux};
use function Foo\{Bar, Baz};
use const \Foo\{Bar, Baz};
use Foo\{const Bar, function Baz};
$a[1];
$a[1][2];
array();
array(1);
array(1=>1, &$b,);
~$a;
!$a;
Foo::Bar;
$foo::Bar;
clone($a);
clone $a;
function(){};
function($a, $b) use ($c, &$d) {};
function(): void {};
foo;
namespace\foo;
\foo;
empty($a);
@$a;
eval($a);
exit;
exit($a);
die;
die($a);
foo();
namespace\foo();
\foo();
$foo();
$a--;
$a++;
--$a;
++$a;
include $a;
include_once $a;
require $a;
require_once $a;
$a instanceof Foo;
$a instanceof namespace\Foo;
$a instanceof \Foo;
isset($a, $b);
list($a) = $b;
list($a[]) = $b;
list(list($a)) = $b;
$a->foo();
new Foo();
new namespace\Foo();
new \Foo();
new class ($a, ...$b) {};
print($a);
$a->foo;
`cmd $a`;
`cmd`;
``;
[];
[1];
[1=>1, &$b,];
[$a] = $b;
[$a[]] = $b;
[list($a)] = $b;
Foo::bar();
namespace\Foo::bar();
\Foo::bar();
Foo::$bar;
$foo::$bar;
namespace\Foo::$bar;
\Foo::$bar;
$a ? $b : $c;
$a ? : $c;
$a ? $b ? $c : $d : $e;
$a ? $b : $c ? $d : $e;
-$a;
+$a;
$$a;
yield;
yield $a;
yield $a => $b;
yield from $a;
(array)$a;
(boolean)$a;
(bool)$a;
(double)$a;
(float)$a;
(integer)$a;
(int)$a;
(object)$a;
(string)$a;
(unset)$a;
$a & $b;
$a | $b;
$a ^ $b;
$a && $b;
$a || $b;
$a ?? $b;
$a . $b;
$a / $b;
$a == $b;
$a >= $b;
$a > $b;
$a === $b;
$a and $b;
$a or $b;
$a xor $b;
$a - $b;
$a % $b;
$a * $b;
$a != $b;
$a !== $b;
$a + $b;
$a ** $b;
$a << $b;
$a >> $b;
$a <= $b;
$a < $b;
$a <=> $b;
$a =& $b;
$a = $b;
$a &= $b;
$a |= $b;
$a ^= $b;
$a .= $b;
$a /= $b;
$a -= $b;
$a %= $b;
$a *= $b;
$a += $b;
$a **= $b;
$a <<= $b;
$a >>= $b;
class foo {public function class() {} }
\foo\bar();
function foo(&$a, ...$b) {
function bar() {}
class Baz {}
trait Quux{}
interface Quuux {}
}
function foo(&$a = 1, ...$b = 1, $c = 1) {}
function foo(array $a, callable $b) {}
abstract final class foo { abstract protected static function bar(); final private function baz() {} }
(new Foo)->bar;
(new Foo)();
[$foo][0]();
foo[1]();
"foo"();
[1]{$foo}();
${foo()};
Foo::$bar();
Foo::{$bar[0]}();
$foo->$bar;
$foo->{$bar[0]};
[1=>&$a, 2=>list($b)];
__halt_compiler();
(new Foo)?->bar;
(new Foo)?->bar();
$foo?->$bar;
$foo?->{$bar[0]};
"{$foo?->{$bar[0]}}";
parsing process must be terminated


@ -0,0 +1,62 @@
package tester
import (
"testing"
"github.com/z7zmey/php-parser/internal/php8"
"github.com/z7zmey/php-parser/internal/scanner"
"github.com/z7zmey/php-parser/pkg/conf"
"github.com/z7zmey/php-parser/pkg/token"
"github.com/z7zmey/php-parser/pkg/version"
"gotest.tools/assert"
)
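// Lexer abstracts over the legacy scanner (PHP 5/7) and the new php8 lexer so the suites can pick one from the configured version.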
type Lexer interface {
Lex() *token.Token
}
type LexerTokenFreeFloatingTestSuite struct {
t *testing.T
Code string
Expected [][]*token.Token
Version version.Version
}
func NewLexerTokenFreeFloatingTestSuite(t *testing.T) *LexerTokenFreeFloatingTestSuite {
return &LexerTokenFreeFloatingTestSuite{
t: t,
Version: version.Version{
Major: 7,
Minor: 4,
},
}
}
func (l *LexerTokenFreeFloatingTestSuite) UsePHP8() {
l.Version = version.Version{Major: 8, Minor: 0}
}
func (l *LexerTokenFreeFloatingTestSuite) Run() {
config := conf.Config{
Version: &l.Version,
}
var lexer Lexer
if l.Version.Less(&version.Version{Major: 8, Minor: 0}) {
lexer = scanner.NewLexer([]byte(l.Code), config)
} else {
lexer = php8.NewLexer([]byte(l.Code), config)
}
for _, expected := range l.Expected {
tkn := lexer.Lex()
actual := tkn.FreeFloating
for _, v := range actual {
v.Position = nil
}
assert.DeepEqual(l.t, expected, actual)
}
}


@ -0,0 +1,54 @@
package tester
import (
"testing"
"github.com/z7zmey/php-parser/internal/php8"
"github.com/z7zmey/php-parser/internal/scanner"
"github.com/z7zmey/php-parser/pkg/conf"
"github.com/z7zmey/php-parser/pkg/version"
"gotest.tools/assert"
)
type LexerTokenStringTestSuite struct {
t *testing.T
Code string
Expected []string
Version version.Version
}
func NewLexerTokenStringTestSuite(t *testing.T) *LexerTokenStringTestSuite {
return &LexerTokenStringTestSuite{
t: t,
Version: version.Version{
Major: 7,
Minor: 4,
},
}
}
func (l *LexerTokenStringTestSuite) UsePHP8() {
l.Version = version.Version{Major: 8, Minor: 0}
}
func (l *LexerTokenStringTestSuite) Run() {
config := conf.Config{
Version: &l.Version,
}
var lexer Lexer
if l.Version.Less(&version.Version{Major: 8, Minor: 0}) {
lexer = scanner.NewLexer([]byte(l.Code), config)
} else {
lexer = php8.NewLexer([]byte(l.Code), config)
}
for _, expected := range l.Expected {
tkn := lexer.Lex()
actual := string(tkn.Value)
assert.DeepEqual(l.t, expected, actual)
}
}


@ -0,0 +1,56 @@
package tester
import (
"testing"
"github.com/z7zmey/php-parser/internal/php8"
"github.com/z7zmey/php-parser/internal/scanner"
"github.com/z7zmey/php-parser/pkg/conf"
"github.com/z7zmey/php-parser/pkg/token"
"github.com/z7zmey/php-parser/pkg/version"
"gotest.tools/assert"
)
type LexerTokenStructTestSuite struct {
t *testing.T
Code string
Expected []*token.Token
Version version.Version
}
func NewLexerTokenStructTestSuite(t *testing.T) *LexerTokenStructTestSuite {
return &LexerTokenStructTestSuite{
t: t,
Version: version.Version{
Major: 7,
Minor: 4,
},
}
}
func (l *LexerTokenStructTestSuite) UsePHP8() {
l.Version = version.Version{Major: 8, Minor: 0}
}
func (l *LexerTokenStructTestSuite) Run() {
config := conf.Config{
Version: &l.Version,
}
var lexer Lexer
if l.Version.Less(&version.Version{Major: 8, Minor: 0}) {
lexer = scanner.NewLexer([]byte(l.Code), config)
} else {
lexer = php8.NewLexer([]byte(l.Code), config)
}
for _, expected := range l.Expected {
actual := lexer.Lex()
actual.Position = nil
actual.FreeFloating = nil
assert.DeepEqual(l.t, expected, actual)
}
}


@ -0,0 +1,60 @@
package tester
import (
"bytes"
"testing"
"github.com/z7zmey/php-parser/pkg/ast"
"github.com/z7zmey/php-parser/pkg/conf"
"github.com/z7zmey/php-parser/pkg/parser"
"github.com/z7zmey/php-parser/pkg/version"
"github.com/z7zmey/php-parser/pkg/visitor/printer"
"gotest.tools/assert"
)
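// ParserPrintTestSuite parses a snippet and prints it back, asserting that the printer reproduces the original source exactly.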
type ParserPrintTestSuite struct {
t *testing.T
Version version.Version
}
func NewParserPrintTestSuite(t *testing.T) *ParserPrintTestSuite {
return &ParserPrintTestSuite{
t: t,
Version: version.Version{
Major: 7,
Minor: 4,
},
}
}
func (p *ParserPrintTestSuite) UsePHP8() *ParserPrintTestSuite {
p.Version = version.Version{Major: 8, Minor: 0}
return p
}
func (p *ParserPrintTestSuite) Run(code string) {
actual := p.print(p.parse(code))
assert.DeepEqual(p.t, code, actual)
}
func (p *ParserPrintTestSuite) parse(src string) ast.Vertex {
config := conf.Config{
Version: &p.Version,
}
root, err := parser.Parse([]byte(src), config)
if err != nil {
p.t.Fatal(err)
}
return root
}
func (p *ParserPrintTestSuite) print(n ast.Vertex) string {
o := bytes.NewBufferString("")
pr := printer.NewPrinter(o)
n.Accept(pr)
return o.String()
}

46
internal/tester/parser.go Normal file

@ -0,0 +1,46 @@
package tester
import (
"testing"
"github.com/z7zmey/php-parser/pkg/ast"
"github.com/z7zmey/php-parser/pkg/conf"
"github.com/z7zmey/php-parser/pkg/parser"
"github.com/z7zmey/php-parser/pkg/version"
"gotest.tools/assert"
)
type ParserTestSuite struct {
t *testing.T
Code string
Expected ast.Vertex
Version version.Version
}
func NewParserTestSuite(t *testing.T) *ParserTestSuite {
return &ParserTestSuite{
t: t,
Version: version.Version{
Major: 7,
Minor: 4,
},
}
}
func (p *ParserTestSuite) UsePHP8() {
p.Version = version.Version{Major: 8, Minor: 0}
}
func (p *ParserTestSuite) Run() {
config := conf.Config{
Version: &p.Version,
}
actual, err := parser.Parse([]byte(p.Code), config)
if err != nil {
p.t.Fatalf("Error parse: %v", err)
}
assert.DeepEqual(p.t, p.Expected, actual)
}


@ -0,0 +1,64 @@
package tester
import (
"bytes"
"testing"
"github.com/z7zmey/php-parser/pkg/conf"
"github.com/z7zmey/php-parser/pkg/parser"
"github.com/z7zmey/php-parser/pkg/version"
"github.com/z7zmey/php-parser/pkg/visitor/dumper"
"gotest.tools/assert"
)
type ParserDumpTestSuite struct {
t *testing.T
Code string
Expected string
Version version.Version
actualDump *bytes.Buffer
dumper *dumper.Dumper
}
func NewParserDumpTestSuite(t *testing.T) *ParserDumpTestSuite {
actualDump := bytes.NewBuffer(nil)
return &ParserDumpTestSuite{
t: t,
Version: version.Version{
Major: 7,
Minor: 4,
},
actualDump: actualDump,
dumper: dumper.NewDumper(actualDump),
}
}
func (p *ParserDumpTestSuite) WithTokens() {
p.dumper = p.dumper.WithTokens()
}
func (p *ParserDumpTestSuite) WithPositions() {
p.dumper = p.dumper.WithPositions()
}
func (p *ParserDumpTestSuite) UsePHP8() {
p.Version = version.Version{Major: 8, Minor: 0}
}
func (p *ParserDumpTestSuite) Run() {
config := conf.Config{
Version: &p.Version,
}
actual, err := parser.Parse([]byte(p.Code), config)
if err != nil {
p.t.Fatalf("Error parse: %v", err)
}
p.dumper.Dump(actual)
assert.DeepEqual(p.t, p.Expected+"\n", p.actualDump.String())
}


@ -0,0 +1,52 @@
package tester
import (
"testing"
"github.com/z7zmey/php-parser/pkg/conf"
"github.com/z7zmey/php-parser/pkg/errors"
"github.com/z7zmey/php-parser/pkg/parser"
"github.com/z7zmey/php-parser/pkg/version"
"gotest.tools/assert"
)
type ParserErrorTestSuite struct {
t *testing.T
Code string
Expected []*errors.Error
Version version.Version
}
func NewParserErrorTestSuite(t *testing.T) *ParserErrorTestSuite {
return &ParserErrorTestSuite{
t: t,
Version: version.Version{
Major: 7,
Minor: 4,
},
}
}
func (p *ParserErrorTestSuite) UsePHP8() {
p.Version = version.Version{Major: 8, Minor: 0}
}
func (p *ParserErrorTestSuite) Run() {
config := conf.Config{
Version: &p.Version,
}
var errs []*errors.Error
config.ErrorHandlerFunc = func(e *errors.Error) {
errs = append(errs, e)
}
_, err := parser.Parse([]byte(p.Code), config)
if err != nil {
p.t.Fatalf("Error parse: %v", err)
}
assert.DeepEqual(p.t, p.Expected, errs)
}


@ -13,6 +13,10 @@ type Visitor interface {
Parameter(n *Parameter)
Identifier(n *Identifier)
Argument(n *Argument)
MatchArm(n *MatchArm)
Union(n *Union)
Attribute(n *Attribute)
AttributeGroup(n *AttributeGroup)
StmtBreak(n *StmtBreak)
StmtCase(n *StmtCase)
@ -85,6 +89,8 @@ type Visitor interface {
ExprIsset(n *ExprIsset)
ExprList(n *ExprList)
ExprMethodCall(n *ExprMethodCall)
ExprNullsafeMethodCall(n *ExprNullsafeMethodCall)
ExprMatch(n *ExprMatch)
ExprNew(n *ExprNew)
ExprPostDec(n *ExprPostDec)
ExprPostInc(n *ExprPostInc)
@ -92,12 +98,14 @@ type Visitor interface {
ExprPreInc(n *ExprPreInc)
ExprPrint(n *ExprPrint)
ExprPropertyFetch(n *ExprPropertyFetch)
ExprNullsafePropertyFetch(n *ExprNullsafePropertyFetch)
ExprRequire(n *ExprRequire)
ExprRequireOnce(n *ExprRequireOnce)
ExprShellExec(n *ExprShellExec)
ExprStaticCall(n *ExprStaticCall)
ExprStaticPropertyFetch(n *ExprStaticPropertyFetch)
ExprTernary(n *ExprTernary)
ExprThrow(n *ExprThrow)
ExprUnaryMinus(n *ExprUnaryMinus)
ExprUnaryPlus(n *ExprUnaryPlus)
ExprVariable(n *ExprVariable)


@ -20,7 +20,7 @@ func (n *Root) GetPosition() *position.Position {
return n.Position
}
// Nullable node
// Nullable node is ?Expr
type Nullable struct {
Position *position.Position
QuestionTkn *token.Token
@ -35,9 +35,26 @@ func (n *Nullable) GetPosition() *position.Position {
return n.Position
}
// Union node is Expr|Expr1|...
type Union struct {
Position *position.Position
Types []Vertex
SeparatorTkns []*token.Token
}
func (n *Union) Accept(v Visitor) {
v.Union(n)
}
func (n *Union) GetPosition() *position.Position {
return n.Position
}
// Parameter node
type Parameter struct {
Position *position.Position
AttrGroups []Vertex
Visibility Vertex
Type Vertex
AmpersandTkn *token.Token
VariadicTkn *token.Token
@ -72,6 +89,8 @@ func (n *Identifier) GetPosition() *position.Position {
// Argument node
type Argument struct {
Position *position.Position
Name Vertex
ColonTkn *token.Token
VariadicTkn *token.Token
AmpersandTkn *token.Token
Expr Vertex
@ -85,6 +104,41 @@ func (n *Argument) GetPosition() *position.Position {
return n.Position
}
// Attribute node
type Attribute struct {
Position *position.Position
Name Vertex
OpenParenthesisTkn *token.Token
Args []Vertex
SeparatorTkns []*token.Token
CloseParenthesisTkn *token.Token
}
func (n *Attribute) Accept(v Visitor) {
v.Attribute(n)
}
func (n *Attribute) GetPosition() *position.Position {
return n.Position
}
// AttributeGroup node
type AttributeGroup struct {
Position *position.Position
OpenAttributeTkn *token.Token
Attrs []Vertex
SeparatorTkns []*token.Token
CloseAttributeTkn *token.Token
}
func (n *AttributeGroup) Accept(v Visitor) {
v.AttributeGroup(n)
}
func (n *AttributeGroup) GetPosition() *position.Position {
return n.Position
}
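
A minimal sketch of how these two nodes nest for the PHP attribute `#[Route("/index")]`; positions and layout tokens are omitted, and the Name, NamePart, ScalarString, and Argument shapes are assumed from the rest of the ast package rather than shown in this hunk.

```go
package example

import "github.com/z7zmey/php-parser/pkg/ast"

// attributeGroupFor builds the node shape behind `#[Route("/index")]`:
// one AttributeGroup holding one Attribute with a single positional argument.
func attributeGroupFor() *ast.AttributeGroup {
	return &ast.AttributeGroup{
		Attrs: []ast.Vertex{
			&ast.Attribute{
				Name: &ast.Name{Parts: []ast.Vertex{&ast.NamePart{Value: []byte("Route")}}},
				Args: []ast.Vertex{
					&ast.Argument{Expr: &ast.ScalarString{Value: []byte(`"/index"`)}},
				},
			},
		},
	}
}
```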
// ScalarDnumber node
type ScalarDnumber struct {
Position *position.Position
@ -286,6 +340,7 @@ func (n *StmtCatch) GetPosition() *position.Position {
// StmtClass node
type StmtClass struct {
Position *position.Position
AttrGroups []Vertex
Modifiers []Vertex
ClassTkn *token.Token
Name Vertex
@ -314,6 +369,7 @@ func (n *StmtClass) GetPosition() *position.Position {
// StmtClassConstList node
type StmtClassConstList struct {
Position *position.Position
AttrGroups []Vertex
Modifiers []Vertex
ConstTkn *token.Token
Consts []Vertex
@ -332,6 +388,7 @@ func (n *StmtClassConstList) GetPosition() *position.Position {
// StmtClassMethod node
type StmtClassMethod struct {
Position *position.Position
AttrGroups []Vertex
Modifiers []Vertex
FunctionTkn *token.Token
AmpersandTkn *token.Token
@ -601,6 +658,7 @@ func (n *StmtForeach) GetPosition() *position.Position {
// StmtFunction node
type StmtFunction struct {
Position *position.Position
AttrGroups []Vertex
FunctionTkn *token.Token
AmpersandTkn *token.Token
Name Vertex
@ -714,6 +772,7 @@ func (n *StmtInlineHtml) GetPosition() *position.Position {
// StmtInterface node
type StmtInterface struct {
Position *position.Position
AttrGroups []Vertex
InterfaceTkn *token.Token
Name Vertex
ExtendsTkn *token.Token
@ -799,6 +858,7 @@ func (n *StmtProperty) GetPosition() *position.Position {
// StmtPropertyList node
type StmtPropertyList struct {
Position *position.Position
AttrGroups []Vertex
Modifiers []Vertex
Type Vertex
Props []Vertex
@ -922,6 +982,7 @@ func (n *StmtThrow) GetPosition() *position.Position {
// StmtTrait node
type StmtTrait struct {
Position *position.Position
AttrGroups []Vertex
TraitTkn *token.Token
Name Vertex
OpenCurlyBracketTkn *token.Token
@ -1171,6 +1232,7 @@ func (n *ExprArrayItem) GetPosition() *position.Position {
// ExprArrowFunction node
type ExprArrowFunction struct {
Position *position.Position
AttrGroups []Vertex
StaticTkn *token.Token
FnTkn *token.Token
AmpersandTkn *token.Token
@ -1271,6 +1333,7 @@ func (n *ExprClone) GetPosition() *position.Position {
// ExprClosure node
type ExprClosure struct {
Position *position.Position
AttrGroups []Vertex
StaticTkn *token.Token
FunctionTkn *token.Token
AmpersandTkn *token.Token
@ -1515,6 +1578,28 @@ func (n *ExprMethodCall) GetPosition() *position.Position {
return n.Position
}
// ExprNullsafeMethodCall node is $a?->methodName()
type ExprNullsafeMethodCall struct {
Position *position.Position
Var Vertex
ObjectOperatorTkn *token.Token
OpenCurlyBracketTkn *token.Token
Method Vertex
CloseCurlyBracketTkn *token.Token
OpenParenthesisTkn *token.Token
Args []Vertex
SeparatorTkns []*token.Token
CloseParenthesisTkn *token.Token
}
func (n *ExprNullsafeMethodCall) Accept(v Visitor) {
v.ExprNullsafeMethodCall(n)
}
func (n *ExprNullsafeMethodCall) GetPosition() *position.Position {
return n.Position
}
// ExprNew node
type ExprNew struct {
Position *position.Position
@ -1627,6 +1712,24 @@ func (n *ExprPropertyFetch) GetPosition() *position.Position {
return n.Position
}
// ExprNullsafePropertyFetch node is $a?->prop
type ExprNullsafePropertyFetch struct {
Position *position.Position
Var Vertex
ObjectOperatorTkn *token.Token
OpenCurlyBracketTkn *token.Token
Prop Vertex
CloseCurlyBracketTkn *token.Token
}
func (n *ExprNullsafePropertyFetch) Accept(v Visitor) {
v.ExprNullsafePropertyFetch(n)
}
func (n *ExprNullsafePropertyFetch) GetPosition() *position.Position {
return n.Position
}
// ExprRequire node
type ExprRequire struct {
Position *position.Position
@ -2586,6 +2689,62 @@ func (n *ExprBinarySpaceship) GetPosition() *position.Position {
return n.Position
}
// ExprMatch node is match(expr) { list<MatchArm> }
type ExprMatch struct {
Position *position.Position
MatchTkn *token.Token
OpenParenthesisTkn *token.Token
Expr Vertex
CloseParenthesisTkn *token.Token
OpenCurlyBracketTkn *token.Token
Arms []Vertex
SeparatorTkns []*token.Token
CloseCurlyBracketTkn *token.Token
}
func (n *ExprMatch) Accept(v Visitor) {
v.ExprMatch(n)
}
func (n *ExprMatch) GetPosition() *position.Position {
return n.Position
}
// ExprThrow node is 'throw Expr'
type ExprThrow struct {
Position *position.Position
ThrowTkn *token.Token
Expr Vertex
SemiColonTkn *token.Token
}
func (n *ExprThrow) Accept(v Visitor) {
v.ExprThrow(n)
}
func (n *ExprThrow) GetPosition() *position.Position {
return n.Position
}
// MatchArm node is [expr, expr1, ...]|default => return_expr
type MatchArm struct {
Position *position.Position
DefaultTkn *token.Token
DefaultCommaTkn *token.Token
Exprs []Vertex
SeparatorTkns []*token.Token
DoubleArrowTkn *token.Token
ReturnExpr Vertex
}
func (n *MatchArm) Accept(v Visitor) {
v.MatchArm(n)
}
func (n *MatchArm) GetPosition() *position.Position {
return n.Position
}
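
A minimal sketch of the nodes produced for `match ($x) { 1, 2 => "low", default => "high" }`; positions and layout tokens are omitted, and the ExprVariable, Identifier, and scalar node shapes are assumed from the rest of the ast package.

```go
package example

import "github.com/z7zmey/php-parser/pkg/ast"

// matchExprFor sketches an ExprMatch with one value arm and one default arm.
func matchExprFor() *ast.ExprMatch {
	return &ast.ExprMatch{
		Expr: &ast.ExprVariable{Name: &ast.Identifier{Value: []byte("$x")}},
		Arms: []ast.Vertex{
			&ast.MatchArm{
				Exprs: []ast.Vertex{
					&ast.ScalarLnumber{Value: []byte("1")},
					&ast.ScalarLnumber{Value: []byte("2")},
				},
				ReturnExpr: &ast.ScalarString{Value: []byte(`"low"`)},
			},
			&ast.MatchArm{
				// a default arm carries no Exprs; the parser sets DefaultTkn instead
				ReturnExpr: &ast.ScalarString{Value: []byte(`"high"`)},
			},
		},
	}
}
```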
type Name struct {
Position *position.Position
Parts []Vertex

View File

@ -5,6 +5,7 @@ import (
"github.com/z7zmey/php-parser/internal/php5"
"github.com/z7zmey/php-parser/internal/php7"
"github.com/z7zmey/php-parser/internal/php8"
"github.com/z7zmey/php-parser/internal/scanner"
"github.com/z7zmey/php-parser/pkg/ast"
"github.com/z7zmey/php-parser/pkg/conf"
@ -20,6 +21,9 @@ var (
php7RangeStart = &version.Version{Major: 7}
php7RangeEnd = &version.Version{Major: 7, Minor: 4}
php8RangeStart = &version.Version{Major: 8}
php8RangeEnd = &version.Version{Major: 8, Minor: 1}
)
// Parser interface
@ -49,5 +53,12 @@ func Parse(src []byte, config conf.Config) (ast.Vertex, error) {
return parser.GetRootNode(), nil
}
if config.Version.InRange(php8RangeStart, php8RangeEnd) {
lexer := php8.NewLexer(src, config)
parser = php8.NewParser(lexer, config)
parser.Parse()
return parser.GetRootNode(), nil
}
return nil, ErrVersionOutOfRange
}
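
Routing into the new branch only takes an 8.x version in the config; the snippet below is a minimal end-to-end sketch using the packages already referenced in this commit (the PHP source is illustrative).

```go
package main

import (
	"fmt"
	"os"

	"github.com/z7zmey/php-parser/pkg/conf"
	"github.com/z7zmey/php-parser/pkg/errors"
	"github.com/z7zmey/php-parser/pkg/parser"
	"github.com/z7zmey/php-parser/pkg/version"
	"github.com/z7zmey/php-parser/pkg/visitor/dumper"
)

func main() {
	src := []byte(`<?php $label = match (true) { default => "n/a" };`)

	root, err := parser.Parse(src, conf.Config{
		// 8.0 falls into the php8 range above, so the php8 lexer/parser is used.
		Version: &version.Version{Major: 8, Minor: 0},
		ErrorHandlerFunc: func(e *errors.Error) {
			fmt.Fprintln(os.Stderr, e)
		},
	})
	if err != nil {
		panic(err)
	}

	dumper.NewDumper(os.Stdout).WithPositions().Dump(root)
}
```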

View File

@ -144,6 +144,12 @@ const (
T_IS_NOT_EQUAL
T_IS_SMALLER_OR_EQUAL
T_IS_GREATER_OR_EQUAL
T_NULLSAFE_OBJECT_OPERATOR
T_MATCH
T_ATTRIBUTE
T_NAME_RELATIVE
T_NAME_QUALIFIED
T_NAME_FULLY_QUALIFIED
)
type Token struct {

View File

@ -146,11 +146,17 @@ func _() {
_ = x[T_IS_NOT_EQUAL-57481]
_ = x[T_IS_SMALLER_OR_EQUAL-57482]
_ = x[T_IS_GREATER_OR_EQUAL-57483]
_ = x[T_NULLSAFE_OBJECT_OPERATOR-57484]
_ = x[T_MATCH-57485]
_ = x[T_ATTRIBUTE-57486]
_ = x[T_NAME_RELATIVE-57487]
_ = x[T_NAME_QUALIFIED-57488]
_ = x[T_NAME_FULLY_QUALIFIED-57489]
}
const _ID_name = "T_INCLUDET_INCLUDE_ONCET_EXITT_IFT_LNUMBERT_DNUMBERT_STRINGT_STRING_VARNAMET_VARIABLET_NUM_STRINGT_INLINE_HTMLT_CHARACTERT_BAD_CHARACTERT_ENCAPSED_AND_WHITESPACET_CONSTANT_ENCAPSED_STRINGT_ECHOT_DOT_WHILET_ENDWHILET_FORT_ENDFORT_FOREACHT_ENDFOREACHT_DECLARET_ENDDECLARET_AST_SWITCHT_ENDSWITCHT_CASET_DEFAULTT_BREAKT_CONTINUET_GOTOT_FUNCTIONT_FNT_CONSTT_RETURNT_TRYT_CATCHT_FINALLYT_THROWT_USET_INSTEADOFT_GLOBALT_VART_UNSETT_ISSETT_EMPTYT_HALT_COMPILERT_CLASST_TRAITT_INTERFACET_EXTENDST_IMPLEMENTST_OBJECT_OPERATORT_DOUBLE_ARROWT_LISTT_ARRAYT_CALLABLET_CLASS_CT_TRAIT_CT_METHOD_CT_FUNC_CT_LINET_FILET_COMMENTT_DOC_COMMENTT_OPEN_TAGT_OPEN_TAG_WITH_ECHOT_CLOSE_TAGT_WHITESPACET_START_HEREDOCT_END_HEREDOCT_DOLLAR_OPEN_CURLY_BRACEST_CURLY_OPENT_PAAMAYIM_NEKUDOTAYIMT_NAMESPACET_NS_CT_DIRT_NS_SEPARATORT_ELLIPSIST_EVALT_REQUIRET_REQUIRE_ONCET_LOGICAL_ORT_LOGICAL_XORT_LOGICAL_ANDT_INSTANCEOFT_NEWT_CLONET_ELSEIFT_ELSET_ENDIFT_PRINTT_YIELDT_STATICT_ABSTRACTT_FINALT_PRIVATET_PROTECTEDT_PUBLICT_INCT_DECT_YIELD_FROMT_INT_CASTT_DOUBLE_CASTT_STRING_CASTT_ARRAY_CASTT_OBJECT_CASTT_BOOL_CASTT_UNSET_CASTT_COALESCET_SPACESHIPT_NOELSET_PLUS_EQUALT_MINUS_EQUALT_MUL_EQUALT_POW_EQUALT_DIV_EQUALT_CONCAT_EQUALT_MOD_EQUALT_AND_EQUALT_OR_EQUALT_XOR_EQUALT_SL_EQUALT_SR_EQUALT_COALESCE_EQUALT_BOOLEAN_ORT_BOOLEAN_ANDT_POWT_SLT_SRT_IS_IDENTICALT_IS_NOT_IDENTICALT_IS_EQUALT_IS_NOT_EQUALT_IS_SMALLER_OR_EQUALT_IS_GREATER_OR_EQUAL"
const _ID_name = "T_INCLUDET_INCLUDE_ONCET_EXITT_IFT_LNUMBERT_DNUMBERT_STRINGT_STRING_VARNAMET_VARIABLET_NUM_STRINGT_INLINE_HTMLT_CHARACTERT_BAD_CHARACTERT_ENCAPSED_AND_WHITESPACET_CONSTANT_ENCAPSED_STRINGT_ECHOT_DOT_WHILET_ENDWHILET_FORT_ENDFORT_FOREACHT_ENDFOREACHT_DECLARET_ENDDECLARET_AST_SWITCHT_ENDSWITCHT_CASET_DEFAULTT_BREAKT_CONTINUET_GOTOT_FUNCTIONT_FNT_CONSTT_RETURNT_TRYT_CATCHT_FINALLYT_THROWT_USET_INSTEADOFT_GLOBALT_VART_UNSETT_ISSETT_EMPTYT_HALT_COMPILERT_CLASST_TRAITT_INTERFACET_EXTENDST_IMPLEMENTST_OBJECT_OPERATORT_DOUBLE_ARROWT_LISTT_ARRAYT_CALLABLET_CLASS_CT_TRAIT_CT_METHOD_CT_FUNC_CT_LINET_FILET_COMMENTT_DOC_COMMENTT_OPEN_TAGT_OPEN_TAG_WITH_ECHOT_CLOSE_TAGT_WHITESPACET_START_HEREDOCT_END_HEREDOCT_DOLLAR_OPEN_CURLY_BRACEST_CURLY_OPENT_PAAMAYIM_NEKUDOTAYIMT_NAMESPACET_NS_CT_DIRT_NS_SEPARATORT_ELLIPSIST_EVALT_REQUIRET_REQUIRE_ONCET_LOGICAL_ORT_LOGICAL_XORT_LOGICAL_ANDT_INSTANCEOFT_NEWT_CLONET_ELSEIFT_ELSET_ENDIFT_PRINTT_YIELDT_STATICT_ABSTRACTT_FINALT_PRIVATET_PROTECTEDT_PUBLICT_INCT_DECT_YIELD_FROMT_INT_CASTT_DOUBLE_CASTT_STRING_CASTT_ARRAY_CASTT_OBJECT_CASTT_BOOL_CASTT_UNSET_CASTT_COALESCET_SPACESHIPT_NOELSET_PLUS_EQUALT_MINUS_EQUALT_MUL_EQUALT_POW_EQUALT_DIV_EQUALT_CONCAT_EQUALT_MOD_EQUALT_AND_EQUALT_OR_EQUALT_XOR_EQUALT_SL_EQUALT_SR_EQUALT_COALESCE_EQUALT_BOOLEAN_ORT_BOOLEAN_ANDT_POWT_SLT_SRT_IS_IDENTICALT_IS_NOT_IDENTICALT_IS_EQUALT_IS_NOT_EQUALT_IS_SMALLER_OR_EQUALT_IS_GREATER_OR_EQUALT_NULLSAFE_OBJECT_OPERATORT_MATCHT_ATTRIBUTET_NAME_RELATIVET_NAME_QUALIFIEDT_NAME_FULLY_QUALIFIED"
var _ID_index = [...]uint16{0, 9, 23, 29, 33, 42, 51, 59, 75, 85, 97, 110, 121, 136, 161, 187, 193, 197, 204, 214, 219, 227, 236, 248, 257, 269, 273, 281, 292, 298, 307, 314, 324, 330, 340, 344, 351, 359, 364, 371, 380, 387, 392, 403, 411, 416, 423, 430, 437, 452, 459, 466, 477, 486, 498, 515, 529, 535, 542, 552, 561, 570, 580, 588, 594, 600, 609, 622, 632, 652, 663, 675, 690, 703, 729, 741, 763, 774, 780, 785, 799, 809, 815, 824, 838, 850, 863, 876, 888, 893, 900, 908, 914, 921, 928, 935, 943, 953, 960, 969, 980, 988, 993, 998, 1010, 1020, 1033, 1046, 1058, 1071, 1082, 1094, 1104, 1115, 1123, 1135, 1148, 1159, 1170, 1181, 1195, 1206, 1217, 1227, 1238, 1248, 1258, 1274, 1286, 1299, 1304, 1308, 1312, 1326, 1344, 1354, 1368, 1389, 1410}
var _ID_index = [...]uint16{0, 9, 23, 29, 33, 42, 51, 59, 75, 85, 97, 110, 121, 136, 161, 187, 193, 197, 204, 214, 219, 227, 236, 248, 257, 269, 273, 281, 292, 298, 307, 314, 324, 330, 340, 344, 351, 359, 364, 371, 380, 387, 392, 403, 411, 416, 423, 430, 437, 452, 459, 466, 477, 486, 498, 515, 529, 535, 542, 552, 561, 570, 580, 588, 594, 600, 609, 622, 632, 652, 663, 675, 690, 703, 729, 741, 763, 774, 780, 785, 799, 809, 815, 824, 838, 850, 863, 876, 888, 893, 900, 908, 914, 921, 928, 935, 943, 953, 960, 969, 980, 988, 993, 998, 1010, 1020, 1033, 1046, 1058, 1071, 1082, 1094, 1104, 1115, 1123, 1135, 1148, 1159, 1170, 1181, 1195, 1206, 1217, 1227, 1238, 1248, 1258, 1274, 1286, 1299, 1304, 1308, 1312, 1326, 1344, 1354, 1368, 1389, 1410, 1436, 1443, 1454, 1469, 1485, 1507}
func (i ID) String() string {
i -= 57346

View File

@ -22,6 +22,9 @@ var (
php7RangeStart = &Version{Major: 7}
php7RangeEnd = &Version{Major: 7, Minor: 4}
php8RangeStart = &Version{Major: 8}
php8RangeEnd = &Version{Major: 8, Minor: 1}
)
func New(v string) (*Version, error) {
@ -48,7 +51,9 @@ func New(v string) (*Version, error) {
}
func (v *Version) Validate() error {
if !v.InRange(php5RangeStart, php5RangeEnd) && !v.InRange(php7RangeStart, php7RangeEnd) {
if !v.InRange(php5RangeStart, php5RangeEnd) &&
!v.InRange(php7RangeStart, php7RangeEnd) &&
!v.InRange(php8RangeStart, php8RangeEnd) {
return ErrUnsupportedVer
}
@ -75,7 +80,7 @@ func (v *Version) GreaterOrEqual(o *Version) bool {
return v.Compare(o) >= 0
}
// GreaterOrEqual tests if one version is greater than another one or equal
// InRange tests whether the version falls within the given range
func (v *Version) InRange(s, e *Version) bool {
return v.Compare(s) >= 0 && v.Compare(e) <= 0
}

View File

@ -1,9 +1,10 @@
package version_test
import (
"gotest.tools/assert"
"testing"
"gotest.tools/assert"
"github.com/z7zmey/php-parser/pkg/version"
)
@ -46,3 +47,19 @@ func TestInRange(t *testing.T) {
assert.NilError(t, err)
assert.Assert(t, ver.InRange(s, e))
}
func TestInRangePHP8(t *testing.T) {
s, err := version.New("8.0")
assert.NilError(t, err)
e, err := version.New("8.1")
assert.NilError(t, err)
ver, err := version.New("8.0")
assert.NilError(t, err)
assert.Assert(t, ver.InRange(s, e))
ver, err = version.New("8.1")
assert.NilError(t, err)
assert.Assert(t, ver.InRange(s, e))
}

View File

@ -1,12 +1,13 @@
package dumper
import (
"github.com/z7zmey/php-parser/pkg/position"
"github.com/z7zmey/php-parser/pkg/token"
"io"
"strconv"
"strings"
"github.com/z7zmey/php-parser/pkg/position"
"github.com/z7zmey/php-parser/pkg/token"
"github.com/z7zmey/php-parser/pkg/ast"
)
@ -192,6 +193,8 @@ func (v *Dumper) Parameter(n *ast.Parameter) {
v.indent++
v.dumpPosition(n.Position)
v.dumpVertexList("AttrGroups", n.AttrGroups)
v.dumpVertex("Visibility", n.Visibility)
v.dumpVertex("Type", n.Type)
v.dumpToken("AmpersandTkn", n.AmpersandTkn)
v.dumpToken("VariadicTkn", n.VariadicTkn)
@ -221,6 +224,8 @@ func (v *Dumper) Argument(n *ast.Argument) {
v.dumpPosition(n.Position)
v.dumpToken("AmpersandTkn", n.AmpersandTkn)
v.dumpVertex("Name", n.Name)
v.dumpToken("ColonTkn", n.ColonTkn)
v.dumpToken("VariadicTkn", n.VariadicTkn)
v.dumpVertex("Expr", n.Expr)
@ -228,6 +233,63 @@ func (v *Dumper) Argument(n *ast.Argument) {
v.print(v.indent, "},\n")
}
func (v *Dumper) MatchArm(n *ast.MatchArm) {
v.print(0, "&ast.MatchArm{\n")
v.indent++
v.dumpPosition(n.Position)
v.dumpToken("DefaultTkn", n.DefaultTkn)
v.dumpToken("DefaultCommaTkn", n.DefaultCommaTkn)
v.dumpVertexList("Exprs", n.Exprs)
v.dumpTokenList("SeparatorTkns", n.SeparatorTkns)
v.dumpToken("DoubleArrowTkn", n.DoubleArrowTkn)
v.dumpVertex("ReturnExpr", n.ReturnExpr)
v.indent--
v.print(v.indent, "},\n")
}
func (v *Dumper) Union(n *ast.Union) {
v.print(0, "&ast.Union{\n")
v.indent++
v.dumpPosition(n.Position)
v.dumpVertexList("Types", n.Types)
v.dumpTokenList("SeparatorTkns", n.SeparatorTkns)
v.indent--
v.print(v.indent, "},\n")
}
func (v *Dumper) Attribute(n *ast.Attribute) {
v.print(0, "&ast.Attribute{\n")
v.indent++
v.dumpPosition(n.Position)
v.dumpVertex("Name", n.Name)
v.dumpToken("OpenParenthesisTkn", n.OpenParenthesisTkn)
v.dumpVertexList("Args", n.Args)
v.dumpTokenList("SeparatorTkns", n.SeparatorTkns)
v.dumpToken("CloseParenthesisTkn", n.CloseParenthesisTkn)
v.indent--
v.print(v.indent, "},\n")
}
func (v *Dumper) AttributeGroup(n *ast.AttributeGroup) {
v.print(0, "&ast.AttributeGroup{\n")
v.indent++
v.dumpPosition(n.Position)
v.dumpToken("OpenAttributeTkn", n.OpenAttributeTkn)
v.dumpVertexList("Attrs", n.Attrs)
v.dumpTokenList("SeparatorTkns", n.SeparatorTkns)
v.dumpToken("CloseAttributeTkn", n.CloseAttributeTkn)
v.indent--
v.print(v.indent, "},\n")
}
func (v *Dumper) StmtBreak(n *ast.StmtBreak) {
v.print(0, "&ast.StmtBreak{\n")
v.indent++
@ -279,6 +341,7 @@ func (v *Dumper) StmtClass(n *ast.StmtClass) {
v.indent++
v.dumpPosition(n.Position)
v.dumpVertexList("AttrGroups", n.AttrGroups)
v.dumpVertexList("Modifiers", n.Modifiers)
v.dumpToken("ClassTkn", n.ClassTkn)
v.dumpVertex("Name", n.Name)
@ -304,6 +367,7 @@ func (v *Dumper) StmtClassConstList(n *ast.StmtClassConstList) {
v.indent++
v.dumpPosition(n.Position)
v.dumpVertexList("AttrGroups", n.AttrGroups)
v.dumpVertexList("Modifiers", n.Modifiers)
v.dumpToken("ConstTkn", n.ConstTkn)
v.dumpVertexList("Consts", n.Consts)
@ -319,6 +383,7 @@ func (v *Dumper) StmtClassMethod(n *ast.StmtClassMethod) {
v.indent++
v.dumpPosition(n.Position)
v.dumpVertexList("AttrGroups", n.AttrGroups)
v.dumpVertexList("Modifiers", n.Modifiers)
v.dumpToken("FunctionTkn", n.FunctionTkn)
v.dumpToken("AmpersandTkn", n.AmpersandTkn)
@ -547,6 +612,7 @@ func (v *Dumper) StmtFunction(n *ast.StmtFunction) {
v.indent++
v.dumpPosition(n.Position)
v.dumpVertexList("AttrGroups", n.AttrGroups)
v.dumpToken("FunctionTkn", n.FunctionTkn)
v.dumpToken("AmpersandTkn", n.AmpersandTkn)
v.dumpVertex("Name", n.Name)
@ -642,6 +708,7 @@ func (v *Dumper) StmtInterface(n *ast.StmtInterface) {
v.indent++
v.dumpPosition(n.Position)
v.dumpVertexList("AttrGroups", n.AttrGroups)
v.dumpToken("InterfaceTkn", n.InterfaceTkn)
v.dumpVertex("Name", n.Name)
v.dumpToken("ExtendsTkn", n.ExtendsTkn)
@ -712,6 +779,7 @@ func (v *Dumper) StmtPropertyList(n *ast.StmtPropertyList) {
v.indent++
v.dumpPosition(n.Position)
v.dumpVertexList("AttrGroups", n.AttrGroups)
v.dumpVertexList("Modifiers", n.Modifiers)
v.dumpVertex("Type", n.Type)
v.dumpVertexList("Props", n.Props)
@ -814,6 +882,7 @@ func (v *Dumper) StmtTrait(n *ast.StmtTrait) {
v.indent++
v.dumpPosition(n.Position)
v.dumpVertexList("AttrGroups", n.AttrGroups)
v.dumpToken("TraitTkn", n.TraitTkn)
v.dumpVertex("Name", n.Name)
v.dumpToken("OpenCurlyBracketTkn", n.OpenCurlyBracketTkn)
@ -1024,6 +1093,7 @@ func (v *Dumper) ExprArrowFunction(n *ast.ExprArrowFunction) {
v.indent++
v.dumpPosition(n.Position)
v.dumpVertexList("AttrGroups", n.AttrGroups)
v.dumpToken("StaticTkn", n.StaticTkn)
v.dumpToken("FnTkn", n.FnTkn)
v.dumpToken("AmpersandTkn", n.AmpersandTkn)
@ -1107,6 +1177,7 @@ func (v *Dumper) ExprClosure(n *ast.ExprClosure) {
v.indent++
v.dumpPosition(n.Position)
v.dumpVertexList("AttrGroups", n.AttrGroups)
v.dumpToken("StaticTkn", n.StaticTkn)
v.dumpToken("FunctionTkn", n.FunctionTkn)
v.dumpToken("AmpersandTkn", n.AmpersandTkn)
@ -1307,6 +1378,25 @@ func (v *Dumper) ExprMethodCall(n *ast.ExprMethodCall) {
v.print(v.indent, "},\n")
}
func (v *Dumper) ExprNullsafeMethodCall(n *ast.ExprNullsafeMethodCall) {
v.print(0, "&ast.ExprNullsafeMethodCall{\n")
v.indent++
v.dumpPosition(n.Position)
v.dumpVertex("Var", n.Var)
v.dumpToken("ObjectOperatorTkn", n.ObjectOperatorTkn)
v.dumpToken("OpenCurlyBracketTkn", n.OpenCurlyBracketTkn)
v.dumpVertex("Method", n.Method)
v.dumpToken("CloseCurlyBracketTkn", n.CloseCurlyBracketTkn)
v.dumpToken("OpenParenthesisTkn", n.OpenParenthesisTkn)
v.dumpVertexList("Args", n.Args)
v.dumpTokenList("SeparatorTkns", n.SeparatorTkns)
v.dumpToken("CloseParenthesisTkn", n.CloseParenthesisTkn)
v.indent--
v.print(v.indent, "},\n")
}
func (v *Dumper) ExprNew(n *ast.ExprNew) {
v.print(0, "&ast.ExprNew{\n")
v.indent++
@ -1398,6 +1488,21 @@ func (v *Dumper) ExprPropertyFetch(n *ast.ExprPropertyFetch) {
v.print(v.indent, "},\n")
}
func (v *Dumper) ExprNullsafePropertyFetch(n *ast.ExprNullsafePropertyFetch) {
v.print(0, "&ast.ExprNullsafePropertyFetch{\n")
v.indent++
v.dumpPosition(n.Position)
v.dumpVertex("Var", n.Var)
v.dumpToken("ObjectOperatorTkn", n.ObjectOperatorTkn)
v.dumpToken("OpenCurlyBracketTkn", n.OpenCurlyBracketTkn)
v.dumpVertex("Prop", n.Prop)
v.dumpToken("CloseCurlyBracketTkn", n.CloseCurlyBracketTkn)
v.indent--
v.print(v.indent, "},\n")
}
func (v *Dumper) ExprRequire(n *ast.ExprRequire) {
v.print(0, "&ast.ExprRequire{\n")
v.indent++
@ -2177,6 +2282,37 @@ func (v *Dumper) ExprCastUnset(n *ast.ExprCastUnset) {
v.print(v.indent, "},\n")
}
func (v *Dumper) ExprMatch(n *ast.ExprMatch) {
v.print(0, "&ast.ExprMatch{\n")
v.indent++
v.dumpPosition(n.Position)
v.dumpToken("MatchTkn", n.MatchTkn)
v.dumpToken("OpenParenthesisTkn", n.OpenParenthesisTkn)
v.dumpVertex("Expr", n.Expr)
v.dumpToken("CloseParenthesisTkn", n.CloseParenthesisTkn)
v.dumpToken("OpenCurlyBracketTkn", n.OpenCurlyBracketTkn)
v.dumpVertexList("Arms", n.Arms)
v.dumpTokenList("SeparatorTkns", n.SeparatorTkns)
v.dumpToken("CloseCurlyBracketTkn", n.CloseCurlyBracketTkn)
v.indent--
v.print(v.indent, "},\n")
}
func (v *Dumper) ExprThrow(n *ast.ExprThrow) {
v.print(0, "&ast.ExprThrow{\n")
v.indent++
v.dumpPosition(n.Position)
v.dumpToken("ThrowTkn", n.ThrowTkn)
v.dumpVertex("Expr", n.Expr)
v.dumpToken("SemiColonTkn", n.SemiColonTkn)
v.indent--
v.print(v.indent, "},\n")
}
func (v *Dumper) ScalarDnumber(n *ast.ScalarDnumber) {
v.print(0, "&ast.ScalarDnumber{\n")
v.indent++

View File

@ -2,6 +2,7 @@ package formatter
import (
"bytes"
"github.com/z7zmey/php-parser/pkg/ast"
"github.com/z7zmey/php-parser/pkg/token"
)
@ -153,6 +154,18 @@ func (f *formatter) Nullable(n *ast.Nullable) {
}
func (f *formatter) Parameter(n *ast.Parameter) {
if n.AttrGroups != nil {
for _, group := range n.AttrGroups {
group.Accept(f)
}
f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
}
if n.Visibility != nil {
n.Visibility.Accept(f)
f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
}
if n.Type != nil {
n.Type.Accept(f)
f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
@ -185,6 +198,14 @@ func (f *formatter) Identifier(n *ast.Identifier) {
}
func (f *formatter) Argument(n *ast.Argument) {
if n.Name != nil {
n.Name.Accept(f)
}
if n.ColonTkn != nil {
n.ColonTkn = f.newToken(':', []byte(":"))
}
if n.VariadicTkn != nil {
n.VariadicTkn = f.newToken(token.T_ELLIPSIS, []byte("..."))
}
@ -196,6 +217,45 @@ func (f *formatter) Argument(n *ast.Argument) {
n.Expr.Accept(f)
}
func (f *formatter) MatchArm(n *ast.MatchArm) {
if n.DefaultTkn != nil {
n.DefaultTkn = f.newToken(token.T_DEFAULT, []byte("default"))
}
if n.DefaultCommaTkn != nil {
n.DefaultCommaTkn = f.newToken(',', []byte(","))
}
n.SeparatorTkns = nil
if len(n.Exprs) > 0 {
n.SeparatorTkns = f.formatList(n.Exprs, ',')
}
n.DoubleArrowTkn = f.newToken(token.T_DOUBLE_ARROW, []byte("=>"))
f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
// format the arm's result expression, mirroring the other formatter methods
n.ReturnExpr.Accept(f)
}
func (f *formatter) Union(n *ast.Union) {
if len(n.Types) > 0 {
n.SeparatorTkns = f.formatList(n.Types, '|')
}
}
func (f *formatter) Attribute(n *ast.Attribute) {
n.Name.Accept(f)
n.OpenParenthesisTkn = f.newToken('(', []byte("("))
n.SeparatorTkns = nil
if len(n.Args) > 0 {
n.SeparatorTkns = f.formatList(n.Args, ',')
}
n.CloseParenthesisTkn = f.newToken(')', []byte(")"))
}
func (f *formatter) AttributeGroup(n *ast.AttributeGroup) {
n.OpenAttributeTkn = f.newToken(token.T_ATTRIBUTE, []byte("#["))
n.SeparatorTkns = nil
if len(n.Attrs) > 0 {
n.SeparatorTkns = f.formatList(n.Attrs, ',')
}
n.CloseAttributeTkn = f.newToken(']', []byte("]"))
}
func (f *formatter) StmtBreak(n *ast.StmtBreak) {
n.BreakTkn = f.newToken(token.T_BREAK, []byte("break"))
@ -307,6 +367,10 @@ func (f *formatter) StmtClass(n *ast.StmtClass) {
}
func (f *formatter) StmtClassConstList(n *ast.StmtClassConstList) {
for _, m := range n.AttrGroups {
m.Accept(f)
f.addFreeFloating(token.T_WHITESPACE, []byte("\n"))
}
for _, m := range n.Modifiers {
m.Accept(f)
f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
@ -736,6 +800,10 @@ func (f *formatter) StmtProperty(n *ast.StmtProperty) {
}
func (f *formatter) StmtPropertyList(n *ast.StmtPropertyList) {
for _, m := range n.AttrGroups {
m.Accept(f)
f.addFreeFloating(token.T_WHITESPACE, []byte("\n"))
}
for _, m := range n.Modifiers {
m.Accept(f)
f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
@ -1061,6 +1129,13 @@ func (f *formatter) ExprArrayItem(n *ast.ExprArrayItem) {
}
func (f *formatter) ExprArrowFunction(n *ast.ExprArrowFunction) {
if n.AttrGroups != nil {
for _, group := range n.AttrGroups {
group.Accept(f)
}
f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
}
if n.StaticTkn != nil {
n.StaticTkn = f.newToken(token.T_STATIC, []byte("static"))
f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
@ -1123,6 +1198,13 @@ func (f *formatter) ExprClone(n *ast.ExprClone) {
}
func (f *formatter) ExprClosure(n *ast.ExprClosure) {
if n.AttrGroups != nil {
for _, group := range n.AttrGroups {
group.Accept(f)
}
f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
}
if n.StaticTkn != nil {
n.StaticTkn = f.newToken(token.T_STATIC, []byte("static"))
f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
@ -1287,6 +1369,30 @@ func (f *formatter) ExprMethodCall(n *ast.ExprMethodCall) {
n.CloseParenthesisTkn = f.newToken(')', []byte(")"))
}
func (f *formatter) ExprNullsafeMethodCall(n *ast.ExprNullsafeMethodCall) {
n.Var.Accept(f)
n.ObjectOperatorTkn = f.newToken(token.T_NULLSAFE_OBJECT_OPERATOR, []byte("?->"))
n.OpenCurlyBracketTkn = nil
n.CloseCurlyBracketTkn = nil
switch n.Method.(type) {
case *ast.Identifier:
case *ast.ExprVariable:
default:
n.OpenCurlyBracketTkn = f.newToken('{', []byte("{"))
n.CloseCurlyBracketTkn = f.newToken('}', []byte("}"))
}
n.Method.Accept(f)
n.OpenParenthesisTkn = f.newToken('(', []byte("("))
n.SeparatorTkns = nil
if len(n.Args) > 0 {
n.SeparatorTkns = f.formatList(n.Args, ',')
}
n.CloseParenthesisTkn = f.newToken(')', []byte(")"))
}
func (f *formatter) ExprNew(n *ast.ExprNew) {
n.NewTkn = f.newToken(token.T_NEW, []byte("new"))
f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
@ -1347,6 +1453,23 @@ func (f *formatter) ExprPropertyFetch(n *ast.ExprPropertyFetch) {
n.Prop.Accept(f)
}
func (f *formatter) ExprNullsafePropertyFetch(n *ast.ExprNullsafePropertyFetch) {
n.Var.Accept(f)
n.ObjectOperatorTkn = f.newToken(token.T_NULLSAFE_OBJECT_OPERATOR, []byte("?->"))
n.OpenCurlyBracketTkn = nil
n.CloseCurlyBracketTkn = nil
switch n.Prop.(type) {
case *ast.Identifier:
case *ast.ExprVariable:
default:
n.OpenCurlyBracketTkn = f.newToken('{', []byte("{"))
n.CloseCurlyBracketTkn = f.newToken('}', []byte("}"))
}
n.Prop.Accept(f)
}
func (f *formatter) ExprRequire(n *ast.ExprRequire) {
n.RequireTkn = f.newToken(token.T_REQUIRE, []byte("require"))
f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
@ -1916,6 +2039,27 @@ func (f *formatter) ExprCastUnset(n *ast.ExprCastUnset) {
n.Expr.Accept(f)
}
func (f *formatter) ExprMatch(n *ast.ExprMatch) {
n.MatchTkn = f.newToken(token.T_MATCH, []byte("match"))
n.OpenParenthesisTkn = f.newToken('(', []byte("("))
// format the subject expression, mirroring the other Expr* formatters
n.Expr.Accept(f)
n.CloseParenthesisTkn = f.newToken(')', []byte(")"))
n.OpenCurlyBracketTkn = f.newToken('{', []byte("{"))
n.SeparatorTkns = nil
if len(n.Arms) > 0 {
n.SeparatorTkns = f.formatList(n.Arms, ',')
}
n.CloseCurlyBracketTkn = f.newToken('}', []byte("}"))
}
func (f *formatter) ExprThrow(n *ast.ExprThrow) {
n.ThrowTkn = f.newToken(token.T_THROW, []byte("throw"))
f.addFreeFloating(token.T_WHITESPACE, []byte(" "))
n.Expr.Accept(f)
n.SemiColonTkn = f.newSemicolonTkn()
}
func (f *formatter) ScalarDnumber(n *ast.ScalarDnumber) {
if n.NumberTkn == nil {
n.NumberTkn = f.newToken(token.T_STRING, n.Value)

View File

@ -3,9 +3,10 @@ package nsresolver
import (
"errors"
"strings"
"github.com/z7zmey/php-parser/pkg/ast"
"github.com/z7zmey/php-parser/pkg/visitor"
"strings"
)
// NamespaceResolver visitor

View File

@ -42,6 +42,22 @@ func (v *Null) Argument(_ *ast.Argument) {
// do nothing
}
func (v *Null) MatchArm(_ *ast.MatchArm) {
// do nothing
}
func (v *Null) Union(_ *ast.Union) {
// do nothing
}
func (v *Null) Attribute(_ *ast.Attribute) {
// do nothing
}
func (v *Null) AttributeGroup(_ *ast.AttributeGroup) {
// do nothing
}
func (v *Null) StmtBreak(_ *ast.StmtBreak) {
// do nothing
}
@ -322,6 +338,10 @@ func (v *Null) ExprMethodCall(_ *ast.ExprMethodCall) {
// do nothing
}
func (v *Null) ExprNullsafeMethodCall(_ *ast.ExprNullsafeMethodCall) {
// do nothing
}
func (v *Null) ExprNew(_ *ast.ExprNew) {
// do nothing
}
@ -350,6 +370,10 @@ func (v *Null) ExprPropertyFetch(_ *ast.ExprPropertyFetch) {
// do nothing
}
func (v *Null) ExprNullsafePropertyFetch(_ *ast.ExprNullsafePropertyFetch) {
// do nothing
}
func (v *Null) ExprRequire(_ *ast.ExprRequire) {
// do nothing
}
@ -590,6 +614,14 @@ func (v *Null) ExprCastUnset(_ *ast.ExprCastUnset) {
// do nothing
}
func (v *Null) ExprMatch(_ *ast.ExprMatch) {
// do nothing
}
func (v *Null) ExprThrow(_ *ast.ExprThrow) {
// do nothing
}
func (v *Null) ScalarDnumber(_ *ast.ScalarDnumber) {
// do nothing
}
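
Because Null implements every Visitor method as a no-op, downstream code can embed it and override only the new PHP 8 callbacks. The finder below is a hypothetical sketch (it assumes Null lives in pkg/visitor, as the nsresolver import above suggests) and would be driven over a parsed tree by the traverser in pkg/visitor/traverser.

```go
package analysis

import (
	"github.com/z7zmey/php-parser/pkg/ast"
	"github.com/z7zmey/php-parser/pkg/visitor"
)

// php8Finder counts PHP 8 specific constructs; every other node type falls
// through to the embedded Null visitor's do-nothing methods.
type php8Finder struct {
	visitor.Null
	matchExprs int
	nullsafe   int
}

func (f *php8Finder) ExprMatch(_ *ast.ExprMatch)                                 { f.matchExprs++ }
func (f *php8Finder) ExprNullsafeMethodCall(_ *ast.ExprNullsafeMethodCall)       { f.nullsafe++ }
func (f *php8Finder) ExprNullsafePropertyFetch(_ *ast.ExprNullsafePropertyFetch) { f.nullsafe++ }
```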

View File

@ -2,9 +2,10 @@ package printer
import (
"bytes"
"io"
"github.com/z7zmey/php-parser/pkg/ast"
"github.com/z7zmey/php-parser/pkg/token"
"io"
)
type printerState int
@ -145,6 +146,8 @@ func (p *printer) Nullable(n *ast.Nullable) {
}
func (p *printer) Parameter(n *ast.Parameter) {
p.printList(n.AttrGroups)
p.printNode(n.Visibility)
p.printNode(n.Type)
p.printToken(n.AmpersandTkn, nil)
p.printToken(n.VariadicTkn, nil)
@ -158,11 +161,38 @@ func (p *printer) Identifier(n *ast.Identifier) {
}
func (p *printer) Argument(n *ast.Argument) {
p.printNode(n.Name)
p.printToken(n.ColonTkn, nil)
p.printToken(n.VariadicTkn, nil)
p.printToken(n.AmpersandTkn, nil)
p.printNode(n.Expr)
}
func (p *printer) MatchArm(n *ast.MatchArm) {
p.printToken(n.DefaultTkn, nil)
p.printToken(n.DefaultCommaTkn, nil)
p.printSeparatedList(n.Exprs, n.SeparatorTkns, []byte(","))
p.printToken(n.DoubleArrowTkn, []byte("=>"))
p.printNode(n.ReturnExpr)
}
func (p *printer) Union(n *ast.Union) {
p.printSeparatedList(n.Types, n.SeparatorTkns, []byte("|"))
}
func (p *printer) Attribute(n *ast.Attribute) {
p.printNode(n.Name)
p.printToken(n.OpenParenthesisTkn, p.ifNodeList(n.Args, []byte("(")))
p.printSeparatedList(n.Args, n.SeparatorTkns, []byte(","))
p.printToken(n.CloseParenthesisTkn, p.ifNodeList(n.Args, []byte(")")))
}
func (p *printer) AttributeGroup(n *ast.AttributeGroup) {
p.printToken(n.OpenAttributeTkn, []byte("#["))
p.printSeparatedList(n.Attrs, n.SeparatorTkns, []byte(","))
p.printToken(n.CloseAttributeTkn, []byte("]"))
}
func (p *printer) StmtBreak(n *ast.StmtBreak) {
p.printToken(n.BreakTkn, []byte("break"))
p.printNode(n.Expr)
@ -188,6 +218,7 @@ func (p *printer) StmtCatch(n *ast.StmtCatch) {
}
func (p *printer) StmtClass(n *ast.StmtClass) {
p.printList(n.AttrGroups)
p.printList(n.Modifiers)
p.printToken(n.ClassTkn, []byte("class"))
p.printNode(n.Name)
@ -204,6 +235,7 @@ func (p *printer) StmtClass(n *ast.StmtClass) {
}
func (p *printer) StmtClassConstList(n *ast.StmtClassConstList) {
p.printList(n.AttrGroups)
p.printList(n.Modifiers)
p.printToken(n.ConstTkn, []byte("const"))
p.printSeparatedList(n.Consts, n.SeparatorTkns, []byte(","))
@ -211,6 +243,7 @@ func (p *printer) StmtClassConstList(n *ast.StmtClassConstList) {
}
func (p *printer) StmtClassMethod(n *ast.StmtClassMethod) {
p.printList(n.AttrGroups)
p.printList(n.Modifiers)
p.printToken(n.FunctionTkn, []byte("function"))
p.printToken(n.AmpersandTkn, nil)
@ -364,6 +397,7 @@ func (p *printer) StmtForeach(n *ast.StmtForeach) {
}
func (p *printer) StmtFunction(n *ast.StmtFunction) {
p.printList(n.AttrGroups)
p.printToken(n.FunctionTkn, []byte("function"))
p.printToken(n.AmpersandTkn, nil)
p.printNode(n.Name)
@ -426,6 +460,7 @@ func (p *printer) StmtInlineHtml(n *ast.StmtInlineHtml) {
}
func (p *printer) StmtInterface(n *ast.StmtInterface) {
p.printList(n.AttrGroups)
p.printToken(n.InterfaceTkn, []byte("interface"))
p.printNode(n.Name)
p.printToken(n.ExtendsTkn, p.ifNodeList(n.Extends, []byte("extends")))
@ -460,6 +495,7 @@ func (p *printer) StmtProperty(n *ast.StmtProperty) {
}
func (p *printer) StmtPropertyList(n *ast.StmtPropertyList) {
p.printList(n.AttrGroups)
p.printList(n.Modifiers)
p.printNode(n.Type)
p.printSeparatedList(n.Props, n.SeparatorTkns, []byte(","))
@ -511,6 +547,7 @@ func (p *printer) StmtThrow(n *ast.StmtThrow) {
}
func (p *printer) StmtTrait(n *ast.StmtTrait) {
p.printList(n.AttrGroups)
p.printToken(n.TraitTkn, []byte("trait"))
p.printNode(n.Name)
p.printToken(n.OpenCurlyBracketTkn, []byte("{"))
@ -630,6 +667,7 @@ func (p *printer) ExprArrayItem(n *ast.ExprArrayItem) {
}
func (p *printer) ExprArrowFunction(n *ast.ExprArrowFunction) {
p.printList(n.AttrGroups)
p.printToken(n.StaticTkn, nil)
p.printToken(n.FnTkn, []byte("fn"))
p.printToken(n.AmpersandTkn, nil)
@ -670,6 +708,7 @@ func (p *printer) ExprClone(n *ast.ExprClone) {
}
func (p *printer) ExprClosure(n *ast.ExprClosure) {
p.printList(n.AttrGroups)
p.printToken(n.StaticTkn, nil)
p.printToken(n.FunctionTkn, []byte("function"))
p.printToken(n.AmpersandTkn, nil)
@ -770,6 +809,17 @@ func (p *printer) ExprMethodCall(n *ast.ExprMethodCall) {
p.printToken(n.CloseParenthesisTkn, []byte(")"))
}
func (p *printer) ExprNullsafeMethodCall(n *ast.ExprNullsafeMethodCall) {
p.printNode(n.Var)
p.printToken(n.ObjectOperatorTkn, []byte("?->"))
p.printToken(n.OpenCurlyBracketTkn, nil)
p.printNode(n.Method)
p.printToken(n.CloseCurlyBracketTkn, nil)
p.printToken(n.OpenParenthesisTkn, []byte("("))
p.printSeparatedList(n.Args, n.SeparatorTkns, []byte(","))
p.printToken(n.CloseParenthesisTkn, []byte(")"))
}
func (p *printer) ExprNew(n *ast.ExprNew) {
p.printToken(n.NewTkn, []byte("new"))
p.printNode(n.Class)
@ -811,6 +861,14 @@ func (p *printer) ExprPropertyFetch(n *ast.ExprPropertyFetch) {
p.printToken(n.CloseCurlyBracketTkn, nil)
}
func (p *printer) ExprNullsafePropertyFetch(n *ast.ExprNullsafePropertyFetch) {
p.printNode(n.Var)
p.printToken(n.ObjectOperatorTkn, []byte("?->"))
p.printToken(n.OpenCurlyBracketTkn, nil)
p.printNode(n.Prop)
p.printToken(n.CloseCurlyBracketTkn, nil)
}
func (p *printer) ExprRequire(n *ast.ExprRequire) {
p.printToken(n.RequireTkn, []byte("require"))
p.printNode(n.Expr)
@ -1169,6 +1227,21 @@ func (p *printer) ExprCastUnset(n *ast.ExprCastUnset) {
p.printNode(n.Expr)
}
func (p *printer) ExprMatch(n *ast.ExprMatch) {
p.printToken(n.MatchTkn, []byte("match"))
p.printToken(n.OpenParenthesisTkn, []byte("("))
p.printNode(n.Expr)
p.printToken(n.CloseParenthesisTkn, []byte(")"))
p.printToken(n.OpenCurlyBracketTkn, []byte("{"))
p.printSeparatedList(n.Arms, n.SeparatorTkns, []byte(","))
p.printToken(n.CloseCurlyBracketTkn, []byte("}"))
}
func (p *printer) ExprThrow(n *ast.ExprThrow) {
p.printToken(n.ThrowTkn, []byte("throw"))
p.printNode(n.Expr)
}
func (p *printer) ScalarDnumber(n *ast.ScalarDnumber) {
p.printToken(n.NumberTkn, n.Value)
}

File diff suppressed because it is too large

View File

@ -37,6 +37,11 @@ func (t *Traverser) Nullable(n *ast.Nullable) {
func (t *Traverser) Parameter(n *ast.Parameter) {
n.Accept(t.v)
for _, nn := range n.AttrGroups {
nn.Accept(t)
}
t.Traverse(n.Visibility)
t.Traverse(n.Type)
t.Traverse(n.Var)
t.Traverse(n.DefaultValue)
@ -49,9 +54,44 @@ func (t *Traverser) Identifier(n *ast.Identifier) {
func (t *Traverser) Argument(n *ast.Argument) {
n.Accept(t.v)
t.Traverse(n.Name)
t.Traverse(n.Expr)
}
func (t *Traverser) MatchArm(n *ast.MatchArm) {
n.Accept(t.v)
for _, nn := range n.Exprs {
nn.Accept(t)
}
t.Traverse(n.ReturnExpr)
}
func (t *Traverser) Union(n *ast.Union) {
n.Accept(t.v)
for _, nn := range n.Types {
nn.Accept(t)
}
}
func (t *Traverser) Attribute(n *ast.Attribute) {
n.Accept(t.v)
t.Traverse(n.Name)
for _, nn := range n.Args {
nn.Accept(t)
}
}
func (t *Traverser) AttributeGroup(n *ast.AttributeGroup) {
n.Accept(t.v)
for _, nn := range n.Attrs {
nn.Accept(t)
}
}
func (t *Traverser) StmtBreak(n *ast.StmtBreak) {
n.Accept(t.v)
@ -82,6 +122,9 @@ func (t *Traverser) StmtCatch(n *ast.StmtCatch) {
func (t *Traverser) StmtClass(n *ast.StmtClass) {
n.Accept(t.v)
for _, nn := range n.AttrGroups {
nn.Accept(t)
}
for _, nn := range n.Modifiers {
nn.Accept(t)
}
@ -101,6 +144,9 @@ func (t *Traverser) StmtClass(n *ast.StmtClass) {
func (t *Traverser) StmtClassConstList(n *ast.StmtClassConstList) {
n.Accept(t.v)
for _, nn := range n.AttrGroups {
nn.Accept(t)
}
for _, nn := range n.Modifiers {
nn.Accept(t)
}
@ -112,6 +158,9 @@ func (t *Traverser) StmtClassConstList(n *ast.StmtClassConstList) {
func (t *Traverser) StmtClassMethod(n *ast.StmtClassMethod) {
n.Accept(t.v)
for _, nn := range n.AttrGroups {
nn.Accept(t)
}
for _, nn := range n.Modifiers {
nn.Accept(t)
}
@ -230,6 +279,9 @@ func (t *Traverser) StmtForeach(n *ast.StmtForeach) {
func (t *Traverser) StmtFunction(n *ast.StmtFunction) {
n.Accept(t.v)
for _, nn := range n.AttrGroups {
nn.Accept(t)
}
t.Traverse(n.Name)
for _, nn := range n.Params {
nn.Accept(t)
@ -276,6 +328,9 @@ func (t *Traverser) StmtInlineHtml(n *ast.StmtInlineHtml) {
func (t *Traverser) StmtInterface(n *ast.StmtInterface) {
n.Accept(t.v)
for _, nn := range n.AttrGroups {
nn.Accept(t)
}
t.Traverse(n.Name)
for _, nn := range n.Extends {
nn.Accept(t)
@ -314,6 +369,9 @@ func (t *Traverser) StmtProperty(n *ast.StmtProperty) {
func (t *Traverser) StmtPropertyList(n *ast.StmtPropertyList) {
n.Accept(t.v)
for _, nn := range n.AttrGroups {
nn.Accept(t)
}
for _, nn := range n.Modifiers {
nn.Accept(t)
}
@ -370,6 +428,9 @@ func (t *Traverser) StmtThrow(n *ast.StmtThrow) {
func (t *Traverser) StmtTrait(n *ast.StmtTrait) {
n.Accept(t.v)
for _, nn := range n.AttrGroups {
nn.Accept(t)
}
t.Traverse(n.Name)
for _, nn := range n.Stmts {
nn.Accept(t)
@ -485,6 +546,9 @@ func (t *Traverser) ExprArrayItem(n *ast.ExprArrayItem) {
func (t *Traverser) ExprArrowFunction(n *ast.ExprArrowFunction) {
n.Accept(t.v)
for _, nn := range n.AttrGroups {
nn.Accept(t)
}
for _, nn := range n.Params {
nn.Accept(t)
}
@ -526,6 +590,9 @@ func (t *Traverser) ExprClone(n *ast.ExprClone) {
func (t *Traverser) ExprClosure(n *ast.ExprClosure) {
n.Accept(t.v)
for _, nn := range n.AttrGroups {
nn.Accept(t)
}
for _, nn := range n.Params {
nn.Accept(t)
}
@ -628,6 +695,16 @@ func (t *Traverser) ExprMethodCall(n *ast.ExprMethodCall) {
}
}
func (t *Traverser) ExprNullsafeMethodCall(n *ast.ExprNullsafeMethodCall) {
n.Accept(t.v)
t.Traverse(n.Var)
t.Traverse(n.Method)
for _, nn := range n.Args {
nn.Accept(t)
}
}
func (t *Traverser) ExprNew(n *ast.ExprNew) {
n.Accept(t.v)
@ -674,6 +751,13 @@ func (t *Traverser) ExprPropertyFetch(n *ast.ExprPropertyFetch) {
t.Traverse(n.Prop)
}
func (t *Traverser) ExprNullsafePropertyFetch(n *ast.ExprNullsafePropertyFetch) {
n.Accept(t.v)
t.Traverse(n.Var)
t.Traverse(n.Prop)
}
func (t *Traverser) ExprRequire(n *ast.ExprRequire) {
n.Accept(t.v)
@ -1086,6 +1170,21 @@ func (t *Traverser) ExprCastUnset(n *ast.ExprCastUnset) {
t.Traverse(n.Expr)
}
func (t *Traverser) ExprMatch(n *ast.ExprMatch) {
n.Accept(t.v)
t.Traverse(n.Expr)
for _, nn := range n.Arms {
nn.Accept(t)
}
}
func (t *Traverser) ExprThrow(n *ast.ExprThrow) {
n.Accept(t.v)
t.Traverse(n.Expr)
}
func (t *Traverser) ScalarDnumber(n *ast.ScalarDnumber) {
n.Accept(t.v)
}