merge DocComment and PlainComment

parent e65ace8984
commit c2f938e55c
@@ -1,16 +1,17 @@
 package comment
 
-import "github.com/z7zmey/php-parser/node"
-
-// Comment represents comment lines in the code
-type Comment interface {
-    String() string
+// Comment aggrigates information about comment /**
+type Comment struct {
+    value string
 }
 
-// Comments a collection of comment groups assigned to nodes
-type Comments map[node.Node][]Comment
-
-// AddComments add comment group to the collection
-func (c Comments) AddComments(node node.Node, comments []Comment) {
-    c[node] = append(c[node], comments...)
+// NewComment - Comment constructor
+func NewComment(value string) *Comment {
+    return &Comment{
+        value,
+    }
+}
+
+func (c *Comment) String() string {
+    return c.value
 }
@@ -10,9 +10,9 @@ import (
 func TestComments(t *testing.T) {
     n := node.NewIdentifier("test")
 
-    commentGroup := []comment.Comment{
-        comment.NewDocComment("/** hello world */"),
-        comment.NewPlainComment("// hello world"),
+    commentGroup := []*comment.Comment{
+        comment.NewComment("/** hello world */"),
+        comment.NewComment("// hello world"),
     }
 
     comments := comment.Comments{}
comment/comments.go (new file, 11 lines)
@@ -0,0 +1,11 @@
+package comment
+
+import "github.com/z7zmey/php-parser/node"
+
+// Comments a collection of comment groups assigned to nodes
+type Comments map[node.Node][]*Comment
+
+// AddComments add comment group to the collection
+func (c Comments) AddComments(node node.Node, comments []*Comment) {
+    c[node] = append(c[node], comments...)
+}
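For orientation (not part of the commit): a minimal usage sketch of the merged comment API above, reusing only identifiers that appear in this diff (comment.NewComment, comment.Comments, AddComments, node.NewIdentifier).

    package main

    import (
        "fmt"

        "github.com/z7zmey/php-parser/comment"
        "github.com/z7zmey/php-parser/node"
    )

    func main() {
        // After this change there is a single comment kind: doc comments and
        // plain comments are both plain *comment.Comment values.
        group := []*comment.Comment{
            comment.NewComment("/** hello world */"),
            comment.NewComment("// hello world"),
        }

        // comment.Comments maps a node to the comment groups attached to it.
        n := node.NewIdentifier("test")
        comments := comment.Comments{}
        comments.AddComments(n, group)

        for _, c := range comments[n] {
            fmt.Println(c.String())
        }
    }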
@@ -1,17 +0,0 @@
-package comment
-
-// DocComment represents comments that start /**
-type DocComment struct {
-    value string
-}
-
-// NewDocComment - DocComment constructor
-func NewDocComment(value string) *DocComment {
-    return &DocComment{
-        value,
-    }
-}
-
-func (c *DocComment) String() string {
-    return c.value
-}
@@ -1,17 +0,0 @@
-package comment
-
-// PlainComment represents comments that dont start /**
-type PlainComment struct {
-    value string
-}
-
-// NewPlainComment - PlainComment constructor
-func NewPlainComment(value string) *PlainComment {
-    return &PlainComment{
-        value,
-    }
-}
-
-func (c *PlainComment) String() string {
-    return c.value
-}
@@ -72,7 +72,7 @@ func (l *Parser) Parse() int {
     return yyParse(l)
 }
 
-func (l *Parser) listGetFirstNodeComments(list []node.Node) []comment.Comment {
+func (l *Parser) listGetFirstNodeComments(list []node.Node) []*comment.Comment {
     if len(list) == 0 {
         return nil
     }
@@ -72,7 +72,7 @@ func (l *Parser) Parse() int {
     return yyParse(l)
 }
 
-func (l *Parser) listGetFirstNodeComments(list []node.Node) []comment.Comment {
+func (l *Parser) listGetFirstNodeComments(list []node.Node) []*comment.Comment {
     if len(list) == 0 {
         return nil
     }
@@ -438,7 +438,7 @@ type Lexer struct {
     *lex.Lexer
     StateStack []int
     PhpDocComment string
-    Comments []comment.Comment
+    Comments []*comment.Comment
 }
 
 // Rune2Class returns the rune integer id
@@ -520,7 +520,8 @@ func (l *Lexer) newToken(chars []lex.Char) Token {
     return NewToken(l.charsToBytes(chars), startLine, endLine, startPos, endPos).SetComments(l.Comments)
 }
 
-func (l *Lexer) addComment(c comment.Comment) {
+func (l *Lexer) addComment(chars []lex.Char) {
+    c := comment.NewComment(string(l.charsToBytes(chars)))
     l.Comments = append(l.Comments, c)
 }
 
@@ -12,7 +12,6 @@ import (
     "bytes"
     "fmt"
     "github.com/cznic/golex/lex"
-    "github.com/z7zmey/php-parser/comment"
 )
 
 const (
@@ -8311,15 +8310,12 @@ yyrule125: // \?\?
 yyrule126: // (#|[/][/])
     {
 
-        tb := []rune{}
-        for _, chr := range l.Token() {
-            tb = append(tb, chr.Rune)
-        }
+        tb := l.Token()
         for {
             if c == -1 {
                 break
             }
-            tb = append(tb, rune(c))
+            tb = append(tb, l.Last)
             switch c {
             case '\r':
                 c = l.Next()
@@ -8342,7 +8338,7 @@ yyrule126: // (#|[/][/])
             }
             break
         }
-        l.addComment(comment.NewPlainComment(string(tb)))
+        l.addComment(tb)
         goto yystate0
     }
 yyrule127: // ([/][*])|([/][*][*])
@@ -8367,10 +8363,10 @@ yyrule127: // ([/][*])|([/][*][*])
         lval.Token(l.newToken(l.Token()))
         if is_doc_comment {
             l.PhpDocComment = string(l.TokenBytes(nil))
-            l.addComment(comment.NewDocComment(string(l.TokenBytes(nil))))
+            l.addComment(l.Token())
             // return T_DOC_COMMENT
         } else {
-            l.addComment(comment.NewPlainComment(string(l.TokenBytes(nil))))
+            l.addComment(l.Token())
             // return T_COMMENT
         }
         goto yystate0
@@ -11,7 +11,6 @@ import (
     "fmt"
     "bytes"
     "github.com/cznic/golex/lex"
-    "github.com/z7zmey/php-parser/comment"
 )
 
 const (
@@ -253,18 +252,14 @@ NEW_LINE (\r|\n|\r\n)
 <PHP>\>\> lval.Token(l.newToken(l.Token())); return T_SR
 <PHP>\?\? lval.Token(l.newToken(l.Token())); return T_COALESCE
 <PHP>(#|[/][/])
-    tb := []rune{}
-
-    for _, chr := range(l.Token()) {
-        tb = append(tb, chr.Rune)
-    }
+    tb := l.Token()
 
     for {
         if c == -1 {
             break
         }
 
-        tb = append(tb, rune(c))
+        tb = append(tb, l.Last)
 
         switch c {
         case '\r':
@@ -292,7 +287,7 @@ NEW_LINE (\r|\n|\r\n)
         break;
     }
 
-    l.addComment(comment.NewPlainComment(string(tb)))
+    l.addComment(tb)
 
 <PHP>([/][*])|([/][*][*])
     tb := l.Token()
@@ -318,10 +313,10 @@ NEW_LINE (\r|\n|\r\n)
     lval.Token(l.newToken(l.Token()))
     if is_doc_comment {
         l.PhpDocComment = string(l.TokenBytes(nil))
-        l.addComment(comment.NewDocComment(string(l.TokenBytes(nil))))
+        l.addComment(l.Token())
         // return T_DOC_COMMENT
     } else {
-        l.addComment(comment.NewPlainComment(string(l.TokenBytes(nil))))
+        l.addComment(l.Token())
         // return T_COMMENT
     }
 
@@ -872,8 +872,8 @@ func TestSlashAfterVariable(t *testing.T) {
 func TestCommentEnd(t *testing.T) {
     src := `<?php //test`
 
-    expected := []comment.Comment{
-        comment.NewPlainComment("//test"),
+    expected := []*comment.Comment{
+        comment.NewComment("//test"),
     }
 
     lexer := scanner.NewLexer(bytes.NewBufferString(src), "test.php")
@@ -889,8 +889,8 @@ func TestCommentEnd(t *testing.T) {
 func TestCommentNewLine(t *testing.T) {
     src := "<?php //test\n$a"
 
-    expected := []comment.Comment{
-        comment.NewPlainComment("//test\n"),
+    expected := []*comment.Comment{
+        comment.NewComment("//test\n"),
     }
 
     lexer := scanner.NewLexer(bytes.NewBufferString(src), "test.php")
@@ -906,8 +906,8 @@ func TestCommentNewLine(t *testing.T) {
 func TestCommentNewLine1(t *testing.T) {
     src := "<?php //test\r$a"
 
-    expected := []comment.Comment{
-        comment.NewPlainComment("//test\r"),
+    expected := []*comment.Comment{
+        comment.NewComment("//test\r"),
     }
 
     lexer := scanner.NewLexer(bytes.NewBufferString(src), "test.php")
@@ -923,8 +923,8 @@ func TestCommentNewLine1(t *testing.T) {
 func TestCommentNewLine2(t *testing.T) {
     src := "<?php #test\r\n$a"
 
-    expected := []comment.Comment{
-        comment.NewPlainComment("#test\r\n"),
+    expected := []*comment.Comment{
+        comment.NewComment("#test\r\n"),
     }
 
     lexer := scanner.NewLexer(bytes.NewBufferString(src), "test.php")
@@ -941,8 +941,8 @@ func TestCommentWithPhpEndTag(t *testing.T) {
     src := `<?php
     //test?> test`
 
-    expected := []comment.Comment{
-        comment.NewPlainComment("//test"),
+    expected := []*comment.Comment{
+        comment.NewComment("//test"),
     }
 
     lexer := scanner.NewLexer(bytes.NewBufferString(src), "test.php")
@@ -959,8 +959,8 @@ func TestInlineComment(t *testing.T) {
     src := `<?php
     /*test*/`
 
-    expected := []comment.Comment{
-        comment.NewPlainComment("/*test*/"),
+    expected := []*comment.Comment{
+        comment.NewComment("/*test*/"),
     }
 
     lexer := scanner.NewLexer(bytes.NewBufferString(src), "test.php")
@@ -977,8 +977,8 @@ func TestEmptyInlineComment(t *testing.T) {
     src := `<?php
     /**/`
 
-    expected := []comment.Comment{
-        comment.NewDocComment("/**/"),
+    expected := []*comment.Comment{
+        comment.NewComment("/**/"),
     }
 
     lexer := scanner.NewLexer(bytes.NewBufferString(src), "test.php")
@@ -995,8 +995,8 @@ func TestEmptyInlineComment2(t *testing.T) {
     src := `<?php
     /***/`
 
-    expected := []comment.Comment{
-        comment.NewDocComment("/***/"),
+    expected := []*comment.Comment{
+        comment.NewComment("/***/"),
     }
 
     lexer := scanner.NewLexer(bytes.NewBufferString(src), "test.php")
@@ -11,7 +11,7 @@ type Token struct {
     EndLine int
     StartPos int
     EndPos int
-    comments []comment.Comment
+    comments []*comment.Comment
 }
 
 // NewToken Token constructor
@@ -25,12 +25,12 @@ func (t Token) String() string {
 }
 
 // Comments returns attached comments
-func (t Token) Comments() []comment.Comment {
+func (t Token) Comments() []*comment.Comment {
     return t.comments
 }
 
 // SetComments attach comments
-func (t Token) SetComments(comments []comment.Comment) Token {
+func (t Token) SetComments(comments []*comment.Comment) Token {
     t.comments = comments
     return t
 }
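One detail worth noting in the Token hunk above: SetComments has a value receiver and returns the updated Token, so callers should keep the returned value. A minimal sketch (not part of the commit), reusing the constructor call from the test below; the comment text is illustrative.

    package main

    import (
        "fmt"

        "github.com/z7zmey/php-parser/comment"
        "github.com/z7zmey/php-parser/scanner"
    )

    func main() {
        // SetComments returns a modified copy of the Token, so chain it (or
        // reassign the result) rather than discarding the return value.
        tkn := scanner.NewToken([]byte(`foo`), 1, 1, 0, 3).SetComments([]*comment.Comment{
            comment.NewComment("// attached to foo"),
        })

        for _, c := range tkn.Comments() {
            fmt.Println(c.String())
        }
    }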
@@ -12,8 +12,8 @@ import (
 func TestToken(t *testing.T) {
     tkn := scanner.NewToken([]byte(`foo`), 1, 1, 0, 3)
 
-    c := []comment.Comment{
-        comment.NewPlainComment("test comment"),
+    c := []*comment.Comment{
+        comment.NewComment("test comment"),
     }
 
     tkn.SetComments(c)