merge token package into scanner package

This commit is contained in:
z7zmey
2018-04-15 14:47:40 +03:00
parent 983c721e83
commit e65ace8984
14 changed files with 56 additions and 62 deletions

View File

@@ -10,7 +10,6 @@ import (
"github.com/cznic/golex/lex"
"github.com/z7zmey/php-parser/comment"
t "github.com/z7zmey/php-parser/token"
)
// Allocate Character classes anywhere in [0x80, 0xFF].
@@ -431,7 +430,7 @@ const T_POW = 57481
// Lval parsers yySymType must implement this interface
type Lval interface {
Token(tkn t.Token)
Token(tkn Token)
}
// Lexer php lexer
@@ -509,7 +508,7 @@ func (l *Lexer) getCurrentState() int {
return l.StateStack[len(l.StateStack)-1]
}
func (l *Lexer) newToken(chars []lex.Char) t.Token {
func (l *Lexer) newToken(chars []lex.Char) Token {
firstChar := chars[0]
lastChar := chars[len(chars)-1]
@@ -518,7 +517,7 @@ func (l *Lexer) newToken(chars []lex.Char) t.Token {
startPos := int(firstChar.Pos())
endPos := int(lastChar.Pos())
return t.NewToken(l.charsToBytes(chars), startLine, endLine, startPos, endPos).SetComments(l.Comments)
return NewToken(l.charsToBytes(chars), startLine, endLine, startPos, endPos).SetComments(l.Comments)
}
func (l *Lexer) addComment(c comment.Comment) {

View File

@@ -8,7 +8,6 @@ import (
"github.com/z7zmey/php-parser/comment"
"github.com/z7zmey/php-parser/scanner"
"github.com/z7zmey/php-parser/token"
"github.com/kylelemons/godebug/pretty"
)
@@ -27,10 +26,10 @@ func assertEqual(t *testing.T, expected interface{}, actual interface{}) {
}
type lval struct {
Tkn token.Token
Tkn scanner.Token
}
func (lv *lval) Token(t token.Token) {
func (lv *lval) Token(t scanner.Token) {
lv.Tkn = t
}

36
scanner/token.go Normal file
View File

@@ -0,0 +1,36 @@
package scanner
import (
"github.com/z7zmey/php-parser/comment"
)
// Token is the value produced by the lexer for each lexeme: the raw
// token text, its source position, and any comments collected before it.
type Token struct {
	Value     string            // raw token text
	StartLine int               // line on which the token starts (as reported by the lexer)
	EndLine   int               // line on which the token ends
	StartPos  int               // position of the token's first character — presumably a byte offset; confirm against lex.Char.Pos()
	EndPos    int               // position of the token's last character
	comments  []comment.Comment // comments attached via SetComments; nil when none
}
// NewToken constructs a Token from the raw byte value and its
// line/position bounds. No comments are attached; use SetComments.
// TODO: return pointer
func NewToken(value []byte, startLine int, endLine int, startPos int, endPos int) Token {
	tkn := Token{
		Value:     string(value),
		StartLine: startLine,
		EndLine:   endLine,
		StartPos:  startPos,
		EndPos:    endPos,
	}
	return tkn
}
// String returns the token's raw text, satisfying fmt.Stringer.
func (t Token) String() string {
	// Value is already a string; the original string(t.Value)
	// conversion was a redundant no-op.
	return t.Value
}
// Comments returns the comments previously attached with SetComments;
// nil when no comments were attached.
func (t Token) Comments() []comment.Comment {
	return t.comments
}
// SetComments attaches comments to the token and returns the result.
// NOTE: the receiver is a value, so the original Token is left
// unmodified — callers must use the returned copy
// (tkn = tkn.SetComments(c)); calling it for side effects alone does nothing.
func (t Token) SetComments(comments []comment.Comment) Token {
	t.comments = comments
	return t
}

32
scanner/token_test.go Normal file
View File

@@ -0,0 +1,32 @@
package scanner_test
import (
"reflect"
"testing"
"github.com/z7zmey/php-parser/comment"
"github.com/z7zmey/php-parser/scanner"
)
// TestToken verifies that NewToken records the value and positions,
// and that SetComments/Comments round-trip attached comments.
func TestToken(t *testing.T) {
	tkn := scanner.NewToken([]byte(`foo`), 1, 1, 0, 3)

	c := []comment.Comment{
		comment.NewPlainComment("test comment"),
	}

	// SetComments has a value receiver and returns a modified copy; the
	// original test discarded the result, so comments were never attached.
	tkn = tkn.SetComments(c)

	// The original condition was inverted (it errored when the comments
	// WERE equal) — combined with the discarded copy above, the test
	// passed vacuously.
	if !reflect.DeepEqual(tkn.Comments(), c) {
		t.Errorf("comments are not equal\n")
	}

	if tkn.String() != `foo` {
		t.Errorf("token value is not equal\n")
	}

	if tkn.StartLine != 1 || tkn.EndLine != 1 || tkn.StartPos != 0 || tkn.EndPos != 3 {
		t.Errorf("token position is not equal\n")
	}
}