#21 scanner.Lexer.charsToBytes optimization

z7zmey 2018-06-04 21:21:04 +03:00
parent b4f51d58af
commit 788628d902


@@ -441,6 +441,7 @@ type Lexer struct {
 	PhpDocComment string
 	Comments      []comment.Comment
 	heredocLabel  []lex.Char
+	tokenBytesBuf *bytes.Buffer
 }
 
 // Rune2Class returns the rune integer id
@@ -468,7 +469,7 @@ func NewLexer(src io.Reader, fName string) *Lexer {
 	if err != nil {
 		panic(err)
 	}
-	return &Lexer{lx, []int{0}, "", nil, nil}
+	return &Lexer{lx, []int{0}, "", nil, nil, &bytes.Buffer{}}
 }
 
 func (l *Lexer) ungetChars(n int) []lex.Char {
@@ -527,11 +528,13 @@ func (l *Lexer) addComment(c comment.Comment) {
 }
 
 func (l *Lexer) charsToBytes(chars []lex.Char) []byte {
-	bytesBuf := bytes.Buffer{}
 	for _, c := range chars {
-		bytesBuf.WriteRune(c.Rune)
+		l.tokenBytesBuf.WriteRune(c.Rune)
 	}
-	return bytesBuf.Bytes()
+	r := l.tokenBytesBuf.Bytes()
+	l.tokenBytesBuf.Reset()
+	return r
 }
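
For context, the change swaps a fresh bytes.Buffer allocation on every charsToBytes call for a single buffer stored on the Lexer and reset after each token. Below is a minimal, self-contained sketch of the before/after pattern together with a benchmark harness; the simplified Char type and the benchmark names are illustrative assumptions, not code from this repository.

package scanner

import (
	"bytes"
	"testing"
)

// Char is a simplified stand-in for lex.Char, carrying only the rune.
type Char struct{ Rune rune }

// charsToBytesAlloc mirrors the old code: a new buffer is allocated per call.
func charsToBytesAlloc(chars []Char) []byte {
	buf := bytes.Buffer{}
	for _, c := range chars {
		buf.WriteRune(c.Rune)
	}
	return buf.Bytes()
}

// lexer holds a reusable buffer, analogous to the tokenBytesBuf field the commit adds.
type lexer struct {
	tokenBytesBuf *bytes.Buffer
}

// charsToBytesReuse mirrors the new code: write into the shared buffer,
// take its bytes, then reset it so the next token can reuse the storage.
func (l *lexer) charsToBytesReuse(chars []Char) []byte {
	for _, c := range chars {
		l.tokenBytesBuf.WriteRune(c.Rune)
	}
	r := l.tokenBytesBuf.Bytes()
	l.tokenBytesBuf.Reset()
	return r
}

func BenchmarkAlloc(b *testing.B) {
	chars := make([]Char, 64)
	for i := range chars {
		chars[i] = Char{Rune: 'a'}
	}
	for i := 0; i < b.N; i++ {
		_ = charsToBytesAlloc(chars)
	}
}

func BenchmarkReuse(b *testing.B) {
	l := &lexer{tokenBytesBuf: &bytes.Buffer{}}
	chars := make([]Char, 64)
	for i := range chars {
		chars[i] = Char{Rune: 'a'}
	}
	for i := 0; i < b.N; i++ {
		_ = l.charsToBytesReuse(chars)
	}
}

One trade-off of the reused buffer: Bytes() returns a slice that shares the buffer's backing array, so the bytes of a token are only safe to use until the buffer is written to again; each returned token therefore needs to be consumed or copied before the next one is scanned.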