Use a string builder instead of appending to a slice

Kovid Goyal 2022-11-10 20:41:35 +05:30
parent 87b4800fdf
commit eae4899df4
No known key found for this signature in database
GPG Key ID: 06BC317B515ACE7C
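
The diff below swaps the tokenizer's []rune accumulator for a strings.Builder: runes are written with WriteRune and the token text is taken with String() when the token is emitted. As a rough illustration of the difference, here is a minimal standalone sketch (not kitty's code; the helper names collectRunes and collectBuilder are made up for the example):

// A minimal, self-contained sketch contrasting the two approaches; the helper
// names collectRunes and collectBuilder are illustrative, not from kitty.
package main

import (
	"fmt"
	"strings"
)

// collectRunes mirrors the old code path: append runes to a slice and convert
// with string(), which re-encodes and copies the runes at the end.
func collectRunes(input string) string {
	var value []rune
	for _, r := range input {
		value = append(value, r) // may reallocate as the slice grows
	}
	return string(value)
}

// collectBuilder mirrors the new code path: write runes into a strings.Builder,
// which grows an internal byte buffer and hands it back via String().
func collectBuilder(input string) string {
	value := strings.Builder{}
	for _, r := range input {
		value.WriteRune(r) // encodes straight to UTF-8 bytes
	}
	return value.String()
}

func main() {
	fmt.Println(collectRunes("ls -la 'my file'"))
	fmt.Println(collectBuilder("ls -la 'my file'"))
}

A strings.Builder accumulates UTF-8 bytes directly, so WriteRune avoids growing a separate []rune slice and String() returns the result without the extra encode-and-copy that string(value) performs on a rune slice.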


@@ -202,11 +202,11 @@ var ErrUnclosedSingleQuote error = errors.New("EOF found when expecting closing
 func (t *Tokenizer) scanStream() (*Token, error) {
 	state := startState
 	var tokenType TokenType
-	var value []rune
 	var nextRune rune
 	var nextRuneType runeTokenClass
 	var err error
 	var sz int
+	value := strings.Builder{}
 
 	unread_rune := func() {
 		t.redo_rune.sz = sz
@@ -244,7 +244,7 @@ func (t *Tokenizer) scanStream() (*Token, error) {
 				case spaceRuneClass:
 					{
 						tokenType = SpaceToken
-						value = append(value, nextRune)
+						value.WriteRune(nextRune)
 						state = inSpaceState
 					}
 				case escapingQuoteRuneClass:
@@ -265,7 +265,7 @@ func (t *Tokenizer) scanStream() (*Token, error) {
 				default:
 					{
 						tokenType = WordToken
-						value = append(value, nextRune)
+						value.WriteRune(nextRune)
 						state = inWordState
 					}
 				}
@@ -275,13 +275,13 @@ func (t *Tokenizer) scanStream() (*Token, error) {
 				switch nextRuneType {
 				case spaceRuneClass:
 					{
-						value = append(value, nextRune)
+						value.WriteRune(nextRune)
 					}
 				default:
 					{
 						token := &Token{
 							tokenType: tokenType,
-							value:     string(value)}
+							value:     value.String()}
 						unread_rune()
 						return token, err
 					}
@@ -294,14 +294,14 @@ func (t *Tokenizer) scanStream() (*Token, error) {
 					{
 						token := &Token{
 							tokenType: tokenType,
-							value:     string(value)}
+							value:     value.String()}
 						return token, err
 					}
 				case spaceRuneClass:
 					{
 						token := &Token{
 							tokenType: tokenType,
-							value:     string(value)}
+							value:     value.String()}
 						unread_rune()
 						return token, err
 					}
@@ -319,7 +319,7 @@ func (t *Tokenizer) scanStream() (*Token, error) {
 					}
 				default:
 					{
-						value = append(value, nextRune)
+						value.WriteRune(nextRune)
 					}
 				}
 			}
@@ -331,13 +331,13 @@ func (t *Tokenizer) scanStream() (*Token, error) {
 						err = ErrTrailingEscape
 						token := &Token{
 							tokenType: tokenType,
-							value:     string(value)}
+							value:     value.String()}
 						return token, err
 					}
 				default:
 					{
 						state = inWordState
-						value = append(value, nextRune)
+						value.WriteRune(nextRune)
 					}
 				}
 			}
@@ -349,13 +349,13 @@ func (t *Tokenizer) scanStream() (*Token, error) {
 						err = ErrTrailingQuoteEscape
 						token := &Token{
 							tokenType: tokenType,
-							value:     string(value)}
+							value:     value.String()}
 						return token, err
 					}
 				default:
 					{
 						state = quotingEscapingState
-						value = append(value, nextRune)
+						value.WriteRune(nextRune)
 					}
 				}
 			}
@@ -367,7 +367,7 @@ func (t *Tokenizer) scanStream() (*Token, error) {
 						err = ErrUnclosedDoubleQuote
 						token := &Token{
 							tokenType: tokenType,
-							value:     string(value)}
+							value:     value.String()}
 						return token, err
 					}
 				case escapingQuoteRuneClass:
@@ -380,7 +380,7 @@ func (t *Tokenizer) scanStream() (*Token, error) {
 					}
 				default:
 					{
-						value = append(value, nextRune)
+						value.WriteRune(nextRune)
 					}
 				}
 			}
@@ -392,7 +392,7 @@ func (t *Tokenizer) scanStream() (*Token, error) {
 						err = ErrUnclosedSingleQuote
 						token := &Token{
 							tokenType: tokenType,
-							value:     string(value)}
+							value:     value.String()}
 						return token, err
 					}
 				case nonEscapingQuoteRuneClass:
@@ -401,7 +401,7 @@ func (t *Tokenizer) scanStream() (*Token, error) {
 					}
 				default:
 					{
-						value = append(value, nextRune)
+						value.WriteRune(nextRune)
 					}
 				}
 			}