token.CHAR -> token.Char, token.STRING -> token.String

This commit is contained in:
Daniel Kang 2019-01-09 08:58:18 -08:00
parent 2c3282da21
commit 60c95c5de8
4 changed files with 22 additions and 22 deletions

View file

@@ -285,7 +285,7 @@ func (p *Parser) parseOperand() ast.Expr {
 p.next()
 return x
-case token.CHAR:
+case token.Char:
 v, _ := utf8.DecodeRuneInString(p.tokenLit)
 x := &ast.CharLit{
 Value: v,
@@ -295,7 +295,7 @@ func (p *Parser) parseOperand() ast.Expr {
 p.next()
 return x
-case token.STRING:
+case token.String:
 v, _ := strconv.Unquote(p.tokenLit)
 x := &ast.StringLit{
 Value: v,
@@ -485,10 +485,10 @@ func (p *Parser) parseStmt() (stmt ast.Stmt) {
 }
 switch p.token {
 case // simple statements
-token.Func, token.Ident, token.Int, token.Float, token.CHAR, token.STRING, token.True, token.False, token.LParen, // operands
+token.Func, token.Ident, token.Int, token.Float, token.Char, token.String, token.True, token.False, token.LParen, // operands
 token.LBrace, token.LBrack, // composite types
 token.Add, token.Sub, token.Mul, token.And, token.Xor, token.Not: // unary operators
 s := p.parseSimpleStmt(false)
 p.expectSemi()
 return s

View file

@@ -84,15 +84,15 @@ func (s *Scanner) Scan() (tok token.Token, literal string, pos Pos) {
 return token.Semicolon, "\n", pos
 case '"':
 insertSemi = true
-tok = token.STRING
+tok = token.String
 literal = s.scanString()
 case '\'':
 insertSemi = true
-tok = token.CHAR
+tok = token.Char
 literal = s.scanRune()
 case '`':
 insertSemi = true
-tok = token.STRING
+tok = token.String
 literal = s.scanRawString()
 case ':':
 tok = s.switch2(token.Colon, token.Define)

View file

@@ -50,18 +50,18 @@ func TestScanner_Scan(t *testing.T) {
 {token.Float, "1e+100"},
 {token.Float, "1e-100"},
 {token.Float, "2.71828e-1000"},
-{token.CHAR, "'a'"},
-{token.CHAR, "'\\000'"},
-{token.CHAR, "'\\xFF'"},
-{token.CHAR, "'\\uff16'"},
-{token.CHAR, "'\\U0000ff16'"},
-{token.STRING, "`foobar`"},
-{token.STRING, "`" + `foo
+{token.Char, "'a'"},
+{token.Char, "'\\000'"},
+{token.Char, "'\\xFF'"},
+{token.Char, "'\\uff16'"},
+{token.Char, "'\\U0000ff16'"},
+{token.String, "`foobar`"},
+{token.String, "`" + `foo
 bar` +
 "`",
 },
-{token.STRING, "`\r`"},
-{token.STRING, "`foo\r\nbar`"},
+{token.String, "`\r`"},
+{token.String, "`foo\r\nbar`"},
 {token.Add, "+"},
 {token.Sub, "-"},
 {token.Mul, "*"},

View file

@@ -12,8 +12,8 @@ const (
 Ident
 Int
 Float
-CHAR
-STRING
+Char
+String
 _literalEnd
 _operatorBeg
 Add // +
@@ -88,8 +88,8 @@ var tokens = [...]string{
 Ident: "IDENT",
 Int: "INT",
 Float: "FLOAT",
-CHAR: "CHAR",
-STRING: "STRING",
+Char: "CHAR",
+String: "STRING",
 Add: "+",
 Sub: "-",
 Mul: "*",