From 60c95c5de8ac2d0c53287477c523c8258311cee6 Mon Sep 17 00:00:00 2001
From: Daniel Kang
Date: Wed, 9 Jan 2019 08:58:18 -0800
Subject: [PATCH] token.CHAR -> token.Char, token.STRING -> token.String

---
 parser/parser.go        | 12 ++++++------
 scanner/scanner.go      |  6 +++---
 scanner/scanner_test.go | 18 +++++++++---------
 token/tokens.go         |  8 ++++----
 4 files changed, 22 insertions(+), 22 deletions(-)

diff --git a/parser/parser.go b/parser/parser.go
index 5ba16c9..eb4a50a 100644
--- a/parser/parser.go
+++ b/parser/parser.go
@@ -285,7 +285,7 @@ func (p *Parser) parseOperand() ast.Expr {
 		p.next()
 		return x
 
-	case token.CHAR:
+	case token.Char:
 		v, _ := utf8.DecodeRuneInString(p.tokenLit)
 		x := &ast.CharLit{
 			Value: v,
@@ -295,7 +295,7 @@
 		p.next()
 		return x
 
-	case token.STRING:
+	case token.String:
 		v, _ := strconv.Unquote(p.tokenLit)
 		x := &ast.StringLit{
 			Value: v,
@@ -485,10 +485,10 @@ func (p *Parser) parseStmt() (stmt ast.Stmt) {
 	}
 
 	switch p.token {
-	case // simple statements
-		token.Func, token.Ident, token.Int, token.Float, token.CHAR, token.STRING, token.True, token.False, token.LParen, // operands
-		token.LBrace, token.LBrack, // composite types
-		token.Add, token.Sub, token.Mul, token.And, token.Xor, token.Not: // unary operators
+	case // simple statements
+		token.Func, token.Ident, token.Int, token.Float, token.Char, token.String, token.True, token.False, token.LParen, // operands
+		token.LBrace, token.LBrack, // composite types
+		token.Add, token.Sub, token.Mul, token.And, token.Xor, token.Not: // unary operators
 		s := p.parseSimpleStmt(false)
 		p.expectSemi()
 		return s
diff --git a/scanner/scanner.go b/scanner/scanner.go
index f9af4f5..ea5b3b4 100644
--- a/scanner/scanner.go
+++ b/scanner/scanner.go
@@ -84,15 +84,15 @@ func (s *Scanner) Scan() (tok token.Token, literal string, pos Pos) {
 		return token.Semicolon, "\n", pos
 	case '"':
 		insertSemi = true
-		tok = token.STRING
+		tok = token.String
 		literal = s.scanString()
 	case '\'':
 		insertSemi = true
-		tok = token.CHAR
+		tok = token.Char
 		literal = s.scanRune()
 	case '`':
 		insertSemi = true
-		tok = token.STRING
+		tok = token.String
 		literal = s.scanRawString()
 	case ':':
 		tok = s.switch2(token.Colon, token.Define)
diff --git a/scanner/scanner_test.go b/scanner/scanner_test.go
index b1fe281..5d4e040 100644
--- a/scanner/scanner_test.go
+++ b/scanner/scanner_test.go
@@ -50,18 +50,18 @@ func TestScanner_Scan(t *testing.T) {
 		{token.Float, "1e+100"},
 		{token.Float, "1e-100"},
 		{token.Float, "2.71828e-1000"},
-		{token.CHAR, "'a'"},
-		{token.CHAR, "'\\000'"},
-		{token.CHAR, "'\\xFF'"},
-		{token.CHAR, "'\\uff16'"},
-		{token.CHAR, "'\\U0000ff16'"},
-		{token.STRING, "`foobar`"},
-		{token.STRING, "`" + `foo
+		{token.Char, "'a'"},
+		{token.Char, "'\\000'"},
+		{token.Char, "'\\xFF'"},
+		{token.Char, "'\\uff16'"},
+		{token.Char, "'\\U0000ff16'"},
+		{token.String, "`foobar`"},
+		{token.String, "`" + `foo
 bar` + "`",
 		},
-		{token.STRING, "`\r`"},
-		{token.STRING, "`foo\r\nbar`"},
+		{token.String, "`\r`"},
+		{token.String, "`foo\r\nbar`"},
 		{token.Add, "+"},
 		{token.Sub, "-"},
 		{token.Mul, "*"},
diff --git a/token/tokens.go b/token/tokens.go
index ddb8e1e..2fd549f 100644
--- a/token/tokens.go
+++ b/token/tokens.go
@@ -12,8 +12,8 @@ const (
 	Ident
 	Int
 	Float
-	CHAR
-	STRING
+	Char
+	String
 	_literalEnd
 	_operatorBeg
 	Add // +
@@ -88,8 +88,8 @@ var tokens = [...]string{
 	Ident: "IDENT",
 	Int: "INT",
 	Float: "FLOAT",
-	CHAR: "CHAR",
-	STRING: "STRING",
+	Char: "CHAR",
+	String: "STRING",
 	Add: "+",
 	Sub: "-",
 	Mul: "*",
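
Note on the rename: only the exported Go identifiers change (token.CHAR -> token.Char, token.STRING -> token.String), which matches Go's MixedCaps naming convention; the human-readable names in the tokens table remain "CHAR" and "STRING". Below is a minimal sketch of how downstream code looks against the renamed constants. It is illustrative only: the import path github.com/d5/tengo/token and the describeLiteral helper are assumptions, and only token.Token, token.Char, and token.String are taken from the diff above. Scanner construction is omitted because its signature is not part of this patch.

package main

import (
	"fmt"

	"github.com/d5/tengo/token" // assumed import path; not shown in the patch
)

// describeLiteral is a hypothetical helper that classifies an already-scanned
// token using the renamed constants.
func describeLiteral(tok token.Token, lit string) string {
	switch tok {
	case token.Char:
		return "char literal " + lit
	case token.String:
		return "string literal " + lit
	default:
		return "other token: " + lit
	}
}

func main() {
	fmt.Println(describeLiteral(token.Char, "'a'"))        // char literal 'a'
	fmt.Println(describeLiteral(token.String, "`foobar`")) // string literal `foobar`
}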