...

Source file src/pkg/cmd/asm/internal/lex/lex.go

     1	// Copyright 2015 The Go Authors. All rights reserved.
     2	// Use of this source code is governed by a BSD-style
     3	// license that can be found in the LICENSE file.
     4	
     5	// Package lex implements lexical analysis for the assembler.
     6	package lex
     7	
     8	import (
     9		"fmt"
    10		"log"
    11		"os"
    12		"strings"
    13		"text/scanner"
    14	
    15		"cmd/internal/src"
    16	)
    17	
// A ScanToken represents an input item. It is a simple wrapping of rune, as
// returned by text/scanner.Scanner, plus a couple of extra values.
type ScanToken rune

const (
	// Asm defines some two-character lexemes. We make up
	// a rune/ScanToken value for them - ugly but simple.
	// Starting at -1000 and descending keeps them clear of valid runes
	// (which are non-negative) and of text/scanner's own small negative
	// token values (scanner.EOF, scanner.Ident, etc.).
	LSH       ScanToken = -1000 - iota // << Left shift.
	RSH                                // >> Logical right shift.
	ARR                                // -> Used on ARM for shift type 3, arithmetic right shift.
	ROT                                // @> Used on ARM for shift type 4, rotate right.
	macroName                          // name of macro that should not be expanded
)
    31	
    32	// IsRegisterShift reports whether the token is one of the ARM register shift operators.
    33	func IsRegisterShift(r ScanToken) bool {
    34		return ROT <= r && r <= LSH // Order looks backwards because these are negative.
    35	}
    36	
    37	func (t ScanToken) String() string {
    38		switch t {
    39		case scanner.EOF:
    40			return "EOF"
    41		case scanner.Ident:
    42			return "identifier"
    43		case scanner.Int:
    44			return "integer constant"
    45		case scanner.Float:
    46			return "float constant"
    47		case scanner.Char:
    48			return "rune constant"
    49		case scanner.String:
    50			return "string constant"
    51		case scanner.RawString:
    52			return "raw string constant"
    53		case scanner.Comment:
    54			return "comment"
    55		default:
    56			return fmt.Sprintf("%q", rune(t))
    57		}
    58	}
    59	
    60	// NewLexer returns a lexer for the named file and the given link context.
    61	func NewLexer(name string) TokenReader {
    62		input := NewInput(name)
    63		fd, err := os.Open(name)
    64		if err != nil {
    65			log.Fatalf("%s\n", err)
    66		}
    67		input.Push(NewTokenizer(name, fd, fd))
    68		return input
    69	}
    70	
    71	// The other files in this directory each contain an implementation of TokenReader.
    72	
// A TokenReader is like a reader, but returns lex tokens of type Token. It also can tell you what
// the text of the most recently returned token is, and where it was found.
// The underlying scanner elides all spaces except newline, so the input looks like a stream of
// Tokens; original spacing is lost but we don't need it.
type TokenReader interface {
	// Next returns the next token.
	Next() ScanToken
	// The following methods all refer to the most recent token returned by Next.
	// Text returns the original string representation of the token.
	Text() string
	// File reports the source file name of the token.
	File() string
	// Base reports the position base of the token.
	Base() *src.PosBase
	// SetBase sets the position base, e.g. after a #line directive
	// changes the apparent file/line of subsequent tokens.
	SetBase(*src.PosBase)
	// Line reports the source line number of the token.
	Line() int
	// Col reports the source column number of the token.
	Col() int
	// Close does any teardown required, such as closing an underlying file.
	Close()
}
    96	
// A Token is a scan token plus its string value.
// A macro is stored as a sequence of Tokens with spaces stripped.
type Token struct {
	ScanToken        // the token kind; embedded so a Token can be compared as a ScanToken
	text      string // token text, after the substitutions applied by Make
}
   103	
   104	// Make returns a Token with the given rune (ScanToken) and text representation.
   105	func Make(token ScanToken, text string) Token {
   106		// If the symbol starts with center dot, as in ·x, rewrite it as ""·x
   107		if token == scanner.Ident && strings.HasPrefix(text, "\u00B7") {
   108			text = `""` + text
   109		}
   110		// Substitute the substitutes for . and /.
   111		text = strings.Replace(text, "\u00B7", ".", -1)
   112		text = strings.Replace(text, "\u2215", "/", -1)
   113		return Token{ScanToken: token, text: text}
   114	}
   115	
   116	func (l Token) String() string {
   117		return l.text
   118	}
   119	
// A Macro represents the definition of a #defined macro.
// Its body is held as Tokens with spaces stripped (see Token).
type Macro struct {
	name   string   // The #define name.
	args   []string // Formal arguments.
	tokens []Token  // Body of macro.
}
   126	
   127	// Tokenize turns a string into a list of Tokens; used to parse the -D flag and in tests.
   128	func Tokenize(str string) []Token {
   129		t := NewTokenizer("command line", strings.NewReader(str), nil)
   130		var tokens []Token
   131		for {
   132			tok := t.Next()
   133			if tok == scanner.EOF {
   134				break
   135			}
   136			tokens = append(tokens, Make(tok, t.Text()))
   137		}
   138		return tokens
   139	}
   140	

View as plain text