
Source file src/pkg/cmd/compile/internal/ssa/flagalloc.go

// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

// flagalloc allocates the flag register among all the flag-generating
// instructions. Flag values are recomputed if they need to be
// spilled/restored.
func flagalloc(f *Func) {
	// Compute the in-register flag value we want at the end of
	// each block. This is basically a best-effort live variable
	// analysis, so it can be much simpler than a full analysis.
	end := make([]*Value, f.NumBlocks())
	po := f.postorder()
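	// Two backward passes over the postorder are enough for this
	// best-effort analysis: the first pass propagates demands along
	// forward edges, and the second picks up demands that arrive
	// via loop back edges. No fixed-point iteration is needed.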
	for n := 0; n < 2; n++ {
		for _, b := range po {
			// Walk values backwards to figure out what flag
			// value we want in the flag register at the start
			// of the block.
			flag := end[b.ID]
			if b.Control != nil && b.Control.Type.IsFlags() {
				flag = b.Control
			}
			for j := len(b.Values) - 1; j >= 0; j-- {
				v := b.Values[j]
				if v == flag {
					flag = nil
				}
				if v.clobbersFlags() {
					flag = nil
				}
				for _, a := range v.Args {
					if a.Type.IsFlags() {
						flag = a
					}
				}
			}
			if flag != nil {
				for _, e := range b.Preds {
					p := e.b
					end[p.ID] = flag
				}
			}
		}
	}

	// For blocks which have a flags control value, that's the only value
	// we can leave in the flags register at the end of the block. (There
	// is no place to put a flag regeneration instruction.)
	for _, b := range f.Blocks {
		v := b.Control
		if v != nil && v.Type.IsFlags() && end[b.ID] != v {
			end[b.ID] = nil
		}
		if b.Kind == BlockDefer {
			// Defer blocks internally use/clobber the flags value.
			end[b.ID] = nil
		}
	}

	// Compute which flags values will need to be spilled.
	spill := map[ID]bool{}
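	// Walk each block forward, tracking in flag the value that would be
	// live in the flags register at each point. Any flag-typed argument
	// that is needed while a different value (or nothing) is resident
	// will have to be regenerated, so mark it for spilling.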
	for _, b := range f.Blocks {
		var flag *Value
		if len(b.Preds) > 0 {
			flag = end[b.Preds[0].b.ID]
		}
		for _, v := range b.Values {
			for _, a := range v.Args {
				if !a.Type.IsFlags() {
					continue
				}
				if a == flag {
					continue
				}
				// a will need to be restored here.
				spill[a.ID] = true
				flag = a
			}
			if v.clobbersFlags() {
				flag = nil
			}
			if v.Type.IsFlags() {
				flag = v
			}
		}
		if v := b.Control; v != nil && v != flag && v.Type.IsFlags() {
			spill[v.ID] = true
		}
		if v := end[b.ID]; v != nil && v != flag {
			spill[v.ID] = true
		}
	}

	// Add flag spill and recomputation where they are needed.
	// TODO: Remove original instructions if they are never used.
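	// Each block is rebuilt in place: oldSched keeps the original
	// schedule while b.Values is refilled, with flag generators
	// recomputed immediately before the uses that need them.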
	var oldSched []*Value
	for _, b := range f.Blocks {
		oldSched = append(oldSched[:0], b.Values...)
		b.Values = b.Values[:0]
		// The current live flag value (the pre-flagalloc copy).
		var flag *Value
		if len(b.Preds) > 0 {
			flag = end[b.Preds[0].b.ID]
			// Note: the following condition depends on the lack of critical edges.
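			// (With no critical edges, a block with more than one predecessor
			// implies each of those predecessors has this block as its only
			// successor, so end[] for each of them was set, or left nil,
			// only by this block's demand.)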
			for _, e := range b.Preds[1:] {
				p := e.b
				if end[p.ID] != flag {
					f.Fatalf("live flag in %s's predecessors not consistent", b)
				}
			}
		}
		for _, v := range oldSched {
			if v.Op == OpPhi && v.Type.IsFlags() {
				f.Fatalf("phi of flags not supported: %s", v.LongString())
			}

			// If v will be spilled, and v uses memory, then we must split it
			// into a load + a flag generator.
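			// The load is then issued once, here, and only the pure
			// (register-only) flag computation gets duplicated if the
			// flags need to be regenerated later.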
			if spill[v.ID] && v.MemoryArg() != nil {
				if !f.Config.splitLoad(v) {
					f.Fatalf("can't split flag generator: %s", v.LongString())
				}
			}

			// Make sure any flag arg of v is in the flags register.
			// If not, recompute it.
			for i, a := range v.Args {
				if !a.Type.IsFlags() {
					continue
				}
				if a == flag {
					continue
				}
				// Recalculate a
				c := copyFlags(a, b)
				// Update v.
				v.SetArg(i, c)
				// Remember the most-recently computed flag value.
				flag = a
			}
			// Issue v.
			b.Values = append(b.Values, v)
			if v.clobbersFlags() {
				flag = nil
			}
			if v.Type.IsFlags() {
				flag = v
			}
		}
		if v := b.Control; v != nil && v != flag && v.Type.IsFlags() {
			// Recalculate control value.
			c := copyFlags(v, b)
			b.SetControl(c)
			flag = v
		}
		if v := end[b.ID]; v != nil && v != flag {
			// Need to reissue flag generator for use by
			// subsequent blocks.
			copyFlags(v, b)
			// Note: this flag generator is not properly linked up
			// with the flag users. This breaks the SSA representation.
			// We could fix up the users with another pass, but for now
			// we'll just leave it.  (Regalloc has the same issue for
			// standard regs, and it runs next.)
		}
	}

	// Save live flag state for later.
	for _, b := range f.Blocks {
		b.FlagsLiveAtEnd = end[b.ID] != nil
	}
}

func (v *Value) clobbersFlags() bool {
	if opcodeTable[v.Op].clobberFlags {
		return true
	}
	if v.Type.IsTuple() && (v.Type.FieldType(0).IsFlags() || v.Type.FieldType(1).IsFlags()) {
		// This case handles the possibility where a flag value is generated but never used.
		// In that case, there's no corresponding Select to overwrite the flags value,
		// so we must consider flags clobbered by the tuple-generating instruction.
		return true
	}
	return false
}

// copyFlags copies v (flag generator) into b, returns the copy.
// If v's arg is also flags, copy recursively.
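// Tuple-typed args are treated the same way: a flags value obtained by
// selecting from a tuple can only be regenerated by reissuing the
// tuple-generating instruction as well.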
func copyFlags(v *Value, b *Block) *Value {
	flagsArgs := make(map[int]*Value)
	for i, a := range v.Args {
		if a.Type.IsFlags() || a.Type.IsTuple() {
			flagsArgs[i] = copyFlags(a, b)
		}
	}
	c := v.copyInto(b)
	for i, a := range flagsArgs {
		c.SetArg(i, a)
	}
	return c
}
