...

Source file src/pkg/cmd/compile/internal/gc/align.go

     1	// Copyright 2009 The Go Authors. All rights reserved.
     2	// Use of this source code is governed by a BSD-style
     3	// license that can be found in the LICENSE file.
     4	
     5	package gc
     6	
     7	import (
     8		"cmd/compile/internal/types"
     9		"sort"
    10	)
    11	
    12	// sizeCalculationDisabled indicates whether it is safe
    13	// to calculate Types' widths and alignments. See dowidth.
    14	var sizeCalculationDisabled bool
    15	
    16	// machine size and rounding alignment is dictated around
    17	// the size of a pointer, set in betypeinit (see ../amd64/galign.go).
    18	var defercalc int
    19	
    20	func Rnd(o int64, r int64) int64 {
    21		if r < 1 || r > 8 || r&(r-1) != 0 {
    22			Fatalf("rnd %d", r)
    23		}
    24		return (o + r - 1) &^ (r - 1)
    25	}
    26	
    27	// expandiface computes the method set for interface type t by
    28	// expanding embedded interfaces.
    29	func expandiface(t *types.Type) {
    30		var fields []*types.Field
    31		for _, m := range t.Methods().Slice() {
    32			if m.Sym != nil {
    33				fields = append(fields, m)
    34				checkwidth(m.Type)
    35				continue
    36			}
    37	
    38			if !m.Type.IsInterface() {
    39				yyerrorl(m.Pos, "interface contains embedded non-interface %v", m.Type)
    40				m.SetBroke(true)
    41				t.SetBroke(true)
    42				// Add to fields so that error messages
    43				// include the broken embedded type when
    44				// printing t.
    45				// TODO(mdempsky): Revisit this.
    46				fields = append(fields, m)
    47				continue
    48			}
    49	
    50			// Embedded interface: duplicate all methods
    51			// (including broken ones, if any) and add to t's
    52			// method set.
    53			for _, t1 := range m.Type.Fields().Slice() {
    54				f := types.NewField()
    55				f.Pos = m.Pos // preserve embedding position
    56				f.Sym = t1.Sym
    57				f.Type = t1.Type
    58				f.SetBroke(t1.Broke())
    59				fields = append(fields, f)
    60			}
    61		}
    62		sort.Sort(methcmp(fields))
    63	
    64		// Access fields directly to avoid recursively calling dowidth
    65		// within Type.Fields().
    66		t.Extra.(*types.Interface).Fields.Set(fields)
    67	}
    68	
    69	func offmod(t *types.Type) {
    70		o := int32(0)
    71		for _, f := range t.Fields().Slice() {
    72			f.Offset = int64(o)
    73			o += int32(Widthptr)
    74			if int64(o) >= thearch.MAXWIDTH {
    75				yyerror("interface too large")
    76				o = int32(Widthptr)
    77			}
    78		}
    79	}
    80	
// widstruct lays out the fields of struct type t, placing the first
// field at offset o, and returns the offset just past the last field.
// errtype is the type named in "too large" error messages.
//
// flag selects the layout mode:
//	flag == 0: no rounding of the final offset (used for the
//	    receiver list of a function type, see dowidth TFUNCARGS).
//	flag == 1: a user-visible struct: round the final size to the
//	    struct's alignment and pad a trailing zero-sized field.
//	flag > 1: minimum alignment for the final rounding
//	    (e.g. Widthreg for parameter/result lists).
func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 {
	starto := o
	// flag doubles as the minimum alignment of the struct.
	maxalign := int32(flag)
	if maxalign < 1 {
		maxalign = 1
	}
	// Offset of the most recent zero-width field; used below to
	// detect a zero-sized field at the very end of the struct.
	lastzero := int64(0)
	for _, f := range t.Fields().Slice() {
		if f.Type == nil {
			// broken field, just skip it so that other valid fields
			// get a width.
			continue
		}

		dowidth(f.Type)
		if int32(f.Type.Align) > maxalign {
			maxalign = int32(f.Type.Align)
		}
		if f.Type.Align > 0 {
			o = Rnd(o, int64(f.Type.Align))
		}
		f.Offset = o
		if n := asNode(f.Nname); n != nil {
			// addrescapes has similar code to update these offsets.
			// Usually addrescapes runs after widstruct,
			// in which case we could drop this,
			// but function closure functions are the exception.
			// NOTE(rsc): This comment may be stale.
			// It's possible the ordering has changed and this is
			// now the common case. I'm not sure.
			if n.Name.Param.Stackcopy != nil {
				n.Name.Param.Stackcopy.Xoffset = o
				n.Xoffset = 0
			} else {
				n.Xoffset = o
			}
		}

		w := f.Type.Width
		if w < 0 {
			Fatalf("invalid width %d", f.Type.Width)
		}
		if w == 0 {
			lastzero = o
		}
		o += w
		maxwidth := thearch.MAXWIDTH
		// On 32-bit systems, reflect tables impose an additional constraint
		// that each field start offset must fit in 31 bits.
		if maxwidth < 1<<32 {
			maxwidth = 1<<31 - 1
		}
		if o >= maxwidth {
			yyerror("type %L too large", errtype)
			o = 8 // small but nonzero
		}
	}

	// For nonzero-sized structs which end in a zero-sized thing, we add
	// an extra byte of padding to the type. This padding ensures that
	// taking the address of the zero-sized thing can't manufacture a
	// pointer to the next object in the heap. See issue 9401.
	if flag == 1 && o > starto && o == lastzero {
		o++
	}

	// final width is rounded
	if flag != 0 {
		o = Rnd(o, int64(maxalign))
	}
	t.Align = uint8(maxalign)

	// type width only includes back to first field's offset
	t.Width = o - starto

	return o
}
   158	
// dowidth calculates and stores the size and alignment for t.
// If sizeCalculationDisabled is set, and the size/alignment
// have not already been calculated, it calls Fatal.
// This is used to prevent data races in the back end.
func dowidth(t *types.Type) {
	if Widthptr == 0 {
		Fatalf("dowidth without betypeinit")
	}

	if t == nil {
		return
	}

	// Width == -2 is the in-progress sentinel (set below before the
	// switch); seeing it again means t contains itself without
	// indirection, i.e. an invalid recursive type.
	if t.Width == -2 {
		if !t.Broke() {
			t.SetBroke(true)
			// t.Nod should not be nil here, but in some cases it appears to be
			// (see issue #23823). For now (temporary work-around) at a minimum
			// don't crash and provide a meaningful error message.
			// TODO(gri) determine the correct fix during a regular devel cycle
			// (see issue #31872).
			if t.Nod == nil {
				yyerror("invalid recursive type %v", t)
			} else {
				yyerrorl(asNode(t.Nod).Pos, "invalid recursive type %v", t)
			}
		}

		t.Width = 0
		t.Align = 1
		return
	}

	if t.WidthCalculated() {
		return
	}

	if sizeCalculationDisabled {
		if t.Broke() {
			// break infinite recursion from Fatal call below
			return
		}
		t.SetBroke(true)
		Fatalf("width not calculated: %v", t)
	}

	// break infinite recursion if the broken recursive type
	// is referenced again
	if t.Broke() && t.Width == 0 {
		return
	}

	// defer checkwidth calls until after we're done
	defercalc++

	// Point errors at t's declaration while we work on it.
	lno := lineno
	if asNode(t.Nod) != nil {
		lineno = asNode(t.Nod).Pos
	}

	t.Width = -2
	t.Align = 0 // 0 means use t.Width, below

	et := t.Etype
	switch et {
	case TFUNC, TCHAN, TMAP, TSTRING:
		break

	// simtype == 0 during bootstrap
	default:
		if simtype[t.Etype] != 0 {
			et = simtype[t.Etype]
		}
	}

	var w int64
	switch et {
	default:
		Fatalf("dowidth: unknown type: %v", t)

	// compiler-specific stuff
	case TINT8, TUINT8, TBOOL:
		// bool is int8
		w = 1

	case TINT16, TUINT16:
		w = 2

	case TINT32, TUINT32, TFLOAT32:
		w = 4

	case TINT64, TUINT64, TFLOAT64:
		w = 8
		t.Align = uint8(Widthreg)

	case TCOMPLEX64:
		w = 8
		t.Align = 4

	case TCOMPLEX128:
		w = 16
		t.Align = uint8(Widthreg)

	case TPTR:
		w = int64(Widthptr)
		checkwidth(t.Elem())

	case TUNSAFEPTR:
		w = int64(Widthptr)

	case TINTER: // implemented as 2 pointers
		w = 2 * int64(Widthptr)
		t.Align = uint8(Widthptr)
		expandiface(t)

	case TCHAN: // implemented as pointer
		w = int64(Widthptr)

		checkwidth(t.Elem())

		// make fake type to check later to
		// trigger channel argument check.
		t1 := types.NewChanArgs(t)
		checkwidth(t1)

	case TCHANARGS:
		t1 := t.ChanArgs()
		dowidth(t1) // just in case
		if t1.Elem().Width >= 1<<16 {
			yyerror("channel element type too large (>64kB)")
		}
		w = 1 // anything will do

	case TMAP: // implemented as pointer
		w = int64(Widthptr)
		checkwidth(t.Elem())
		checkwidth(t.Key())

	case TFORW: // should have been filled in
		if !t.Broke() {
			t.SetBroke(true)
			yyerror("invalid recursive type %v", t)
		}
		w = 1 // anything will do

	case TANY:
		// dummy type; should be replaced before use.
		Fatalf("dowidth any")

	case TSTRING:
		if sizeof_String == 0 {
			Fatalf("early dowidth string")
		}
		w = int64(sizeof_String)
		t.Align = uint8(Widthptr)

	case TARRAY:
		if t.Elem() == nil {
			break
		}
		if t.IsDDDArray() {
			if !t.Broke() {
				yyerror("use of [...] array outside of array literal")
				t.SetBroke(true)
			}
			break
		}

		dowidth(t.Elem())
		if t.Elem().Width != 0 {
			// Largest element count whose total size fits MAXWIDTH.
			cap := (uint64(thearch.MAXWIDTH) - 1) / uint64(t.Elem().Width)
			if uint64(t.NumElem()) > cap {
				yyerror("type %L larger than address space", t)
			}
		}
		w = t.NumElem() * t.Elem().Width
		t.Align = t.Elem().Align

	case TSLICE:
		if t.Elem() == nil {
			break
		}
		w = int64(sizeof_Array)
		checkwidth(t.Elem())
		t.Align = uint8(Widthptr)

	case TSTRUCT:
		if t.IsFuncArgStruct() {
			Fatalf("dowidth fn struct %v", t)
		}
		w = widstruct(t, t, 0, 1)

	// make fake type to check later to
	// trigger function argument computation.
	case TFUNC:
		t1 := types.NewFuncArgs(t)
		checkwidth(t1)
		w = int64(Widthptr) // width of func type is pointer

	// function is 3 concatenated structures (recvs, params, results);
	// compute their widths as side-effect.
	case TFUNCARGS:
		t1 := t.FuncArgs()
		w = widstruct(t1, t1.Recvs(), 0, 0)
		w = widstruct(t1, t1.Params(), w, Widthreg)
		w = widstruct(t1, t1.Results(), w, Widthreg)
		t1.Extra.(*types.Func).Argwid = w
		if w%int64(Widthreg) != 0 {
			Warn("bad type %v %d\n", t1, w)
		}
		t.Align = 1
	}

	// On 32-bit pointer targets, sizes must fit in int32.
	if Widthptr == 4 && w != int64(int32(w)) {
		yyerror("type %v too large", t)
	}

	t.Width = w
	if t.Align == 0 {
		if w == 0 || w > 8 || w&(w-1) != 0 {
			Fatalf("invalid alignment for %v", t)
		}
		t.Align = uint8(w)
	}

	if t.Etype == TINTER {
		// We defer calling these functions until after
		// setting t.Width and t.Align so the recursive calls
		// to dowidth within t.Fields() will succeed.
		checkdupfields("method", t)
		offmod(t)
	}

	lineno = lno

	// Pop the defercalc level pushed above; at the outermost level,
	// run any checkwidth calls that were deferred meanwhile.
	if defercalc == 1 {
		resumecheckwidth()
	} else {
		defercalc--
	}
}
   400	
   401	// when a type's width should be known, we call checkwidth
   402	// to compute it.  during a declaration like
   403	//
   404	//	type T *struct { next T }
   405	//
   406	// it is necessary to defer the calculation of the struct width
   407	// until after T has been initialized to be a pointer to that struct.
   408	// similarly, during import processing structs may be used
   409	// before their definition.  in those situations, calling
   410	// defercheckwidth() stops width calculations until
   411	// resumecheckwidth() is called, at which point all the
   412	// checkwidths that were deferred are executed.
   413	// dowidth should only be called when the type's size
   414	// is needed immediately.  checkwidth makes sure the
   415	// size is evaluated eventually.
   416	
   417	var deferredTypeStack []*types.Type
   418	
   419	func checkwidth(t *types.Type) {
   420		if t == nil {
   421			return
   422		}
   423	
   424		// function arg structs should not be checked
   425		// outside of the enclosing function.
   426		if t.IsFuncArgStruct() {
   427			Fatalf("checkwidth %v", t)
   428		}
   429	
   430		if defercalc == 0 {
   431			dowidth(t)
   432			return
   433		}
   434	
   435		// if type has not yet been pushed on deferredTypeStack yet, do it now
   436		if !t.Deferwidth() {
   437			t.SetDeferwidth(true)
   438			deferredTypeStack = append(deferredTypeStack, t)
   439		}
   440	}
   441	
   442	func defercheckwidth() {
   443		// we get out of sync on syntax errors, so don't be pedantic.
   444		if defercalc != 0 && nerrors == 0 {
   445			Fatalf("defercheckwidth")
   446		}
   447		defercalc = 1
   448	}
   449	
   450	func resumecheckwidth() {
   451		if defercalc == 0 {
   452			Fatalf("resumecheckwidth")
   453		}
   454	
   455		for len(deferredTypeStack) > 0 {
   456			t := deferredTypeStack[len(deferredTypeStack)-1]
   457			deferredTypeStack = deferredTypeStack[:len(deferredTypeStack)-1]
   458			t.SetDeferwidth(false)
   459			dowidth(t)
   460		}
   461	
   462		defercalc = 0
   463	}
   464	

View as plain text