...

Source file src/cmd/compile/internal/gc/walk.go

     1	// Copyright 2009 The Go Authors. All rights reserved.
     2	// Use of this source code is governed by a BSD-style
     3	// license that can be found in the LICENSE file.
     4	
     5	package gc
     6	
     7	import (
     8		"cmd/compile/internal/types"
     9		"cmd/internal/objabi"
    10		"cmd/internal/sys"
    11		"encoding/binary"
    12		"fmt"
    13		"strings"
    14	)
    15	
    16	// The constant is known to the runtime.
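      	// It matches the size of the runtime's temporary conversion buffers
      	// (runtime.tmpBuf), which the conversion calls below may use as a
      	// stack-allocated destination when the result does not escape.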
    17	const tmpstringbufsize = 32
    18	
    19	func walk(fn *Node) {
    20		Curfn = fn
    21	
    22		if Debug['W'] != 0 {
    23			s := fmt.Sprintf("\nbefore walk %v", Curfn.Func.Nname.Sym)
    24			dumplist(s, Curfn.Nbody)
    25		}
    26	
    27		lno := lineno
    28	
    29		// Final typecheck for any unused variables.
    30		for i, ln := range fn.Func.Dcl {
    31			if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) {
    32				ln = typecheck(ln, ctxExpr|ctxAssign)
    33				fn.Func.Dcl[i] = ln
    34			}
    35		}
    36	
    37		// Propagate the used flag for typeswitch variables up to the NONAME in its definition.
    38		for _, ln := range fn.Func.Dcl {
    39			if ln.Op == ONAME && (ln.Class() == PAUTO || ln.Class() == PAUTOHEAP) && ln.Name.Defn != nil && ln.Name.Defn.Op == OTYPESW && ln.Name.Used() {
    40				ln.Name.Defn.Left.Name.SetUsed(true)
    41			}
    42		}
    43	
    44		for _, ln := range fn.Func.Dcl {
    45			if ln.Op != ONAME || (ln.Class() != PAUTO && ln.Class() != PAUTOHEAP) || ln.Sym.Name[0] == '&' || ln.Name.Used() {
    46				continue
    47			}
    48			if defn := ln.Name.Defn; defn != nil && defn.Op == OTYPESW {
    49				if defn.Left.Name.Used() {
    50					continue
    51				}
    52				yyerrorl(defn.Left.Pos, "%v declared and not used", ln.Sym)
    53				defn.Left.Name.SetUsed(true) // suppress repeats
    54			} else {
    55				yyerrorl(ln.Pos, "%v declared and not used", ln.Sym)
    56			}
    57		}
    58	
    59		lineno = lno
    60		if nerrors != 0 {
    61			return
    62		}
    63		walkstmtlist(Curfn.Nbody.Slice())
    64		if Debug['W'] != 0 {
    65			s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym)
    66			dumplist(s, Curfn.Nbody)
    67		}
    68	
    69		zeroResults()
    70		heapmoves()
    71		if Debug['W'] != 0 && Curfn.Func.Enter.Len() > 0 {
    72			s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym)
    73			dumplist(s, Curfn.Func.Enter)
    74		}
    75	}
    76	
    77	func walkstmtlist(s []*Node) {
    78		for i := range s {
    79			s[i] = walkstmt(s[i])
    80		}
    81	}
    82	
    83	func samelist(a, b []*Node) bool {
    84		if len(a) != len(b) {
    85			return false
    86		}
    87		for i, n := range a {
    88			if n != b[i] {
    89				return false
    90			}
    91		}
    92		return true
    93	}
    94	
    95	func paramoutheap(fn *Node) bool {
    96		for _, ln := range fn.Func.Dcl {
    97			switch ln.Class() {
    98			case PPARAMOUT:
    99				if ln.isParamStackCopy() || ln.Addrtaken() {
   100					return true
   101				}
   102	
   103			case PAUTO:
   104				// stop early - parameters are over
   105				return false
   106			}
   107		}
   108	
   109		return false
   110	}
   111	
   112	// The result of walkstmt MUST be assigned back to n, e.g.
   113	// 	n.Left = walkstmt(n.Left)
   114	func walkstmt(n *Node) *Node {
   115		if n == nil {
   116			return n
   117		}
   118	
   119		setlineno(n)
   120	
   121		walkstmtlist(n.Ninit.Slice())
   122	
   123		switch n.Op {
   124		default:
   125			if n.Op == ONAME {
   126				yyerror("%v is not a top level statement", n.Sym)
   127			} else {
   128				yyerror("%v is not a top level statement", n.Op)
   129			}
   130			Dump("nottop", n)
   131	
   132		case OAS,
   133			OASOP,
   134			OAS2,
   135			OAS2DOTTYPE,
   136			OAS2RECV,
   137			OAS2FUNC,
   138			OAS2MAPR,
   139			OCLOSE,
   140			OCOPY,
   141			OCALLMETH,
   142			OCALLINTER,
   143			OCALL,
   144			OCALLFUNC,
   145			ODELETE,
   146			OSEND,
   147			OPRINT,
   148			OPRINTN,
   149			OPANIC,
   150			OEMPTY,
   151			ORECOVER,
   152			OGETG:
   153			if n.Typecheck() == 0 {
   154				Fatalf("missing typecheck: %+v", n)
   155			}
   156			wascopy := n.Op == OCOPY
   157			init := n.Ninit
   158			n.Ninit.Set(nil)
   159			n = walkexpr(n, &init)
   160			n = addinit(n, init.Slice())
   161			if wascopy && n.Op == OCONVNOP {
   162				n.Op = OEMPTY // don't leave plain values as statements.
   163			}
   164	
   165		// special case for a receive where we throw away
   166		// the value received.
   167		case ORECV:
   168			if n.Typecheck() == 0 {
   169				Fatalf("missing typecheck: %+v", n)
   170			}
   171			init := n.Ninit
   172			n.Ninit.Set(nil)
   173	
   174			n.Left = walkexpr(n.Left, &init)
   175			n = mkcall1(chanfn("chanrecv1", 2, n.Left.Type), nil, &init, n.Left, nodnil())
   176			n = walkexpr(n, &init)
   177	
   178			n = addinit(n, init.Slice())
   179	
   180		case OBREAK,
   181			OCONTINUE,
   182			OFALL,
   183			OGOTO,
   184			OLABEL,
   185			ODCLCONST,
   186			ODCLTYPE,
   187			OCHECKNIL,
   188			OVARDEF,
   189			OVARKILL,
   190			OVARLIVE:
   191			break
   192	
   193		case ODCL:
   194			v := n.Left
   195			if v.Class() == PAUTOHEAP {
   196				if compiling_runtime {
   197					yyerror("%v escapes to heap, not allowed in runtime.", v)
   198				}
   199				if prealloc[v] == nil {
   200					prealloc[v] = callnew(v.Type)
   201				}
   202				nn := nod(OAS, v.Name.Param.Heapaddr, prealloc[v])
   203				nn.SetColas(true)
   204				nn = typecheck(nn, ctxStmt)
   205				return walkstmt(nn)
   206			}
   207	
   208		case OBLOCK:
   209			walkstmtlist(n.List.Slice())
   210	
   211		case OXCASE:
   212			yyerror("case statement out of place")
   213			n.Op = OCASE
   214			fallthrough
   215	
   216		case OCASE:
   217			n.Right = walkstmt(n.Right)
   218	
   219		case ODEFER:
   220			Curfn.Func.SetHasDefer(true)
   221			fallthrough
   222		case OGO:
   223			switch n.Left.Op {
   224			case OPRINT, OPRINTN:
   225				n.Left = wrapCall(n.Left, &n.Ninit)
   226	
   227			case ODELETE:
   228				if mapfast(n.Left.List.First().Type) == mapslow {
   229					n.Left = wrapCall(n.Left, &n.Ninit)
   230				} else {
   231					n.Left = walkexpr(n.Left, &n.Ninit)
   232				}
   233	
   234			case OCOPY:
   235				n.Left = copyany(n.Left, &n.Ninit, true)
   236	
   237			default:
   238				n.Left = walkexpr(n.Left, &n.Ninit)
   239			}
   240	
   241		case OFOR, OFORUNTIL:
   242			if n.Left != nil {
   243				walkstmtlist(n.Left.Ninit.Slice())
   244				init := n.Left.Ninit
   245				n.Left.Ninit.Set(nil)
   246				n.Left = walkexpr(n.Left, &init)
   247				n.Left = addinit(n.Left, init.Slice())
   248			}
   249	
   250			n.Right = walkstmt(n.Right)
   251			if n.Op == OFORUNTIL {
   252				walkstmtlist(n.List.Slice())
   253			}
   254			walkstmtlist(n.Nbody.Slice())
   255	
   256		case OIF:
   257			n.Left = walkexpr(n.Left, &n.Ninit)
   258			walkstmtlist(n.Nbody.Slice())
   259			walkstmtlist(n.Rlist.Slice())
   260	
   261		case ORETURN:
   262			if n.List.Len() == 0 {
   263				break
   264			}
   265			if (Curfn.Type.FuncType().Outnamed && n.List.Len() > 1) || paramoutheap(Curfn) {
   266				// assign to the function out parameters,
   267				// so that reorder3 can fix up conflicts
   268				var rl []*Node
   269	
   270				for _, ln := range Curfn.Func.Dcl {
   271					cl := ln.Class()
   272					if cl == PAUTO || cl == PAUTOHEAP {
   273						break
   274					}
   275					if cl == PPARAMOUT {
   276						if ln.isParamStackCopy() {
   277							ln = walkexpr(typecheck(nod(ODEREF, ln.Name.Param.Heapaddr, nil), ctxExpr), nil)
   278						}
   279						rl = append(rl, ln)
   280					}
   281				}
   282	
   283				if got, want := n.List.Len(), len(rl); got != want {
   284					// order should have rewritten multi-value function calls
   285					// with explicit OAS2FUNC nodes.
   286					Fatalf("expected %v return arguments, have %v", want, got)
   287				}
   288	
   289				if samelist(rl, n.List.Slice()) {
   290					// special return in disguise
   291					// TODO(josharian, 1.12): is "special return" still relevant?
   292					// Tests still pass w/o this. See comments on https://go-review.googlesource.com/c/go/+/118318
   293					walkexprlist(n.List.Slice(), &n.Ninit)
   294					n.List.Set(nil)
   295	
   296					break
   297				}
   298	
   299				// move function calls out, to make reorder3's job easier.
   300				walkexprlistsafe(n.List.Slice(), &n.Ninit)
   301	
   302				ll := ascompatee(n.Op, rl, n.List.Slice(), &n.Ninit)
   303				n.List.Set(reorder3(ll))
   304				break
   305			}
   306			walkexprlist(n.List.Slice(), &n.Ninit)
   307	
   308			// For each return parameter (lhs), assign the corresponding result (rhs).
   309			lhs := Curfn.Type.Results()
   310			rhs := n.List.Slice()
   311			res := make([]*Node, lhs.NumFields())
   312			for i, nl := range lhs.FieldSlice() {
   313				nname := asNode(nl.Nname)
   314				if nname.isParamHeapCopy() {
   315					nname = nname.Name.Param.Stackcopy
   316				}
   317				a := nod(OAS, nname, rhs[i])
   318				res[i] = convas(a, &n.Ninit)
   319			}
   320			n.List.Set(res)
   321	
   322		case ORETJMP:
   323			break
   324	
   325		case OINLMARK:
   326			break
   327	
   328		case OSELECT:
   329			walkselect(n)
   330	
   331		case OSWITCH:
   332			walkswitch(n)
   333	
   334		case ORANGE:
   335			n = walkrange(n)
   336		}
   337	
   338		if n.Op == ONAME {
   339			Fatalf("walkstmt ended up with name: %+v", n)
   340		}
   341		return n
   342	}
   343	
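      	// isSmallMakeSlice reports whether n is a make([]T, len, cap) with
      	// constant len and cap whose backing array is small enough (see
      	// maxImplicitStackVarSize) to be stack-allocated when it does not escape.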
   344	func isSmallMakeSlice(n *Node) bool {
   345		if n.Op != OMAKESLICE {
   346			return false
   347		}
   348		l := n.Left
   349		r := n.Right
   350		if r == nil {
   351			r = l
   352		}
   353		t := n.Type
   354	
   355		return smallintconst(l) && smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < maxImplicitStackVarSize/t.Elem().Width)
   356	}
   357	
   358	// walk the whole tree of the body of an
   359	// expression or simple statement.
   360	// the types of expressions are calculated.
   361	// compile-time constants are evaluated.
   362	// statements needed for complex side effects are appended to init.
   363	func walkexprlist(s []*Node, init *Nodes) {
   364		for i := range s {
   365			s[i] = walkexpr(s[i], init)
   366		}
   367	}
   368	
   369	func walkexprlistsafe(s []*Node, init *Nodes) {
   370		for i, n := range s {
   371			s[i] = safeexpr(n, init)
   372			s[i] = walkexpr(s[i], init)
   373		}
   374	}
   375	
   376	func walkexprlistcheap(s []*Node, init *Nodes) {
   377		for i, n := range s {
   378			s[i] = cheapexpr(n, init)
   379			s[i] = walkexpr(s[i], init)
   380		}
   381	}
   382	
   383	// convFuncName builds the runtime function name for interface conversion.
   384	// It also reports whether the function expects the data by address.
   385	// Not all names are possible. For example, we never generate convE2E or convE2I.
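      	// For example (per the switch below), a plain string operand selects
      	// convTstring, while a pointer-free multi-word struct converted to an
      	// empty interface selects convT2Enoptr, which takes its operand by address.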
   386	func convFuncName(from, to *types.Type) (fnname string, needsaddr bool) {
   387		tkind := to.Tie()
   388		switch from.Tie() {
   389		case 'I':
   390			if tkind == 'I' {
   391				return "convI2I", false
   392			}
   393		case 'T':
   394			switch {
   395			case from.Size() == 2 && from.Align == 2:
   396				return "convT16", false
   397			case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
   398				return "convT32", false
   399			case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
   400				return "convT64", false
   401			}
   402			if sc := from.SoleComponent(); sc != nil {
   403				switch {
   404				case sc.IsString():
   405					return "convTstring", false
   406				case sc.IsSlice():
   407					return "convTslice", false
   408				}
   409			}
   410	
   411			switch tkind {
   412			case 'E':
   413				if !types.Haspointers(from) {
   414					return "convT2Enoptr", true
   415				}
   416				return "convT2E", true
   417			case 'I':
   418				if !types.Haspointers(from) {
   419					return "convT2Inoptr", true
   420				}
   421				return "convT2I", true
   422			}
   423		}
   424		Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie())
   425		panic("unreachable")
   426	}
   427	
   428	// The result of walkexpr MUST be assigned back to n, e.g.
   429	// 	n.Left = walkexpr(n.Left, init)
   430	func walkexpr(n *Node, init *Nodes) *Node {
   431		if n == nil {
   432			return n
   433		}
   434	
   435		// Eagerly checkwidth all expressions for the back end.
   436		if n.Type != nil && !n.Type.WidthCalculated() {
   437			switch n.Type.Etype {
   438			case TBLANK, TNIL, TIDEAL:
   439			default:
   440				checkwidth(n.Type)
   441			}
   442		}
   443	
   444		if init == &n.Ninit {
   445			// not okay to use n->ninit when walking n,
   446			// because we might replace n with some other node
   447			// and would lose the init list.
   448			Fatalf("walkexpr init == &n->ninit")
   449		}
   450	
   451		if n.Ninit.Len() != 0 {
   452			walkstmtlist(n.Ninit.Slice())
   453			init.AppendNodes(&n.Ninit)
   454		}
   455	
   456		lno := setlineno(n)
   457	
   458		if Debug['w'] > 1 {
   459			Dump("before walk expr", n)
   460		}
   461	
   462		if n.Typecheck() != 1 {
   463			Fatalf("missed typecheck: %+v", n)
   464		}
   465	
   466		if n.Type.IsUntyped() {
   467			Fatalf("expression has untyped type: %+v", n)
   468		}
   469	
   470		if n.Op == ONAME && n.Class() == PAUTOHEAP {
   471			nn := nod(ODEREF, n.Name.Param.Heapaddr, nil)
   472			nn = typecheck(nn, ctxExpr)
   473			nn = walkexpr(nn, init)
   474			nn.Left.SetNonNil(true)
   475			return nn
   476		}
   477	
   478	opswitch:
   479		switch n.Op {
   480		default:
   481			Dump("walk", n)
   482			Fatalf("walkexpr: switch 1 unknown op %+S", n)
   483	
   484		case ONONAME, OEMPTY, OGETG, ONEWOBJ:
   485	
   486		case OTYPE, ONAME, OLITERAL:
   487			// TODO(mdempsky): Just return n; see discussion on CL 38655.
   488			// Perhaps refactor to use Node.mayBeShared for these instead.
   489			// If these return early, make sure to still call
   490			// stringsym for constant strings.
   491	
   492		case ONOT, ONEG, OPLUS, OBITNOT, OREAL, OIMAG, ODOTMETH, ODOTINTER,
   493			ODEREF, OSPTR, OITAB, OIDATA, OADDR:
   494			n.Left = walkexpr(n.Left, init)
   495	
   496		case OEFACE, OAND, OSUB, OMUL, OADD, OOR, OXOR, OLSH, ORSH:
   497			n.Left = walkexpr(n.Left, init)
   498			n.Right = walkexpr(n.Right, init)
   499	
   500		case ODOT, ODOTPTR:
   501			usefield(n)
   502			n.Left = walkexpr(n.Left, init)
   503	
   504		case ODOTTYPE, ODOTTYPE2:
   505			n.Left = walkexpr(n.Left, init)
   506			// Set up interface type addresses for back end.
   507			n.Right = typename(n.Type)
   508			if n.Op == ODOTTYPE {
   509				n.Right.Right = typename(n.Left.Type)
   510			}
   511			if !n.Type.IsInterface() && !n.Left.Type.IsEmptyInterface() {
   512				n.List.Set1(itabname(n.Type, n.Left.Type))
   513			}
   514	
   515		case OLEN, OCAP:
   516			if isRuneCount(n) {
   517				// Replace len([]rune(string)) with runtime.countrunes(string).
   518				n = mkcall("countrunes", n.Type, init, conv(n.Left.Left, types.Types[TSTRING]))
   519				break
   520			}
   521	
   522			n.Left = walkexpr(n.Left, init)
   523	
   524			// replace len(*[10]int) with 10.
   525			// delayed until now to preserve side effects.
   526			t := n.Left.Type
   527	
   528			if t.IsPtr() {
   529				t = t.Elem()
   530			}
   531			if t.IsArray() {
   532				safeexpr(n.Left, init)
   533				setintconst(n, t.NumElem())
   534				n.SetTypecheck(1)
   535			}
   536	
   537		case OCOMPLEX:
   538			// Use results from call expression as arguments for complex.
   539			if n.Left == nil && n.Right == nil {
   540				n.Left = n.List.First()
   541				n.Right = n.List.Second()
   542			}
   543			n.Left = walkexpr(n.Left, init)
   544			n.Right = walkexpr(n.Right, init)
   545	
   546		case OEQ, ONE, OLT, OLE, OGT, OGE:
   547			n = walkcompare(n, init)
   548	
   549		case OANDAND, OOROR:
   550			n.Left = walkexpr(n.Left, init)
   551	
   552			// cannot put side effects from n.Right on init,
   553			// because they cannot run before n.Left is checked.
   554			// save elsewhere and store on the eventual n.Right.
   555			var ll Nodes
   556	
   557			n.Right = walkexpr(n.Right, &ll)
   558			n.Right = addinit(n.Right, ll.Slice())
   559			n = walkinrange(n, init)
   560	
   561		case OPRINT, OPRINTN:
   562			n = walkprint(n, init)
   563	
   564		case OPANIC:
   565			n = mkcall("gopanic", nil, init, n.Left)
   566	
   567		case ORECOVER:
   568			n = mkcall("gorecover", n.Type, init, nod(OADDR, nodfp, nil))
   569	
   570		case OCLOSUREVAR, OCFUNC:
   571			n.SetAddable(true)
   572	
   573		case OCALLINTER, OCALLFUNC, OCALLMETH:
   574			if n.Op == OCALLINTER {
   575				usemethod(n)
   576			}
   577	
   578			if n.Op == OCALLFUNC && n.Left.Op == OCLOSURE {
   579				// Transform direct call of a closure to call of a normal function.
   580				// transformclosure already did all preparation work.
   581	
   582				// Prepend captured variables to argument list.
   583				n.List.Prepend(n.Left.Func.Enter.Slice()...)
   584	
   585				n.Left.Func.Enter.Set(nil)
   586	
   587				// Replace OCLOSURE with ONAME/PFUNC.
   588				n.Left = n.Left.Func.Closure.Func.Nname
   589	
   590				// Update type of OCALLFUNC node.
   591				// Output arguments haven't changed, but their offsets could have.
   592				if n.Left.Type.NumResults() == 1 {
   593					n.Type = n.Left.Type.Results().Field(0).Type
   594				} else {
   595					n.Type = n.Left.Type.Results()
   596				}
   597			}
   598	
   599			walkCall(n, init)
   600	
   601		case OAS, OASOP:
   602			init.AppendNodes(&n.Ninit)
   603	
   604			// Recognize m[k] = append(m[k], ...) so we can reuse
   605			// the mapassign call.
   606			mapAppend := n.Left.Op == OINDEXMAP && n.Right.Op == OAPPEND
   607			if mapAppend && !samesafeexpr(n.Left, n.Right.List.First()) {
   608				Fatalf("not same expressions: %v != %v", n.Left, n.Right.List.First())
   609			}
   610	
   611			n.Left = walkexpr(n.Left, init)
   612			n.Left = safeexpr(n.Left, init)
   613	
   614			if mapAppend {
   615				n.Right.List.SetFirst(n.Left)
   616			}
   617	
   618			if n.Op == OASOP {
   619				// Rewrite x op= y into x = x op y.
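      				// For example, x /= f() becomes x = x / f().
      				// n.Left was made safe above, so reusing it on the
      				// right-hand side does not re-evaluate its operands.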
   620				n.Right = nod(n.SubOp(), n.Left, n.Right)
   621				n.Right = typecheck(n.Right, ctxExpr)
   622	
   623				n.Op = OAS
   624				n.ResetAux()
   625			}
   626	
   627			if oaslit(n, init) {
   628				break
   629			}
   630	
   631			if n.Right == nil {
   632				// TODO(austin): Check all "implicit zeroing"
   633				break
   634			}
   635	
   636			if !instrumenting && isZero(n.Right) {
   637				break
   638			}
   639	
   640			switch n.Right.Op {
   641			default:
   642				n.Right = walkexpr(n.Right, init)
   643	
   644			case ORECV:
   645				// x = <-c; n.Left is x, n.Right.Left is c.
   646				// orderstmt made sure x is addressable.
   647				n.Right.Left = walkexpr(n.Right.Left, init)
   648	
   649				n1 := nod(OADDR, n.Left, nil)
   650				r := n.Right.Left // the channel
   651				n = mkcall1(chanfn("chanrecv1", 2, r.Type), nil, init, r, n1)
   652				n = walkexpr(n, init)
   653				break opswitch
   654	
   655			case OAPPEND:
   656				// x = append(...)
   657				r := n.Right
   658				if r.Type.Elem().NotInHeap() {
   659					yyerror("%v is go:notinheap; heap allocation disallowed", r.Type.Elem())
   660				}
   661				switch {
   662				case isAppendOfMake(r):
   663					// x = append(y, make([]T, y)...)
   664					r = extendslice(r, init)
   665				case r.IsDDD():
   666					r = appendslice(r, init) // also works for append(slice, string).
   667				default:
   668					r = walkappend(r, init, n)
   669				}
   670				n.Right = r
   671				if r.Op == OAPPEND {
   672					// Left in place for back end.
   673					// Do not add a new write barrier.
   674					// Set up address of type for back end.
   675					r.Left = typename(r.Type.Elem())
   676					break opswitch
   677				}
   678				// Otherwise, lowered for race detector.
   679				// Treat as ordinary assignment.
   680			}
   681	
   682			if n.Left != nil && n.Right != nil {
   683				n = convas(n, init)
   684			}
   685	
   686		case OAS2:
   687			init.AppendNodes(&n.Ninit)
   688			walkexprlistsafe(n.List.Slice(), init)
   689			walkexprlistsafe(n.Rlist.Slice(), init)
   690			ll := ascompatee(OAS, n.List.Slice(), n.Rlist.Slice(), init)
   691			ll = reorder3(ll)
   692			n = liststmt(ll)
   693	
   694		// a,b,... = fn()
   695		case OAS2FUNC:
   696			init.AppendNodes(&n.Ninit)
   697	
   698			r := n.Rlist.First()
   699			walkexprlistsafe(n.List.Slice(), init)
   700			r = walkexpr(r, init)
   701	
   702			if isIntrinsicCall(r) {
   703				n.Rlist.Set1(r)
   704				break
   705			}
   706			init.Append(r)
   707	
   708			ll := ascompatet(n.List, r.Type)
   709			n = liststmt(ll)
   710	
   711		// x, y = <-c
   712		// orderstmt made sure x is addressable.
   713		case OAS2RECV:
   714			init.AppendNodes(&n.Ninit)
   715	
   716			r := n.Rlist.First()
   717			walkexprlistsafe(n.List.Slice(), init)
   718			r.Left = walkexpr(r.Left, init)
   719			var n1 *Node
   720			if n.List.First().isBlank() {
   721				n1 = nodnil()
   722			} else {
   723				n1 = nod(OADDR, n.List.First(), nil)
   724			}
   725			fn := chanfn("chanrecv2", 2, r.Left.Type)
   726			ok := n.List.Second()
   727			call := mkcall1(fn, ok.Type, init, r.Left, n1)
   728			n = nod(OAS, ok, call)
   729			n = typecheck(n, ctxStmt)
   730	
   731		// a,b = m[i]
   732		case OAS2MAPR:
   733			init.AppendNodes(&n.Ninit)
   734	
   735			r := n.Rlist.First()
   736			walkexprlistsafe(n.List.Slice(), init)
   737			r.Left = walkexpr(r.Left, init)
   738			r.Right = walkexpr(r.Right, init)
   739			t := r.Left.Type
   740	
   741			fast := mapfast(t)
   742			var key *Node
   743			if fast != mapslow {
   744				// fast versions take key by value
   745				key = r.Right
   746			} else {
   747				// standard version takes key by reference
   748				// orderexpr made sure key is addressable.
   749				key = nod(OADDR, r.Right, nil)
   750			}
   751	
   752			// from:
   753			//   a,b = m[i]
   754			// to:
   755			//   var,b = mapaccess2*(t, m, i)
   756			//   a = *var
   757			a := n.List.First()
   758	
   759			if w := t.Elem().Width; w <= 1024 { // 1024 must match runtime/map.go:maxZero
   760				fn := mapfn(mapaccess2[fast], t)
   761				r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key)
   762			} else {
   763				fn := mapfn("mapaccess2_fat", t)
   764				z := zeroaddr(w)
   765				r = mkcall1(fn, fn.Type.Results(), init, typename(t), r.Left, key, z)
   766			}
   767	
   768			// mapaccess2* returns a typed bool, but due to spec changes,
   769			// the boolean result of i.(T) is now untyped so we make it the
   770			// same type as the variable on the lhs.
   771			if ok := n.List.Second(); !ok.isBlank() && ok.Type.IsBoolean() {
   772				r.Type.Field(1).Type = ok.Type
   773			}
   774			n.Rlist.Set1(r)
   775			n.Op = OAS2FUNC
   776	
   777			// don't generate a = *var if a is _
   778			if !a.isBlank() {
   779				var_ := temp(types.NewPtr(t.Elem()))
   780				var_.SetTypecheck(1)
   781				var_.SetNonNil(true) // mapaccess always returns a non-nil pointer
   782				n.List.SetFirst(var_)
   783				n = walkexpr(n, init)
   784				init.Append(n)
   785				n = nod(OAS, a, nod(ODEREF, var_, nil))
   786			}
   787	
   788			n = typecheck(n, ctxStmt)
   789			n = walkexpr(n, init)
   790	
   791		case ODELETE:
   792			init.AppendNodes(&n.Ninit)
   793			map_ := n.List.First()
   794			key := n.List.Second()
   795			map_ = walkexpr(map_, init)
   796			key = walkexpr(key, init)
   797	
   798			t := map_.Type
   799			fast := mapfast(t)
   800			if fast == mapslow {
   801				// orderstmt made sure key is addressable.
   802				key = nod(OADDR, key, nil)
   803			}
   804			n = mkcall1(mapfndel(mapdelete[fast], t), nil, init, typename(t), map_, key)
   805	
   806		case OAS2DOTTYPE:
   807			walkexprlistsafe(n.List.Slice(), init)
   808			n.Rlist.SetFirst(walkexpr(n.Rlist.First(), init))
   809	
   810		case OCONVIFACE:
   811			n.Left = walkexpr(n.Left, init)
   812	
   813			fromType := n.Left.Type
   814			toType := n.Type
   815	
   816			// typeword generates the type word of the interface value.
   817			typeword := func() *Node {
   818				if toType.IsEmptyInterface() {
   819					return typename(fromType)
   820				}
   821				return itabname(fromType, toType)
   822			}
   823	
   824			// Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped.
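      			// For a pointer-shaped operand (e.g. a *T or chan value) the
      			// conversion allocates nothing: the result is just the pair
      			// {type/itab word, operand}.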
   825			if isdirectiface(fromType) {
   826				l := nod(OEFACE, typeword(), n.Left)
   827				l.Type = toType
   828				l.SetTypecheck(n.Typecheck())
   829				n = l
   830				break
   831			}
   832	
   833			if staticbytes == nil {
   834				staticbytes = newname(Runtimepkg.Lookup("staticbytes"))
   835				staticbytes.SetClass(PEXTERN)
   836				staticbytes.Type = types.NewArray(types.Types[TUINT8], 256)
   837				zerobase = newname(Runtimepkg.Lookup("zerobase"))
   838				zerobase.SetClass(PEXTERN)
   839				zerobase.Type = types.Types[TUINTPTR]
   840			}
   841	
   842			// Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
   843			// by using an existing addressable value identical to n.Left
   844			// or creating one on the stack.
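      			// For example, a bool or byte operand ends up pointing into the
      			// runtime's staticbytes array, and a zero-sized operand points at
      			// zerobase, so neither case allocates.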
   845			var value *Node
   846			switch {
   847			case fromType.Size() == 0:
   848				// n.Left is zero-sized. Use zerobase.
   849				cheapexpr(n.Left, init) // Evaluate n.Left for side-effects. See issue 19246.
   850				value = zerobase
   851			case fromType.IsBoolean() || (fromType.Size() == 1 && fromType.IsInteger()):
   852				// n.Left is a bool/byte. Use staticbytes[n.Left].
   853				n.Left = cheapexpr(n.Left, init)
   854				value = nod(OINDEX, staticbytes, byteindex(n.Left))
   855				value.SetBounded(true)
   856			case n.Left.Class() == PEXTERN && n.Left.Name != nil && n.Left.Name.Readonly():
   857				// n.Left is a readonly global; use it directly.
   858				value = n.Left
   859			case !fromType.IsInterface() && n.Esc == EscNone && fromType.Width <= 1024:
   860				// n.Left does not escape. Use a stack temporary initialized to n.Left.
   861				value = temp(fromType)
   862				init.Append(typecheck(nod(OAS, value, n.Left), ctxStmt))
   863			}
   864	
   865			if value != nil {
   866				// Value is identical to n.Left.
   867				// Construct the interface directly: {type/itab, &value}.
   868				l := nod(OEFACE, typeword(), typecheck(nod(OADDR, value, nil), ctxExpr))
   869				l.Type = toType
   870				l.SetTypecheck(n.Typecheck())
   871				n = l
   872				break
   873			}
   874	
   875			// Implement interface to empty interface conversion.
   876			// tmp = i.itab
   877			// if tmp != nil {
   878			//    tmp = tmp.type
   879			// }
   880			// e = iface{tmp, i.data}
   881			if toType.IsEmptyInterface() && fromType.IsInterface() && !fromType.IsEmptyInterface() {
   882				// Evaluate the input interface.
   883				c := temp(fromType)
   884				init.Append(nod(OAS, c, n.Left))
   885	
   886				// Get the itab out of the interface.
   887				tmp := temp(types.NewPtr(types.Types[TUINT8]))
   888				init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), ctxExpr)))
   889	
   890				// Get the type out of the itab.
   891				nif := nod(OIF, typecheck(nod(ONE, tmp, nodnil()), ctxExpr), nil)
   892				nif.Nbody.Set1(nod(OAS, tmp, itabType(tmp)))
   893				init.Append(nif)
   894	
   895				// Build the result.
   896				e := nod(OEFACE, tmp, ifaceData(c, types.NewPtr(types.Types[TUINT8])))
   897				e.Type = toType // assign type manually, typecheck doesn't understand OEFACE.
   898				e.SetTypecheck(1)
   899				n = e
   900				break
   901			}
   902	
   903			fnname, needsaddr := convFuncName(fromType, toType)
   904	
   905			if !needsaddr && !fromType.IsInterface() {
   906				// Use a specialized conversion routine that only returns a data pointer.
   907				// ptr = convT2X(val)
   908				// e = iface{typ/tab, ptr}
   909				fn := syslook(fnname)
   910				dowidth(fromType)
   911				fn = substArgTypes(fn, fromType)
   912				dowidth(fn.Type)
   913				call := nod(OCALL, fn, nil)
   914				call.List.Set1(n.Left)
   915				call = typecheck(call, ctxExpr)
   916				call = walkexpr(call, init)
   917				call = safeexpr(call, init)
   918				e := nod(OEFACE, typeword(), call)
   919				e.Type = toType
   920				e.SetTypecheck(1)
   921				n = e
   922				break
   923			}
   924	
   925			var tab *Node
   926			if fromType.IsInterface() {
   927				// convI2I
   928				tab = typename(toType)
   929			} else {
   930				// convT2x
   931				tab = typeword()
   932			}
   933	
   934			v := n.Left
   935			if needsaddr {
   936				// Types of large or unknown size are passed by reference.
   937				// Orderexpr arranged for n.Left to be a temporary for all
   938				// the conversions it could see. Comparison of an interface
   939				// with a non-interface, especially in a switch on interface value
   940				// with non-interface cases, is not visible to orderstmt, so we
   941				// have to fall back on allocating a temp here.
   942				if !islvalue(v) {
   943					v = copyexpr(v, v.Type, init)
   944				}
   945				v = nod(OADDR, v, nil)
   946			}
   947	
   948			dowidth(fromType)
   949			fn := syslook(fnname)
   950			fn = substArgTypes(fn, fromType, toType)
   951			dowidth(fn.Type)
   952			n = nod(OCALL, fn, nil)
   953			n.List.Set2(tab, v)
   954			n = typecheck(n, ctxExpr)
   955			n = walkexpr(n, init)
   956	
   957		case OCONV, OCONVNOP:
   958			n.Left = walkexpr(n.Left, init)
   959			param, result := rtconvfn(n.Left.Type, n.Type)
   960			if param == Txxx {
   961				break
   962			}
   963			fn := basicnames[param] + "to" + basicnames[result]
   964			n = conv(mkcall(fn, types.Types[result], init, conv(n.Left, types.Types[param])), n.Type)
   965	
   966		case OANDNOT:
   967			n.Left = walkexpr(n.Left, init)
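      			// Rewrite x &^ y into x & ^y.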
   968			n.Op = OAND
   969			n.Right = nod(OBITNOT, n.Right, nil)
   970			n.Right = typecheck(n.Right, ctxExpr)
   971			n.Right = walkexpr(n.Right, init)
   972	
   973		case ODIV, OMOD:
   974			n.Left = walkexpr(n.Left, init)
   975			n.Right = walkexpr(n.Right, init)
   976	
   977			// rewrite complex div into function call.
   978			et := n.Left.Type.Etype
   979	
   980			if isComplex[et] && n.Op == ODIV {
   981				t := n.Type
   982				n = mkcall("complex128div", types.Types[TCOMPLEX128], init, conv(n.Left, types.Types[TCOMPLEX128]), conv(n.Right, types.Types[TCOMPLEX128]))
   983				n = conv(n, t)
   984				break
   985			}
   986	
   987			// Nothing to do for float divisions.
   988			if isFloat[et] {
   989				break
   990			}
   991	
   992			// rewrite 64-bit div and mod on 32-bit architectures.
   993			// TODO: Remove this code once we can introduce
   994			// runtime calls late in SSA processing.
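      			// For example, on a 32-bit target x % y with uint64 operands
      			// becomes a call to the runtime helper uint64mod, unless y is a
      			// constant power of two, which is left for the SSA backend
      			// (see the OLITERAL check below).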
   995			if Widthreg < 8 && (et == TINT64 || et == TUINT64) {
   996				if n.Right.Op == OLITERAL {
   997					// Leave div/mod by constant powers of 2.
   998					// The SSA backend will handle those.
   999					switch et {
  1000					case TINT64:
  1001						c := n.Right.Int64()
  1002						if c < 0 {
  1003							c = -c
  1004						}
  1005						if c != 0 && c&(c-1) == 0 {
  1006							break opswitch
  1007						}
  1008					case TUINT64:
  1009						c := uint64(n.Right.Int64())
  1010						if c != 0 && c&(c-1) == 0 {
  1011							break opswitch
  1012						}
  1013					}
  1014				}
  1015				var fn string
  1016				if et == TINT64 {
  1017					fn = "int64"
  1018				} else {
  1019					fn = "uint64"
  1020				}
  1021				if n.Op == ODIV {
  1022					fn += "div"
  1023				} else {
  1024					fn += "mod"
  1025				}
  1026				n = mkcall(fn, n.Type, init, conv(n.Left, types.Types[et]), conv(n.Right, types.Types[et]))
  1027			}
  1028	
  1029		case OINDEX:
  1030			n.Left = walkexpr(n.Left, init)
  1031	
  1032		// Save the original node for bounds-check elision.
  1033		// If it was an ODIV/OMOD, walk might rewrite it.
  1034			r := n.Right
  1035	
  1036			n.Right = walkexpr(n.Right, init)
  1037	
  1038			// if range of type cannot exceed static array bound,
  1039			// disable bounds check.
  1040			if n.Bounded() {
  1041				break
  1042			}
  1043			t := n.Left.Type
  1044			if t != nil && t.IsPtr() {
  1045				t = t.Elem()
  1046			}
  1047			if t.IsArray() {
  1048				n.SetBounded(bounded(r, t.NumElem()))
  1049				if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1050					Warn("index bounds check elided")
  1051				}
  1052				if smallintconst(n.Right) && !n.Bounded() {
  1053					yyerror("index out of bounds")
  1054				}
  1055			} else if Isconst(n.Left, CTSTR) {
  1056				n.SetBounded(bounded(r, int64(len(n.Left.Val().U.(string)))))
  1057				if Debug['m'] != 0 && n.Bounded() && !Isconst(n.Right, CTINT) {
  1058					Warn("index bounds check elided")
  1059				}
  1060				if smallintconst(n.Right) && !n.Bounded() {
  1061					yyerror("index out of bounds")
  1062				}
  1063			}
  1064	
  1065			if Isconst(n.Right, CTINT) {
  1066				if n.Right.Val().U.(*Mpint).CmpInt64(0) < 0 || n.Right.Val().U.(*Mpint).Cmp(maxintval[TINT]) > 0 {
  1067					yyerror("index out of bounds")
  1068				}
  1069			}
  1070	
  1071		case OINDEXMAP:
  1072			// Replace m[k] with *map{access1,assign}(maptype, m, &k)
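      			// For example, v := m[k] loads through mapaccess1 (or a
      			// mapaccess1_fast* variant), while m[k] = v goes through mapassign.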
  1073			n.Left = walkexpr(n.Left, init)
  1074			n.Right = walkexpr(n.Right, init)
  1075			map_ := n.Left
  1076			key := n.Right
  1077			t := map_.Type
  1078			if n.IndexMapLValue() {
  1079				// This m[k] expression is on the left-hand side of an assignment.
  1080				fast := mapfast(t)
  1081				if fast == mapslow {
  1082					// standard version takes key by reference.
  1083					// orderexpr made sure key is addressable.
  1084					key = nod(OADDR, key, nil)
  1085				}
  1086				n = mkcall1(mapfn(mapassign[fast], t), nil, init, typename(t), map_, key)
  1087			} else {
  1088				// m[k] is not the target of an assignment.
  1089				fast := mapfast(t)
  1090				if fast == mapslow {
  1091					// standard version takes key by reference.
  1092					// orderexpr made sure key is addressable.
  1093					key = nod(OADDR, key, nil)
  1094				}
  1095	
  1096				if w := t.Elem().Width; w <= 1024 { // 1024 must match runtime/map.go:maxZero
  1097					n = mkcall1(mapfn(mapaccess1[fast], t), types.NewPtr(t.Elem()), init, typename(t), map_, key)
  1098				} else {
  1099					z := zeroaddr(w)
  1100					n = mkcall1(mapfn("mapaccess1_fat", t), types.NewPtr(t.Elem()), init, typename(t), map_, key, z)
  1101				}
  1102			}
  1103			n.Type = types.NewPtr(t.Elem())
  1104			n.SetNonNil(true) // mapaccess1* and mapassign always return non-nil pointers.
  1105			n = nod(ODEREF, n, nil)
  1106			n.Type = t.Elem()
  1107			n.SetTypecheck(1)
  1108	
  1109		case ORECV:
  1110			Fatalf("walkexpr ORECV") // should see inside OAS only
  1111	
  1112		case OSLICEHEADER:
  1113			n.Left = walkexpr(n.Left, init)
  1114			n.List.SetFirst(walkexpr(n.List.First(), init))
  1115			n.List.SetSecond(walkexpr(n.List.Second(), init))
  1116	
  1117		case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
  1118			n.Left = walkexpr(n.Left, init)
  1119			low, high, max := n.SliceBounds()
  1120			low = walkexpr(low, init)
  1121			if low != nil && isZero(low) {
  1122				// Reduce x[0:j] to x[:j] and x[0:j:k] to x[:j:k].
  1123				low = nil
  1124			}
  1125			high = walkexpr(high, init)
  1126			max = walkexpr(max, init)
  1127			n.SetSliceBounds(low, high, max)
  1128			if n.Op.IsSlice3() {
  1129				if max != nil && max.Op == OCAP && samesafeexpr(n.Left, max.Left) {
  1130					// Reduce x[i:j:cap(x)] to x[i:j].
  1131					if n.Op == OSLICE3 {
  1132						n.Op = OSLICE
  1133					} else {
  1134						n.Op = OSLICEARR
  1135					}
  1136					n = reduceSlice(n)
  1137				}
  1138			} else {
  1139				n = reduceSlice(n)
  1140			}
  1141	
  1142		case ONEW:
  1143			if n.Esc == EscNone {
  1144				if n.Type.Elem().Width >= maxImplicitStackVarSize {
  1145					Fatalf("large ONEW with EscNone: %v", n)
  1146				}
  1147				r := temp(n.Type.Elem())
  1148				r = nod(OAS, r, nil) // zero temp
  1149				r = typecheck(r, ctxStmt)
  1150				init.Append(r)
  1151				r = nod(OADDR, r.Left, nil)
  1152				r = typecheck(r, ctxExpr)
  1153				n = r
  1154			} else {
  1155				n = callnew(n.Type.Elem())
  1156			}
  1157	
  1158		case OADDSTR:
  1159			n = addstr(n, init)
  1160	
  1161		case OAPPEND:
  1162			// order should make sure we only see OAS(node, OAPPEND), which we handle above.
  1163			Fatalf("append outside assignment")
  1164	
  1165		case OCOPY:
  1166			n = copyany(n, init, instrumenting && !compiling_runtime)
  1167	
  1168			// cannot use chanfn - closechan takes any, not chan any
  1169		case OCLOSE:
  1170			fn := syslook("closechan")
  1171	
  1172			fn = substArgTypes(fn, n.Left.Type)
  1173			n = mkcall1(fn, nil, init, n.Left)
  1174	
  1175		case OMAKECHAN:
  1176			// When size fits into int, use makechan instead of
  1177			// makechan64, which is faster and shorter on 32 bit platforms.
  1178			size := n.Left
  1179			fnname := "makechan64"
  1180			argtype := types.Types[TINT64]
  1181	
  1182			// Type checking guarantees that TIDEAL size is positive and fits in an int.
  1183			// The case of size overflow when converting TUINT or TUINTPTR to TINT
  1184			// will be handled by the negative range checks in makechan during runtime.
  1185			if size.Type.IsKind(TIDEAL) || maxintval[size.Type.Etype].Cmp(maxintval[TUINT]) <= 0 {
  1186				fnname = "makechan"
  1187				argtype = types.Types[TINT]
  1188			}
  1189	
  1190			n = mkcall1(chanfn(fnname, 1, n.Type), n.Type, init, typename(n.Type), conv(size, argtype))
  1191	
  1192		case OMAKEMAP:
  1193			t := n.Type
  1194			hmapType := hmap(t)
  1195			hint := n.Left
  1196	
  1197			// var h *hmap
  1198			var h *Node
  1199			if n.Esc == EscNone {
  1200				// Allocate hmap on stack.
  1201	
  1202				// var hv hmap
  1203				hv := temp(hmapType)
  1204				zero := nod(OAS, hv, nil)
  1205				zero = typecheck(zero, ctxStmt)
  1206				init.Append(zero)
  1207				// h = &hv
  1208				h = nod(OADDR, hv, nil)
  1209	
  1210				// Allocate one bucket pointed to by hmap.buckets on stack if hint
  1211				// is not larger than BUCKETSIZE. If hint is larger than
  1212				// BUCKETSIZE, runtime.makemap will allocate the buckets on the heap.
  1213				// Maximum key and elem size is 128 bytes, larger objects
  1214				// are stored with an indirection. So max bucket size is 2048+eps.
  1215				if !Isconst(hint, CTINT) ||
  1216					hint.Val().U.(*Mpint).CmpInt64(BUCKETSIZE) <= 0 {
  1217					// var bv bmap
  1218					bv := temp(bmap(t))
  1219	
  1220					zero = nod(OAS, bv, nil)
  1221					zero = typecheck(zero, ctxStmt)
  1222					init.Append(zero)
  1223	
  1224					// b = &bv
  1225					b := nod(OADDR, bv, nil)
  1226	
  1227					// h.buckets = b
  1228					bsym := hmapType.Field(5).Sym // hmap.buckets see reflect.go:hmap
  1229					na := nod(OAS, nodSym(ODOT, h, bsym), b)
  1230					na = typecheck(na, ctxStmt)
  1231					init.Append(na)
  1232				}
  1233			}
  1234	
  1235			if Isconst(hint, CTINT) && hint.Val().U.(*Mpint).CmpInt64(BUCKETSIZE) <= 0 {
  1236				// Handling make(map[any]any) and
  1237				// make(map[any]any, hint) where hint <= BUCKETSIZE
  1238				// special allows for faster map initialization and
  1239				// improves binary size by using calls with fewer arguments.
  1240				// For hint <= BUCKETSIZE overLoadFactor(hint, 0) is false
  1241				// and no buckets will be allocated by makemap. Therefore,
  1242				// no buckets need to be allocated in this code path.
  1243				if n.Esc == EscNone {
  1244					// Only need to initialize h.hash0 since
  1245					// hmap h has been allocated on the stack already.
  1246					// h.hash0 = fastrand()
  1247					rand := mkcall("fastrand", types.Types[TUINT32], init)
  1248					hashsym := hmapType.Field(4).Sym // hmap.hash0 see reflect.go:hmap
  1249					a := nod(OAS, nodSym(ODOT, h, hashsym), rand)
  1250					a = typecheck(a, ctxStmt)
  1251					a = walkexpr(a, init)
  1252					init.Append(a)
  1253					n = convnop(h, t)
  1254				} else {
  1255					// Call runtime.makehmap to allocate an
  1256					// hmap on the heap and initialize hmap's hash0 field.
  1257					fn := syslook("makemap_small")
  1258					fn = substArgTypes(fn, t.Key(), t.Elem())
  1259					n = mkcall1(fn, n.Type, init)
  1260				}
  1261			} else {
  1262				if n.Esc != EscNone {
  1263					h = nodnil()
  1264				}
  1265				// Map initialization with a variable or large hint is
  1266				// more complicated. We therefore generate a call to
  1267				// runtime.makemap to initialize hmap and allocate the
  1268				// map buckets.
  1269	
  1270				// When hint fits into int, use makemap instead of
  1271				// makemap64, which is faster and shorter on 32 bit platforms.
  1272				fnname := "makemap64"
  1273				argtype := types.Types[TINT64]
  1274	
  1275				// Type checking guarantees that TIDEAL hint is positive and fits in an int.
  1276				// See checkmake call in TMAP case of OMAKE case in OpSwitch in typecheck1 function.
  1277				// The case of hint overflow when converting TUINT or TUINTPTR to TINT
  1278				// will be handled by the negative range checks in makemap during runtime.
  1279				if hint.Type.IsKind(TIDEAL) || maxintval[hint.Type.Etype].Cmp(maxintval[TUINT]) <= 0 {
  1280					fnname = "makemap"
  1281					argtype = types.Types[TINT]
  1282				}
  1283	
  1284				fn := syslook(fnname)
  1285				fn = substArgTypes(fn, hmapType, t.Key(), t.Elem())
  1286				n = mkcall1(fn, n.Type, init, typename(n.Type), conv(hint, argtype), h)
  1287			}
  1288	
  1289		case OMAKESLICE:
  1290			l := n.Left
  1291			r := n.Right
  1292			if r == nil {
  1293				r = safeexpr(l, init)
  1294				l = r
  1295			}
  1296			t := n.Type
  1297			if n.Esc == EscNone {
  1298				if !isSmallMakeSlice(n) {
  1299					Fatalf("non-small OMAKESLICE with EscNone: %v", n)
  1300				}
  1301				// var arr [r]T
  1302				// n = arr[:l]
  1303				i := indexconst(r)
  1304				if i < 0 {
  1305					Fatalf("walkexpr: invalid index %v", r)
  1306				}
  1307				t = types.NewArray(t.Elem(), i) // [r]T
  1308				var_ := temp(t)
  1309				a := nod(OAS, var_, nil) // zero temp
  1310				a = typecheck(a, ctxStmt)
  1311				init.Append(a)
  1312				r := nod(OSLICE, var_, nil) // arr[:l]
  1313				r.SetSliceBounds(nil, l, nil)
  1314				r = conv(r, n.Type) // in case n.Type is named.
  1315				r = typecheck(r, ctxExpr)
  1316				r = walkexpr(r, init)
  1317				n = r
  1318			} else {
  1319				// n escapes; set up a call to makeslice.
  1320				// When len and cap can fit into int, use makeslice instead of
  1321				// makeslice64, which is faster and shorter on 32 bit platforms.
  1322	
  1323				if t.Elem().NotInHeap() {
  1324					yyerror("%v is go:notinheap; heap allocation disallowed", t.Elem())
  1325				}
  1326	
  1327				len, cap := l, r
  1328	
  1329				fnname := "makeslice64"
  1330				argtype := types.Types[TINT64]
  1331	
  1332				// Type checking guarantees that TIDEAL len/cap are positive and fit in an int.
  1333				// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
  1334				// will be handled by the negative range checks in makeslice during runtime.
  1335				if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) &&
  1336					(cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) {
  1337					fnname = "makeslice"
  1338					argtype = types.Types[TINT]
  1339				}
  1340	
  1341				m := nod(OSLICEHEADER, nil, nil)
  1342				m.Type = t
  1343	
  1344				fn := syslook(fnname)
  1345				m.Left = mkcall1(fn, types.Types[TUNSAFEPTR], init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype))
  1346				m.Left.SetNonNil(true)
  1347				m.List.Set2(conv(len, types.Types[TINT]), conv(cap, types.Types[TINT]))
  1348	
  1349				m = typecheck(m, ctxExpr)
  1350				m = walkexpr(m, init)
  1351				n = m
  1352			}
  1353	
  1354		case ORUNESTR:
  1355			a := nodnil()
  1356			if n.Esc == EscNone {
  1357				t := types.NewArray(types.Types[TUINT8], 4)
  1358				a = nod(OADDR, temp(t), nil)
  1359			}
  1360			// intstring(*[4]byte, rune)
  1361			n = mkcall("intstring", n.Type, init, a, conv(n.Left, types.Types[TINT64]))
  1362	
  1363		case OBYTES2STR, ORUNES2STR:
  1364			a := nodnil()
  1365			if n.Esc == EscNone {
  1366				// Create temporary buffer for string on stack.
  1367				t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1368				a = nod(OADDR, temp(t), nil)
  1369			}
  1370			fn := "slicebytetostring"
  1371			if n.Op == ORUNES2STR {
  1372				fn = "slicerunetostring"
  1373			}
  1374			// slicebytetostring(*[32]byte, []byte) string
  1375			// slicerunetostring(*[32]byte, []rune) string
  1376			n = mkcall(fn, n.Type, init, a, n.Left)
  1377	
  1378		case OBYTES2STRTMP:
  1379			n.Left = walkexpr(n.Left, init)
  1380			if !instrumenting {
  1381				// Let the backend handle OBYTES2STRTMP directly
  1382				// to avoid a function call to slicebytetostringtmp.
  1383				break
  1384			}
  1385			// slicebytetostringtmp([]byte) string
  1386			n = mkcall("slicebytetostringtmp", n.Type, init, n.Left)
  1387	
  1388		case OSTR2BYTES:
  1389			s := n.Left
  1390			if Isconst(s, CTSTR) {
  1391				sc := s.Val().U.(string)
  1392	
  1393				// Allocate a [n]byte of the right size.
  1394				t := types.NewArray(types.Types[TUINT8], int64(len(sc)))
  1395				var a *Node
  1396				if n.Esc == EscNone && len(sc) <= int(maxImplicitStackVarSize) {
  1397					a = nod(OADDR, temp(t), nil)
  1398				} else {
  1399					a = callnew(t)
  1400				}
  1401				p := temp(t.PtrTo()) // *[n]byte
  1402				init.Append(typecheck(nod(OAS, p, a), ctxStmt))
  1403	
  1404				// Copy from the static string data to the [n]byte.
  1405				if len(sc) > 0 {
  1406					as := nod(OAS,
  1407						nod(ODEREF, p, nil),
  1408						nod(ODEREF, convnop(nod(OSPTR, s, nil), t.PtrTo()), nil))
  1409					as = typecheck(as, ctxStmt)
  1410					as = walkstmt(as)
  1411					init.Append(as)
  1412				}
  1413	
  1414				// Slice the [n]byte to a []byte.
  1415				n.Op = OSLICEARR
  1416				n.Left = p
  1417				n = walkexpr(n, init)
  1418				break
  1419			}
  1420	
  1421			a := nodnil()
  1422			if n.Esc == EscNone {
  1423				// Create temporary buffer for slice on stack.
  1424				t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  1425				a = nod(OADDR, temp(t), nil)
  1426			}
  1427			// stringtoslicebyte(*[32]byte, string) []byte
  1428			n = mkcall("stringtoslicebyte", n.Type, init, a, conv(s, types.Types[TSTRING]))
  1429	
  1430		case OSTR2BYTESTMP:
  1431			// []byte(string) conversion that creates a slice
  1432			// referring to the actual string bytes.
  1433			// This conversion is handled later by the backend and
  1434			// is only for use by internal compiler optimizations
  1435			// that know that the slice won't be mutated.
  1436			// The only such case today is:
  1437			// for i, c := range []byte(string)
  1438			n.Left = walkexpr(n.Left, init)
  1439	
  1440		case OSTR2RUNES:
  1441			a := nodnil()
  1442			if n.Esc == EscNone {
  1443				// Create temporary buffer for slice on stack.
  1444				t := types.NewArray(types.Types[TINT32], tmpstringbufsize)
  1445				a = nod(OADDR, temp(t), nil)
  1446			}
  1447			// stringtoslicerune(*[32]rune, string) []rune
  1448			n = mkcall("stringtoslicerune", n.Type, init, a, conv(n.Left, types.Types[TSTRING]))
  1449	
  1450		case OARRAYLIT, OSLICELIT, OMAPLIT, OSTRUCTLIT, OPTRLIT:
  1451			if isStaticCompositeLiteral(n) && !canSSAType(n.Type) {
  1452				// n can be directly represented in the read-only data section.
  1453				// Make direct reference to the static data. See issue 12841.
  1454				vstat := staticname(n.Type)
  1455				vstat.Name.SetReadonly(true)
  1456				fixedlit(inInitFunction, initKindStatic, n, vstat, init)
  1457				n = vstat
  1458				n = typecheck(n, ctxExpr)
  1459				break
  1460			}
  1461			var_ := temp(n.Type)
  1462			anylit(n, var_, init)
  1463			n = var_
  1464	
  1465		case OSEND:
  1466			n1 := n.Right
  1467			n1 = assignconv(n1, n.Left.Type.Elem(), "chan send")
  1468			n1 = walkexpr(n1, init)
  1469			n1 = nod(OADDR, n1, nil)
  1470			n = mkcall1(chanfn("chansend1", 2, n.Left.Type), nil, init, n.Left, n1)
  1471	
  1472		case OCLOSURE:
  1473			n = walkclosure(n, init)
  1474	
  1475		case OCALLPART:
  1476			n = walkpartialcall(n, init)
  1477		}
  1478	
  1479		// Expressions that are constant at run time but not
  1480		// considered const by the language spec are not turned into
  1481		// constants until walk. For example, if n is y%1 == 0, the
  1482		// walk of y%1 may have replaced it by 0.
  1483		// Check whether n with its updated args is itself now a constant.
  1484		t := n.Type
  1485		evconst(n)
  1486		if n.Type != t {
  1487			Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type)
  1488		}
  1489		if n.Op == OLITERAL {
  1490			n = typecheck(n, ctxExpr)
  1491			// Emit string symbol now to avoid emitting
  1492			// any concurrently during the backend.
  1493			if s, ok := n.Val().U.(string); ok {
  1494				_ = stringsym(n.Pos, s)
  1495			}
  1496		}
  1497	
  1498		updateHasCall(n)
  1499	
  1500		if Debug['w'] != 0 && n != nil {
  1501			Dump("after walk expr", n)
  1502		}
  1503	
  1504		lineno = lno
  1505		return n
  1506	}
  1507	
  1508	// rtconvfn returns the parameter and result types that will be used by a
  1509	// runtime function to convert from type src to type dst. The runtime function
  1510	// name can be derived from the names of the returned types.
  1511	//
  1512	// If no such function is necessary, it returns (Txxx, Txxx).
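      	//
      	// For example, converting int64 to float64 on ARM returns
      	// (TINT64, TFLOAT64), from which the OCONV case in walkexpr builds
      	// the runtime call int64tofloat64.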
  1513	func rtconvfn(src, dst *types.Type) (param, result types.EType) {
  1514		if thearch.SoftFloat {
  1515			return Txxx, Txxx
  1516		}
  1517	
  1518		switch thearch.LinkArch.Family {
  1519		case sys.ARM, sys.MIPS:
  1520			if src.IsFloat() {
  1521				switch dst.Etype {
  1522				case TINT64, TUINT64:
  1523					return TFLOAT64, dst.Etype
  1524				}
  1525			}
  1526			if dst.IsFloat() {
  1527				switch src.Etype {
  1528				case TINT64, TUINT64:
  1529					return src.Etype, TFLOAT64
  1530				}
  1531			}
  1532	
  1533		case sys.I386:
  1534			if src.IsFloat() {
  1535				switch dst.Etype {
  1536				case TINT64, TUINT64:
  1537					return TFLOAT64, dst.Etype
  1538				case TUINT32, TUINT, TUINTPTR:
  1539					return TFLOAT64, TUINT32
  1540				}
  1541			}
  1542			if dst.IsFloat() {
  1543				switch src.Etype {
  1544				case TINT64, TUINT64:
  1545					return src.Etype, TFLOAT64
  1546				case TUINT32, TUINT, TUINTPTR:
  1547					return TUINT32, TFLOAT64
  1548				}
  1549			}
  1550		}
  1551		return Txxx, Txxx
  1552	}
  1553	
  1554	// TODO(josharian): combine this with its caller and simplify
  1555	func reduceSlice(n *Node) *Node {
  1556		low, high, max := n.SliceBounds()
  1557		if high != nil && high.Op == OLEN && samesafeexpr(n.Left, high.Left) {
  1558			// Reduce x[i:len(x)] to x[i:].
  1559			high = nil
  1560		}
  1561		n.SetSliceBounds(low, high, max)
  1562		if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil {
  1563			// Reduce x[:] to x.
  1564			if Debug_slice > 0 {
  1565				Warn("slice: omit slice operation")
  1566			}
  1567			return n.Left
  1568		}
  1569		return n
  1570	}
  1571	
  1572	func ascompatee1(l *Node, r *Node, init *Nodes) *Node {
  1573		// convas will turn map assigns into function calls,
  1574		// making it impossible for reorder3 to work.
  1575		n := nod(OAS, l, r)
  1576	
  1577		if l.Op == OINDEXMAP {
  1578			return n
  1579		}
  1580	
  1581		return convas(n, init)
  1582	}
  1583	
  1584	func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node {
  1585		// check assign expression list to
  1586		// an expression list. called in
  1587		//	expr-list = expr-list
  1588	
  1589		// ensure order of evaluation for function calls
  1590		for i := range nl {
  1591			nl[i] = safeexpr(nl[i], init)
  1592		}
  1593		for i1 := range nr {
  1594			nr[i1] = safeexpr(nr[i1], init)
  1595		}
  1596	
  1597		var nn []*Node
  1598		i := 0
  1599		for ; i < len(nl); i++ {
  1600			if i >= len(nr) {
  1601				break
  1602			}
  1603			// Do not generate 'x = x' during return. See issue 4014.
  1604			if op == ORETURN && samesafeexpr(nl[i], nr[i]) {
  1605				continue
  1606			}
  1607			nn = append(nn, ascompatee1(nl[i], nr[i], init))
  1608		}
  1609	
  1610		// cannot happen: caller checked that lists had same length
  1611		if i < len(nl) || i < len(nr) {
  1612			var nln, nrn Nodes
  1613			nln.Set(nl)
  1614			nrn.Set(nr)
  1615			Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname())
  1616		}
  1617		return nn
  1618	}
  1619	
  1620	// fncall reports whether assigning an rvalue of type rt to an lvalue l might involve a function call.
  1621	func fncall(l *Node, rt *types.Type) bool {
  1622		if l.HasCall() || l.Op == OINDEXMAP {
  1623			return true
  1624		}
  1625		if types.Identical(l.Type, rt) {
  1626			return false
  1627		}
  1628		// There might be a conversion required, which might involve a runtime call.
  1629		return true
  1630	}
  1631	
  1632	// check assign type list to
  1633	// an expression list. called in
  1634	//	expr-list = func()
  1635	func ascompatet(nl Nodes, nr *types.Type) []*Node {
  1636		if nl.Len() != nr.NumFields() {
  1637			Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
  1638		}
  1639	
  1640		var nn, mm Nodes
  1641		for i, l := range nl.Slice() {
  1642			if l.isBlank() {
  1643				continue
  1644			}
  1645			r := nr.Field(i)
  1646	
  1647			// Any assignment to an lvalue that might cause a function call must be
  1648			// deferred until all the returned values have been read.
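      			// For example, in m[k], ok = f(), the map assignment is staged
      			// through a temporary and emitted (via mm) only after both
      			// results have been copied out of the call's results.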
  1649			if fncall(l, r.Type) {
  1650				tmp := temp(r.Type)
  1651				tmp = typecheck(tmp, ctxExpr)
  1652				a := nod(OAS, l, tmp)
  1653				a = convas(a, &mm)
  1654				mm.Append(a)
  1655				l = tmp
  1656			}
  1657	
  1658			res := nod(ORESULT, nil, nil)
  1659			res.Xoffset = Ctxt.FixedFrameSize() + r.Offset
  1660			res.Type = r.Type
  1661			res.SetTypecheck(1)
  1662	
  1663			a := nod(OAS, l, res)
  1664			a = convas(a, &nn)
  1665			updateHasCall(a)
  1666			if a.HasCall() {
  1667				Dump("ascompatet ucount", a)
  1668				Fatalf("ascompatet: too many function calls evaluating parameters")
  1669			}
  1670	
  1671			nn.Append(a)
  1672		}
  1673		return append(nn.Slice(), mm.Slice()...)
  1674	}
  1675	
  1676	// package all the arguments that match a ... T parameter into a []T.
  1677	func mkdotargslice(typ *types.Type, args []*Node, init *Nodes, ddd *Node) *Node {
  1678		esc := uint16(EscUnknown)
  1679		if ddd != nil {
  1680			esc = ddd.Esc
  1681		}
  1682	
  1683		if len(args) == 0 {
  1684			n := nodnil()
  1685			n.Type = typ
  1686			return n
  1687		}
  1688	
  1689		n := nod(OCOMPLIT, nil, typenod(typ))
  1690		if ddd != nil && prealloc[ddd] != nil {
  1691			prealloc[n] = prealloc[ddd] // temporary to use
  1692		}
  1693		n.List.Set(args)
  1694		n.Esc = esc
  1695		n = typecheck(n, ctxExpr)
  1696		if n.Type == nil {
  1697			Fatalf("mkdotargslice: typecheck failed")
  1698		}
  1699		n = walkexpr(n, init)
  1700		return n
  1701	}
  1702	
  1703	func walkCall(n *Node, init *Nodes) {
  1704		if n.Rlist.Len() != 0 {
  1705			return // already walked
  1706		}
  1707		n.Left = walkexpr(n.Left, init)
  1708		walkexprlist(n.List.Slice(), init)
  1709	
  1710		params := n.Left.Type.Params()
  1711		args := n.List.Slice()
  1712		// If there's a ... parameter (which is only valid as the final
  1713		// parameter) and this is not a ... call expression,
  1714		// then assign the remaining arguments as a slice.
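      	// For example, with f declared as func(x int, rest ...string),
      	// a call f(a, b, c) has its tail [b, c] packaged into a single
      	// []string argument here.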
  1715		if nf := params.NumFields(); nf > 0 {
  1716			if last := params.Field(nf - 1); last.IsDDD() && !n.IsDDD() {
  1717				tail := args[nf-1:]
  1718				slice := mkdotargslice(last.Type, tail, init, n.Right)
  1719				// Allow immediate GC.
  1720				for i := range tail {
  1721					tail[i] = nil
  1722				}
  1723				args = append(args[:nf-1], slice)
  1724			}
  1725		}
  1726	
  1727		// If this is a method call, add the receiver at the beginning of the args.
  1728		if n.Op == OCALLMETH {
  1729			withRecv := make([]*Node, len(args)+1)
  1730			withRecv[0] = n.Left.Left
  1731			n.Left.Left = nil
  1732			copy(withRecv[1:], args)
  1733			args = withRecv
  1734		}
  1735	
  1736		// For any argument whose evaluation might require a function call,
  1737		// store that argument into a temporary variable,
  1738		// to prevent those calls from clobbering arguments already on the stack.
  1739		// When instrumenting, all arguments might require function calls.
  1740		var tempAssigns []*Node
  1741		for i, arg := range args {
  1742			updateHasCall(arg)
  1743			// Determine param type.
  1744			var t *types.Type
  1745			if n.Op == OCALLMETH {
  1746				if i == 0 {
  1747					t = n.Left.Type.Recv().Type
  1748				} else {
  1749					t = params.Field(i - 1).Type
  1750				}
  1751			} else {
  1752				t = params.Field(i).Type
  1753			}
  1754			if instrumenting || fncall(arg, t) {
  1755				// make assignment of fncall to tempname
  1756				tmp := temp(t)
  1757				a := nod(OAS, tmp, arg)
  1758				a = convas(a, init)
  1759				tempAssigns = append(tempAssigns, a)
  1760				// replace arg with temp
  1761				args[i] = tmp
  1762			}
  1763		}
  1764	
  1765		n.List.Set(tempAssigns)
  1766		n.Rlist.Set(args)
  1767	}
  1768	
  1769	// walkprint lowers an OPRINT or OPRINTN node nn into calls to the runtime print routines.
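	// For example (roughly), for an int variable x,
	//	println("x =", x)
	// becomes
	//	printlock(); printstring("x = "); printint(int64(x)); printnl(); printunlock()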
  1770	func walkprint(nn *Node, init *Nodes) *Node {
  1771		// Hoist all the argument evaluation up before the lock.
  1772		walkexprlistcheap(nn.List.Slice(), init)
  1773	
  1774		// For println, add " " between elements and "\n" at the end.
  1775		if nn.Op == OPRINTN {
  1776			s := nn.List.Slice()
  1777			t := make([]*Node, 0, len(s)*2)
  1778			for i, n := range s {
  1779				if i != 0 {
  1780					t = append(t, nodstr(" "))
  1781				}
  1782				t = append(t, n)
  1783			}
  1784			t = append(t, nodstr("\n"))
  1785			nn.List.Set(t)
  1786		}
  1787	
  1788		// Collapse runs of constant strings.
  1789		s := nn.List.Slice()
  1790		t := make([]*Node, 0, len(s))
  1791		for i := 0; i < len(s); {
  1792			var strs []string
  1793			for i < len(s) && Isconst(s[i], CTSTR) {
  1794				strs = append(strs, s[i].Val().U.(string))
  1795				i++
  1796			}
  1797			if len(strs) > 0 {
  1798				t = append(t, nodstr(strings.Join(strs, "")))
  1799			}
  1800			if i < len(s) {
  1801				t = append(t, s[i])
  1802				i++
  1803			}
  1804		}
  1805		nn.List.Set(t)
  1806	
  1807		calls := []*Node{mkcall("printlock", nil, init)}
  1808		for i, n := range nn.List.Slice() {
  1809			if n.Op == OLITERAL {
  1810				switch n.Val().Ctype() {
  1811				case CTRUNE:
  1812					n = defaultlit(n, types.Runetype)
  1813	
  1814				case CTINT:
  1815					n = defaultlit(n, types.Types[TINT64])
  1816	
  1817				case CTFLT:
  1818					n = defaultlit(n, types.Types[TFLOAT64])
  1819				}
  1820			}
  1821	
  1822			if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
  1823				n = defaultlit(n, types.Types[TINT64])
  1824			}
  1825			n = defaultlit(n, nil)
  1826			nn.List.SetIndex(i, n)
  1827			if n.Type == nil || n.Type.Etype == TFORW {
  1828				continue
  1829			}
  1830	
  1831			var on *Node
  1832			switch n.Type.Etype {
  1833			case TINTER:
  1834				if n.Type.IsEmptyInterface() {
  1835					on = syslook("printeface")
  1836				} else {
  1837					on = syslook("printiface")
  1838				}
  1839				on = substArgTypes(on, n.Type) // any-1
  1840			case TPTR, TCHAN, TMAP, TFUNC, TUNSAFEPTR:
  1841				on = syslook("printpointer")
  1842				on = substArgTypes(on, n.Type) // any-1
  1843			case TSLICE:
  1844				on = syslook("printslice")
  1845				on = substArgTypes(on, n.Type) // any-1
  1846			case TUINT, TUINT8, TUINT16, TUINT32, TUINT64, TUINTPTR:
  1847				if isRuntimePkg(n.Type.Sym.Pkg) && n.Type.Sym.Name == "hex" {
  1848					on = syslook("printhex")
  1849				} else {
  1850					on = syslook("printuint")
  1851				}
  1852			case TINT, TINT8, TINT16, TINT32, TINT64:
  1853				on = syslook("printint")
  1854			case TFLOAT32, TFLOAT64:
  1855				on = syslook("printfloat")
  1856			case TCOMPLEX64, TCOMPLEX128:
  1857				on = syslook("printcomplex")
  1858			case TBOOL:
  1859				on = syslook("printbool")
  1860			case TSTRING:
  1861				cs := ""
  1862				if Isconst(n, CTSTR) {
  1863					cs = n.Val().U.(string)
  1864				}
  1865				switch cs {
  1866				case " ":
  1867					on = syslook("printsp")
  1868				case "\n":
  1869					on = syslook("printnl")
  1870				default:
  1871					on = syslook("printstring")
  1872				}
  1873			default:
  1874				badtype(OPRINT, n.Type, nil)
  1875				continue
  1876			}
  1877	
  1878			r := nod(OCALL, on, nil)
  1879			if params := on.Type.Params().FieldSlice(); len(params) > 0 {
  1880				t := params[0].Type
  1881				if !types.Identical(t, n.Type) {
  1882					n = nod(OCONV, n, nil)
  1883					n.Type = t
  1884				}
  1885				r.List.Append(n)
  1886			}
  1887			calls = append(calls, r)
  1888		}
  1889	
  1890		calls = append(calls, mkcall("printunlock", nil, init))
  1891	
  1892		typecheckslice(calls, ctxStmt)
  1893		walkexprlist(calls, init)
  1894	
  1895		r := nod(OEMPTY, nil, nil)
  1896		r = typecheck(r, ctxStmt)
  1897		r = walkexpr(r, init)
  1898		r.Ninit.Set(calls)
  1899		return r
  1900	}
  1901	
  1902	func callnew(t *types.Type) *Node {
  1903		if t.NotInHeap() {
  1904			yyerror("%v is go:notinheap; heap allocation disallowed", t)
  1905		}
  1906		dowidth(t)
  1907		n := nod(ONEWOBJ, typename(t), nil)
  1908		n.Type = types.NewPtr(t)
  1909		n.SetTypecheck(1)
  1910		n.SetNonNil(true)
  1911		return n
  1912	}
  1913	
  1914	// isReflectHeaderDataField reports whether l is an expression p.Data
  1915	// where p has type reflect.SliceHeader or reflect.StringHeader.
  1916	func isReflectHeaderDataField(l *Node) bool {
  1917		if l.Type != types.Types[TUINTPTR] {
  1918			return false
  1919		}
  1920	
  1921		var tsym *types.Sym
  1922		switch l.Op {
  1923		case ODOT:
  1924			tsym = l.Left.Type.Sym
  1925		case ODOTPTR:
  1926			tsym = l.Left.Type.Elem().Sym
  1927		default:
  1928			return false
  1929		}
  1930	
  1931		if tsym == nil || l.Sym.Name != "Data" || tsym.Pkg.Path != "reflect" {
  1932			return false
  1933		}
  1934		return tsym.Name == "SliceHeader" || tsym.Name == "StringHeader"
  1935	}
  1936	
  1937	func convas(n *Node, init *Nodes) *Node {
  1938		if n.Op != OAS {
  1939			Fatalf("convas: not OAS %v", n.Op)
  1940		}
  1941		defer updateHasCall(n)
  1942	
  1943		n.SetTypecheck(1)
  1944	
  1945		if n.Left == nil || n.Right == nil {
  1946			return n
  1947		}
  1948	
  1949		lt := n.Left.Type
  1950		rt := n.Right.Type
  1951		if lt == nil || rt == nil {
  1952			return n
  1953		}
  1954	
  1955		if n.Left.isBlank() {
  1956			n.Right = defaultlit(n.Right, nil)
  1957			return n
  1958		}
  1959	
  1960		if !types.Identical(lt, rt) {
  1961			n.Right = assignconv(n.Right, lt, "assignment")
  1962			n.Right = walkexpr(n.Right, init)
  1963		}
  1964		dowidth(n.Right.Type)
  1965	
  1966		return n
  1967	}
  1968	
  1969	// from ascompat[ee]
  1970	//	a,b = c,d
  1971	// simultaneous assignment. there cannot
  1972	// be later use of an earlier lvalue.
  1973	//
  1974	// function calls have been removed.
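	// For example (informally), after
	//	x, y = y, x
	// has been split into the assignment list [x = y, y = x], the x read on the
	// right-hand side of the second assignment would be clobbered by the first,
	// so it is copied into a temporary up front and the result is
	//	tmp := x; x = y; y = tmp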
  1975	func reorder3(all []*Node) []*Node {
  1976		// If a needed expression may be affected by an
  1977		// earlier assignment, make an early copy of that
  1978		// expression and use the copy instead.
  1979		var early []*Node
  1980	
  1981		var mapinit Nodes
  1982		for i, n := range all {
  1983			l := n.Left
  1984	
  1985			// Save subexpressions needed on left side.
  1986			// Drill through non-dereferences.
  1987			for {
  1988				if l.Op == ODOT || l.Op == OPAREN {
  1989					l = l.Left
  1990					continue
  1991				}
  1992	
  1993				if l.Op == OINDEX && l.Left.Type.IsArray() {
  1994					l.Right = reorder3save(l.Right, all, i, &early)
  1995					l = l.Left
  1996					continue
  1997				}
  1998	
  1999				break
  2000			}
  2001	
  2002			switch l.Op {
  2003			default:
  2004				Fatalf("reorder3 unexpected lvalue %#v", l.Op)
  2005	
  2006			case ONAME:
  2007				break
  2008	
  2009			case OINDEX, OINDEXMAP:
  2010				l.Left = reorder3save(l.Left, all, i, &early)
  2011				l.Right = reorder3save(l.Right, all, i, &early)
  2012				if l.Op == OINDEXMAP {
  2013					all[i] = convas(all[i], &mapinit)
  2014				}
  2015	
  2016			case ODEREF, ODOTPTR:
  2017				l.Left = reorder3save(l.Left, all, i, &early)
  2018			}
  2019	
  2020			// Save expression on right side.
  2021			all[i].Right = reorder3save(all[i].Right, all, i, &early)
  2022		}
  2023	
  2024		early = append(mapinit.Slice(), early...)
  2025		return append(early, all...)
  2026	}
  2027	
  2028	// reorder3save returns a replacement for n: if the evaluation of n would be
  2029	// affected by the assignments in all up to but not including the ith assignment,
  2030	// it copies n into a temporary, appends the copy statement to early,
  2031	// and returns the temporary in its place.
  2032	// The result of reorder3save MUST be assigned back to n, e.g.
  2033	// 	n.Left = reorder3save(n.Left, all, i, early)
  2034	func reorder3save(n *Node, all []*Node, i int, early *[]*Node) *Node {
  2035		if !aliased(n, all, i) {
  2036			return n
  2037		}
  2038	
  2039		q := temp(n.Type)
  2040		q = nod(OAS, q, n)
  2041		q = typecheck(q, ctxStmt)
  2042		*early = append(*early, q)
  2043		return q.Left
  2044	}
  2045	
  2046	// what's the outer value that a write to n affects?
  2047	// outer value means containing struct or array.
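	// For example (roughly), for a struct variable x with an array field f,
	// a write to x.f[2].g affects x as a whole, so outervalue returns x;
	// a write through a pointer (p.f, *p) or to a slice element stops there,
	// since the memory it affects is not part of a named containing value.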
  2048	func outervalue(n *Node) *Node {
  2049		for {
  2050			switch n.Op {
  2051			case OXDOT:
  2052				Fatalf("OXDOT in walk")
  2053			case ODOT, OPAREN, OCONVNOP:
  2054				n = n.Left
  2055				continue
  2056			case OINDEX:
  2057				if n.Left.Type != nil && n.Left.Type.IsArray() {
  2058					n = n.Left
  2059					continue
  2060				}
  2061			}
  2062	
  2063			return n
  2064		}
  2065	}
  2066	
  2067	// aliased reports whether it is possible that the computation of n might be
  2068	// affected by writes in all up to but not including the ith element.
  2069	func aliased(n *Node, all []*Node, i int) bool {
  2070		if n == nil {
  2071			return false
  2072		}
  2073	
  2074		// Treat all fields of a struct as referring to the whole struct.
  2075		// We could do better but we would have to keep track of the fields.
  2076		for n.Op == ODOT {
  2077			n = n.Left
  2078		}
  2079	
  2080		// Look for obvious aliasing: a variable being assigned
  2081		// during the all list and appearing in n.
  2082		// Also record whether there are any writes to main memory.
  2083		// Also record whether there are any writes to variables
  2084		// whose addresses have been taken.
  2085		memwrite := false
  2086		varwrite := false
  2087		for _, an := range all[:i] {
  2088			a := outervalue(an.Left)
  2089	
  2090			for a.Op == ODOT {
  2091				a = a.Left
  2092			}
  2093	
  2094			if a.Op != ONAME {
  2095				memwrite = true
  2096				continue
  2097			}
  2098	
  2099			switch n.Class() {
  2100			default:
  2101				varwrite = true
  2102				continue
  2103	
  2104			case PAUTO, PPARAM, PPARAMOUT:
  2105				if n.Addrtaken() {
  2106					varwrite = true
  2107					continue
  2108				}
  2109	
  2110				if vmatch2(a, n) {
  2111					// Direct hit.
  2112					return true
  2113				}
  2114			}
  2115		}
  2116	
  2117		// The variables being written do not appear in n.
  2118		// However, n might refer to computed addresses
  2119		// that are being written.
  2120	
  2121		// If no computed addresses are affected by the writes, no aliasing.
  2122		if !memwrite && !varwrite {
  2123			return false
  2124		}
  2125	
  2126		// If n does not refer to computed addresses
  2127		// (that is, if n only refers to variables whose addresses
  2128		// have not been taken), no aliasing.
  2129		if varexpr(n) {
  2130			return false
  2131		}
  2132	
  2133		// Otherwise, both the writes and n refer to computed memory addresses.
  2134		// Assume that they might conflict.
  2135		return true
  2136	}
  2137	
  2138	// does the evaluation of n only refer to variables
  2139	// whose addresses have not been taken?
  2140	// (and no other memory)
  2141	func varexpr(n *Node) bool {
  2142		if n == nil {
  2143			return true
  2144		}
  2145	
  2146		switch n.Op {
  2147		case OLITERAL:
  2148			return true
  2149	
  2150		case ONAME:
  2151			switch n.Class() {
  2152			case PAUTO, PPARAM, PPARAMOUT:
  2153				if !n.Addrtaken() {
  2154					return true
  2155				}
  2156			}
  2157	
  2158			return false
  2159	
  2160		case OADD,
  2161			OSUB,
  2162			OOR,
  2163			OXOR,
  2164			OMUL,
  2165			ODIV,
  2166			OMOD,
  2167			OLSH,
  2168			ORSH,
  2169			OAND,
  2170			OANDNOT,
  2171			OPLUS,
  2172			ONEG,
  2173			OBITNOT,
  2174			OPAREN,
  2175			OANDAND,
  2176			OOROR,
  2177			OCONV,
  2178			OCONVNOP,
  2179			OCONVIFACE,
  2180			ODOTTYPE:
  2181			return varexpr(n.Left) && varexpr(n.Right)
  2182	
  2183		case ODOT: // but not ODOTPTR
  2184			// Should have been handled in aliased.
  2185			Fatalf("varexpr unexpected ODOT")
  2186		}
  2187	
  2188		// Be conservative.
  2189		return false
  2190	}
  2191	
  2192	// is the name l mentioned in r?
  2193	func vmatch2(l *Node, r *Node) bool {
  2194		if r == nil {
  2195			return false
  2196		}
  2197		switch r.Op {
  2198		// match each right given left
  2199		case ONAME:
  2200			return l == r
  2201	
  2202		case OLITERAL:
  2203			return false
  2204		}
  2205	
  2206		if vmatch2(l, r.Left) {
  2207			return true
  2208		}
  2209		if vmatch2(l, r.Right) {
  2210			return true
  2211		}
  2212		for _, n := range r.List.Slice() {
  2213			if vmatch2(l, n) {
  2214				return true
  2215			}
  2216		}
  2217		return false
  2218	}
  2219	
  2220	// is any name mentioned in l also mentioned in r?
  2221	// called by sinit.go
  2222	func vmatch1(l *Node, r *Node) bool {
  2223		// isolate all left sides
  2224		if l == nil || r == nil {
  2225			return false
  2226		}
  2227		switch l.Op {
  2228		case ONAME:
  2229			switch l.Class() {
  2230			case PPARAM, PAUTO:
  2231				break
  2232	
  2233			default:
  2234				// assignment to non-stack variable must be
  2235				// delayed if right has function calls.
  2236				if r.HasCall() {
  2237					return true
  2238				}
  2239			}
  2240	
  2241			return vmatch2(l, r)
  2242	
  2243		case OLITERAL:
  2244			return false
  2245		}
  2246	
  2247		if vmatch1(l.Left, r) {
  2248			return true
  2249		}
  2250		if vmatch1(l.Right, r) {
  2251			return true
  2252		}
  2253		for _, n := range l.List.Slice() {
  2254			if vmatch1(n, r) {
  2255				return true
  2256			}
  2257		}
  2258		return false
  2259	}
  2260	
  2261	// paramstoheap returns code to allocate memory for heap-escaped parameters
  2262	// and to copy non-result parameters' values from the stack.
  2263	func paramstoheap(params *types.Type) []*Node {
  2264		var nn []*Node
  2265		for _, t := range params.Fields().Slice() {
  2266			v := asNode(t.Nname)
  2267			if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result
  2268				v = nil
  2269			}
  2270			if v == nil {
  2271				continue
  2272			}
  2273	
  2274			if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil {
  2275				nn = append(nn, walkstmt(nod(ODCL, v, nil)))
  2276				if stackcopy.Class() == PPARAM {
  2277					nn = append(nn, walkstmt(typecheck(nod(OAS, v, stackcopy), ctxStmt)))
  2278				}
  2279			}
  2280		}
  2281	
  2282		return nn
  2283	}
  2284	
  2285	// zeroResults zeros the return values at the start of the function.
  2286	// We need to do this very early in the function.  Defer might stop a
  2287	// panic and show the return values as they exist at the time of
  2288	// panic.  For precise stacks, the garbage collector assumes results
  2289	// are always live, so we need to zero them before any allocations,
  2290	// even allocations to move params/results to the heap.
  2291	// The generated code is added to Curfn's Enter list.
  2292	func zeroResults() {
  2293		for _, f := range Curfn.Type.Results().Fields().Slice() {
  2294			v := asNode(f.Nname)
  2295			if v != nil && v.Name.Param.Heapaddr != nil {
  2296				// The local which points to the return value is the
  2297				// thing that needs zeroing. This is already handled
  2298				// by a Needzero annotation in plive.go:livenessepilogue.
  2299				continue
  2300			}
  2301			if v.isParamHeapCopy() {
  2302				// TODO(josharian/khr): Investigate whether we can switch to "continue" here,
  2303				// and document more in either case.
  2304				// In the review of CL 114797, Keith wrote (roughly):
  2305				// I don't think the zeroing below matters.
  2306				// The stack return value will never be marked as live anywhere in the function.
  2307				// It is not written to until deferreturn returns.
  2308				v = v.Name.Param.Stackcopy
  2309			}
  2310			// Zero the stack location containing f.
  2311			Curfn.Func.Enter.Append(nodl(Curfn.Pos, OAS, v, nil))
  2312		}
  2313	}
  2314	
  2315	// returnsfromheap returns code to copy values for heap-escaped parameters
  2316	// back to the stack.
  2317	func returnsfromheap(params *types.Type) []*Node {
  2318		var nn []*Node
  2319		for _, t := range params.Fields().Slice() {
  2320			v := asNode(t.Nname)
  2321			if v == nil {
  2322				continue
  2323			}
  2324			if stackcopy := v.Name.Param.Stackcopy; stackcopy != nil && stackcopy.Class() == PPARAMOUT {
  2325				nn = append(nn, walkstmt(typecheck(nod(OAS, stackcopy, v), ctxStmt)))
  2326			}
  2327		}
  2328	
  2329		return nn
  2330	}
  2331	
  2332	// heapmoves generates code to handle migrating heap-escaped parameters
  2333	// between the stack and the heap. The generated code is added to Curfn's
  2334	// Enter and Exit lists.
  2335	func heapmoves() {
  2336		lno := lineno
  2337		lineno = Curfn.Pos
  2338		nn := paramstoheap(Curfn.Type.Recvs())
  2339		nn = append(nn, paramstoheap(Curfn.Type.Params())...)
  2340		nn = append(nn, paramstoheap(Curfn.Type.Results())...)
  2341		Curfn.Func.Enter.Append(nn...)
  2342		lineno = Curfn.Func.Endlineno
  2343		Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...)
  2344		lineno = lno
  2345	}
  2346	
  2347	func vmkcall(fn *Node, t *types.Type, init *Nodes, va []*Node) *Node {
  2348		if fn.Type == nil || fn.Type.Etype != TFUNC {
  2349			Fatalf("mkcall %v %v", fn, fn.Type)
  2350		}
  2351	
  2352		n := fn.Type.NumParams()
  2353		if n != len(va) {
  2354			Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
  2355		}
  2356	
  2357		r := nod(OCALL, fn, nil)
  2358		r.List.Set(va)
  2359		if fn.Type.NumResults() > 0 {
  2360			r = typecheck(r, ctxExpr|ctxMultiOK)
  2361		} else {
  2362			r = typecheck(r, ctxStmt)
  2363		}
  2364		r = walkexpr(r, init)
  2365		r.Type = t
  2366		return r
  2367	}
  2368	
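	// mkcall builds, typechecks, and walks a call to the runtime function
	// named name, with result type t and the given arguments. Statements
	// needed to evaluate the call are appended to init.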
  2369	func mkcall(name string, t *types.Type, init *Nodes, args ...*Node) *Node {
  2370		return vmkcall(syslook(name), t, init, args)
  2371	}
  2372	
  2373	func mkcall1(fn *Node, t *types.Type, init *Nodes, args ...*Node) *Node {
  2374		return vmkcall(fn, t, init, args)
  2375	}
  2376	
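	// conv converts node n to type t using the OCONV op
	// and typechecks the result with ctxExpr.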
  2377	func conv(n *Node, t *types.Type) *Node {
  2378		if types.Identical(n.Type, t) {
  2379			return n
  2380		}
  2381		n = nod(OCONV, n, nil)
  2382		n.Type = t
  2383		n = typecheck(n, ctxExpr)
  2384		return n
  2385	}
  2386	
  2387	// convnop converts node n to type t using the OCONVNOP op
  2388	// and typechecks the result with ctxExpr.
  2389	func convnop(n *Node, t *types.Type) *Node {
  2390		n = nod(OCONVNOP, n, nil)
  2391		n.Type = t
  2392		n = typecheck(n, ctxExpr)
  2393		return n
  2394	}
  2395	
  2396	// byteindex converts n, which is byte-sized, to a uint8.
  2397	// We cannot use conv, because we allow converting bool to uint8 here,
  2398	// which is forbidden in user code.
  2399	func byteindex(n *Node) *Node {
  2400		if types.Identical(n.Type, types.Types[TUINT8]) {
  2401			return n
  2402		}
  2403		n = nod(OCONV, n, nil)
  2404		n.Type = types.Types[TUINT8]
  2405		n.SetTypecheck(1)
  2406		return n
  2407	}
  2408	
  2409	func chanfn(name string, n int, t *types.Type) *Node {
  2410		if !t.IsChan() {
  2411			Fatalf("chanfn %v", t)
  2412		}
  2413		fn := syslook(name)
  2414		switch n {
  2415		default:
  2416			Fatalf("chanfn %d", n)
  2417		case 1:
  2418			fn = substArgTypes(fn, t.Elem())
  2419		case 2:
  2420			fn = substArgTypes(fn, t.Elem(), t.Elem())
  2421		}
  2422		return fn
  2423	}
  2424	
  2425	func mapfn(name string, t *types.Type) *Node {
  2426		if !t.IsMap() {
  2427			Fatalf("mapfn %v", t)
  2428		}
  2429		fn := syslook(name)
  2430		fn = substArgTypes(fn, t.Key(), t.Elem(), t.Key(), t.Elem())
  2431		return fn
  2432	}
  2433	
  2434	func mapfndel(name string, t *types.Type) *Node {
  2435		if !t.IsMap() {
  2436			Fatalf("mapfn %v", t)
  2437		}
  2438		fn := syslook(name)
  2439		fn = substArgTypes(fn, t.Key(), t.Elem(), t.Key())
  2440		return fn
  2441	}
  2442	
  2443	const (
  2444		mapslow = iota
  2445		mapfast32
  2446		mapfast32ptr
  2447		mapfast64
  2448		mapfast64ptr
  2449		mapfaststr
  2450		nmapfast
  2451	)
  2452	
  2453	type mapnames [nmapfast]string
  2454	
  2455	func mkmapnames(base string, ptr string) mapnames {
  2456		return mapnames{base, base + "_fast32", base + "_fast32" + ptr, base + "_fast64", base + "_fast64" + ptr, base + "_faststr"}
  2457	}
  2458	
  2459	var mapaccess1 = mkmapnames("mapaccess1", "")
  2460	var mapaccess2 = mkmapnames("mapaccess2", "")
  2461	var mapassign = mkmapnames("mapassign", "ptr")
  2462	var mapdelete = mkmapnames("mapdelete", "")
  2463	
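	// mapfast returns which runtime map helper variant (the _fast32, _fast64,
	// or _faststr forms, or their pointer variants) can be used for map type t,
	// falling back to mapslow for large elements and other key types.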
  2464	func mapfast(t *types.Type) int {
  2465		// Check runtime/map.go:maxElemSize before changing.
  2466		if t.Elem().Width > 128 {
  2467			return mapslow
  2468		}
  2469		switch algtype(t.Key()) {
  2470		case AMEM32:
  2471			if !t.Key().HasHeapPointer() {
  2472				return mapfast32
  2473			}
  2474			if Widthptr == 4 {
  2475				return mapfast32ptr
  2476			}
  2477			Fatalf("small pointer %v", t.Key())
  2478		case AMEM64:
  2479			if !t.Key().HasHeapPointer() {
  2480				return mapfast64
  2481			}
  2482			if Widthptr == 8 {
  2483				return mapfast64ptr
  2484			}
  2485			// Two-word object, at least one of which is a pointer.
  2486			// Use the slow path.
  2487		case ASTRING:
  2488			return mapfaststr
  2489		}
  2490		return mapslow
  2491	}
  2492	
  2493	func writebarrierfn(name string, l *types.Type, r *types.Type) *Node {
  2494		fn := syslook(name)
  2495		fn = substArgTypes(fn, l, r)
  2496		return fn
  2497	}
  2498	
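	// addstr lowers an OADDSTR concatenation of two or more strings into a call
	// to one of the runtime concatstring helpers. For example (roughly), a
	// non-escaping a + b + c becomes
	//	concatstring3(buf, a, b, c)
	// where buf points to a small stack buffer the runtime may use when the
	// result fits, and is nil when the result escapes.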
  2499	func addstr(n *Node, init *Nodes) *Node {
  2500		// orderexpr rewrote OADDSTR to have a list of strings.
  2501		c := n.List.Len()
  2502	
  2503		if c < 2 {
  2504			Fatalf("addstr count %d too small", c)
  2505		}
  2506	
  2507		buf := nodnil()
  2508		if n.Esc == EscNone {
  2509			sz := int64(0)
  2510			for _, n1 := range n.List.Slice() {
  2511				if n1.Op == OLITERAL {
  2512					sz += int64(len(n1.Val().U.(string)))
  2513				}
  2514			}
  2515	
  2516			// Don't allocate the buffer if the result won't fit.
  2517			if sz < tmpstringbufsize {
  2518				// Create temporary buffer for result string on stack.
  2519				t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
  2520				buf = nod(OADDR, temp(t), nil)
  2521			}
  2522		}
  2523	
  2524		// build list of string arguments
  2525		args := []*Node{buf}
  2526		for _, n2 := range n.List.Slice() {
  2527			args = append(args, conv(n2, types.Types[TSTRING]))
  2528		}
  2529	
  2530		var fn string
  2531		if c <= 5 {
  2532			// small numbers of strings use direct runtime helpers.
  2533			// note: orderexpr knows this cutoff too.
  2534			fn = fmt.Sprintf("concatstring%d", c)
  2535		} else {
  2536			// large numbers of strings are passed to the runtime as a slice.
  2537			fn = "concatstrings"
  2538	
  2539			t := types.NewSlice(types.Types[TSTRING])
  2540			slice := nod(OCOMPLIT, nil, typenod(t))
  2541			if prealloc[n] != nil {
  2542				prealloc[slice] = prealloc[n]
  2543			}
  2544			slice.List.Set(args[1:]) // skip buf arg
  2545			args = []*Node{buf, slice}
  2546			slice.Esc = EscNone
  2547		}
  2548	
  2549		cat := syslook(fn)
  2550		r := nod(OCALL, cat, nil)
  2551		r.List.Set(args)
  2552		r = typecheck(r, ctxExpr)
  2553		r = walkexpr(r, init)
  2554		r.Type = n.Type
  2555	
  2556		return r
  2557	}
  2558	
  2559	func walkAppendArgs(n *Node, init *Nodes) {
  2560		walkexprlistsafe(n.List.Slice(), init)
  2561	
  2562		// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  2563		// and n are names or literals, but those may index the slice we're
  2564		// modifying here. Fix explicitly.
  2565		ls := n.List.Slice()
  2566		for i1, n1 := range ls {
  2567			ls[i1] = cheapexpr(n1, init)
  2568		}
  2569	}
  2570	
  2571	// expand append(l1, l2...) to
  2572	//   init {
  2573	//     s := l1
  2574	//     n := len(s) + len(l2)
  2575	//     // Compare as uint so growslice can panic on overflow.
  2576	//     if uint(n) > uint(cap(s)) {
  2577	//       s = growslice(s, n)
  2578	//     }
  2579	//     s = s[:n]
  2580	//     memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  2581	//   }
  2582	//   s
  2583	//
  2584	// l2 is allowed to be a string.
  2585	func appendslice(n *Node, init *Nodes) *Node {
  2586		walkAppendArgs(n, init)
  2587	
  2588		l1 := n.List.First()
  2589		l2 := n.List.Second()
  2590	
  2591		var nodes Nodes
  2592	
  2593		// var s []T
  2594		s := temp(l1.Type)
  2595		nodes.Append(nod(OAS, s, l1)) // s = l1
  2596	
  2597		elemtype := s.Type.Elem()
  2598	
  2599		// n := len(s) + len(l2)
  2600		nn := temp(types.Types[TINT])
  2601		nodes.Append(nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil))))
  2602	
  2603		// if uint(n) > uint(cap(s))
  2604		nif := nod(OIF, nil, nil)
  2605		nuint := conv(nn, types.Types[TUINT])
  2606		scapuint := conv(nod(OCAP, s, nil), types.Types[TUINT])
  2607		nif.Left = nod(OGT, nuint, scapuint)
  2608	
  2609		// instantiate growslice(typ *type, old []any, newcap int) []any
  2610		fn := syslook("growslice")
  2611		fn = substArgTypes(fn, elemtype, elemtype)
  2612	
  2613		// s = growslice(T, s, n)
  2614		nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(elemtype), s, nn)))
  2615		nodes.Append(nif)
  2616	
  2617		// s = s[:n]
  2618		nt := nod(OSLICE, s, nil)
  2619		nt.SetSliceBounds(nil, nn, nil)
  2620		nt.SetBounded(true)
  2621		nodes.Append(nod(OAS, s, nt))
  2622	
  2623		var ncopy *Node
  2624		if elemtype.HasHeapPointer() {
  2625			// copy(s[len(l1):], l2)
  2626			nptr1 := nod(OSLICE, s, nil)
  2627			nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  2628	
  2629			nptr2 := l2
  2630	
  2631			Curfn.Func.setWBPos(n.Pos)
  2632	
  2633			// instantiate typedslicecopy(typ *type, dst any, src any) int
  2634			fn := syslook("typedslicecopy")
  2635			fn = substArgTypes(fn, l1.Type, l2.Type)
  2636			ncopy = mkcall1(fn, types.Types[TINT], &nodes, typename(elemtype), nptr1, nptr2)
  2637	
  2638		} else if instrumenting && !compiling_runtime {
  2639			// rely on runtime to instrument copy.
  2640			// copy(s[len(l1):], l2)
  2641			nptr1 := nod(OSLICE, s, nil)
  2642			nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
  2643	
  2644			nptr2 := l2
  2645	
  2646			if l2.Type.IsString() {
  2647				// instantiate func slicestringcopy(to any, fr any) int
  2648				fn := syslook("slicestringcopy")
  2649				fn = substArgTypes(fn, l1.Type, l2.Type)
  2650				ncopy = mkcall1(fn, types.Types[TINT], &nodes, nptr1, nptr2)
  2651			} else {
  2652				// instantiate func slicecopy(to any, fr any, wid uintptr) int
  2653				fn := syslook("slicecopy")
  2654				fn = substArgTypes(fn, l1.Type, l2.Type)
  2655				ncopy = mkcall1(fn, types.Types[TINT], &nodes, nptr1, nptr2, nodintconst(elemtype.Width))
  2656			}
  2657	
  2658		} else {
  2659			// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
  2660			nptr1 := nod(OINDEX, s, nod(OLEN, l1, nil))
  2661			nptr1.SetBounded(true)
  2662			nptr1 = nod(OADDR, nptr1, nil)
  2663	
  2664			nptr2 := nod(OSPTR, l2, nil)
  2665	
  2666			nwid := cheapexpr(conv(nod(OLEN, l2, nil), types.Types[TUINTPTR]), &nodes)
  2667			nwid = nod(OMUL, nwid, nodintconst(elemtype.Width))
  2668	
  2669			// instantiate func memmove(to *any, frm *any, length uintptr)
  2670			fn := syslook("memmove")
  2671			fn = substArgTypes(fn, elemtype, elemtype)
  2672			ncopy = mkcall1(fn, nil, &nodes, nptr1, nptr2, nwid)
  2673		}
  2674		ln := append(nodes.Slice(), ncopy)
  2675	
  2676		typecheckslice(ln, ctxStmt)
  2677		walkstmtlist(ln)
  2678		init.Append(ln...)
  2679		return s
  2680	}
  2681	
  2682	// isAppendOfMake reports whether n is of the form append(x, make([]T, y)...).
  2683	// isAppendOfMake assumes n has already been typechecked.
  2684	func isAppendOfMake(n *Node) bool {
  2685		if Debug['N'] != 0 || instrumenting {
  2686			return false
  2687		}
  2688	
  2689		if n.Typecheck() == 0 {
  2690			Fatalf("missing typecheck: %+v", n)
  2691		}
  2692	
  2693		if n.Op != OAPPEND || !n.IsDDD() || n.List.Len() != 2 {
  2694			return false
  2695		}
  2696	
  2697		second := n.List.Second()
  2698		if second.Op != OMAKESLICE || second.Right != nil {
  2699			return false
  2700		}
  2701	
  2702		// y must be either an integer constant or a variable of type int.
  2703		// typecheck checks that constant arguments to make are not negative and
  2704		// fit into an int.
  2705		// runtime.growslice uses int as type for the newcap argument.
  2706		// Constraining variables to be type int avoids the need for runtime checks
  2707		// that e.g. check if an int64 value fits into an int.
  2708		// TODO(moehrmann): support other integer types that always fit in an int
  2709		y := second.Left
  2710		if !Isconst(y, CTINT) && y.Type.Etype != TINT {
  2711			return false
  2712		}
  2713	
  2714		return true
  2715	}
  2716	
  2717	// extendslice rewrites append(l1, make([]T, l2)...) to
  2718	//   init {
  2719	//     if l2 < 0 {
  2720	//       panicmakeslicelen()
  2721	//     }
  2722	//     s := l1
  2723	//     n := len(s) + l2
  2724	//     // Compare n and s as uint so growslice can panic on overflow of len(s) + l2.
  2725	//     // cap is a positive int and n can become negative when len(s) + l2
  2726	//     // overflows int. Interpreting n when negative as uint makes it larger
  2727	//     // than cap(s). growslice will check the int n arg and panic if n is
  2728	//     // negative. This prevents the overflow from being undetected.
  2729	//     if uint(n) > uint(cap(s)) {
  2730	//       s = growslice(T, s, n)
  2731	//     }
  2732	//     s = s[:n]
  2733	//     lptr := &l1[0]
  2734	//     sptr := &s[0]
  2735	//     if lptr == sptr || !hasPointers(T) {
  2736	//       // growslice did not clear the whole underlying array (or did not get called)
  2737	//       hp := &s[len(l1)]
  2738	//       hn := l2 * sizeof(T)
  2739	//       memclr(hp, hn)
  2740	//     }
  2741	//   }
  2742	//   s
  2743	func extendslice(n *Node, init *Nodes) *Node {
  2744		// isAppendOfMake made sure l2 fits in an int.
  2745		l2 := conv(n.List.Second().Left, types.Types[TINT])
  2746		l2 = typecheck(l2, ctxExpr)
  2747		n.List.SetSecond(l2) // walkAppendArgs expects l2 in n.List.Second().
  2748	
  2749		walkAppendArgs(n, init)
  2750	
  2751		l1 := n.List.First()
  2752		l2 = n.List.Second() // re-read l2, as it may have been updated by walkAppendArgs
  2753	
  2754		var nodes []*Node
  2755	
  2756		// if l2 < 0
  2757		nifneg := nod(OIF, nod(OLT, l2, nodintconst(0)), nil)
  2758		nifneg.SetLikely(false)
  2759	
  2760		// panicmakeslicelen()
  2761		nifneg.Nbody.Set1(mkcall("panicmakeslicelen", nil, init))
  2762		nodes = append(nodes, nifneg)
  2763	
  2764		// s := l1
  2765		s := temp(l1.Type)
  2766		nodes = append(nodes, nod(OAS, s, l1))
  2767	
  2768		elemtype := s.Type.Elem()
  2769	
  2770		// n := len(s) + l2
  2771		nn := temp(types.Types[TINT])
  2772		nodes = append(nodes, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), l2)))
  2773	
  2774		// if uint(n) > uint(cap(s))
  2775		nuint := conv(nn, types.Types[TUINT])
  2776		capuint := conv(nod(OCAP, s, nil), types.Types[TUINT])
  2777		nif := nod(OIF, nod(OGT, nuint, capuint), nil)
  2778	
  2779		// instantiate growslice(typ *type, old []any, newcap int) []any
  2780		fn := syslook("growslice")
  2781		fn = substArgTypes(fn, elemtype, elemtype)
  2782	
  2783		// s = growslice(T, s, n)
  2784		nif.Nbody.Set1(nod(OAS, s, mkcall1(fn, s.Type, &nif.Ninit, typename(elemtype), s, nn)))
  2785		nodes = append(nodes, nif)
  2786	
  2787		// s = s[:n]
  2788		nt := nod(OSLICE, s, nil)
  2789		nt.SetSliceBounds(nil, nn, nil)
  2790		nt.SetBounded(true)
  2791		nodes = append(nodes, nod(OAS, s, nt))
  2792	
  2793		// lptr := &l1[0]
  2794		l1ptr := temp(l1.Type.Elem().PtrTo())
  2795		tmp := nod(OSPTR, l1, nil)
  2796		nodes = append(nodes, nod(OAS, l1ptr, tmp))
  2797	
  2798		// sptr := &s[0]
  2799		sptr := temp(elemtype.PtrTo())
  2800		tmp = nod(OSPTR, s, nil)
  2801		nodes = append(nodes, nod(OAS, sptr, tmp))
  2802	
  2803		// hp := &s[len(l1)]
  2804		hp := nod(OINDEX, s, nod(OLEN, l1, nil))
  2805		hp.SetBounded(true)
  2806		hp = nod(OADDR, hp, nil)
  2807		hp = convnop(hp, types.Types[TUNSAFEPTR])
  2808	
  2809		// hn := l2 * sizeof(elem(s))
  2810		hn := nod(OMUL, l2, nodintconst(elemtype.Width))
  2811		hn = conv(hn, types.Types[TUINTPTR])
  2812	
  2813		clrname := "memclrNoHeapPointers"
  2814		hasPointers := types.Haspointers(elemtype)
  2815		if hasPointers {
  2816			clrname = "memclrHasPointers"
  2817			Curfn.Func.setWBPos(n.Pos)
  2818		}
  2819	
  2820		var clr Nodes
  2821		clrfn := mkcall(clrname, nil, &clr, hp, hn)
  2822		clr.Append(clrfn)
  2823	
  2824		if hasPointers {
  2825			// if l1ptr == sptr
  2826			nifclr := nod(OIF, nod(OEQ, l1ptr, sptr), nil)
  2827			nifclr.Nbody = clr
  2828			nodes = append(nodes, nifclr)
  2829		} else {
  2830			nodes = append(nodes, clr.Slice()...)
  2831		}
  2832	
  2833		typecheckslice(nodes, ctxStmt)
  2834		walkstmtlist(nodes)
  2835		init.Append(nodes...)
  2836		return s
  2837	}
  2838	
  2839	// Rewrite append(src, x, y, z) so that any side effects in
  2840	// x, y, z (including runtime panics) are evaluated in
  2841	// initialization statements before the append.
  2842	// For normal code generation, stop there and leave the
  2843	// rest to cgen_append.
  2844	//
  2845	// For race detector, expand append(src, a [, b]* ) to
  2846	//
  2847	//   init {
  2848	//     s := src
  2849	//     const argc = len(args) - 1
  2850	//     if cap(s) - len(s) < argc {
  2851	//	    s = growslice(s, len(s)+argc)
  2852	//     }
  2853	//     n := len(s)
  2854	//     s = s[:n+argc]
  2855	//     s[n] = a
  2856	//     s[n+1] = b
  2857	//     ...
  2858	//   }
  2859	//   s
  2860	func walkappend(n *Node, init *Nodes, dst *Node) *Node {
  2861		if !samesafeexpr(dst, n.List.First()) {
  2862			n.List.SetFirst(safeexpr(n.List.First(), init))
  2863			n.List.SetFirst(walkexpr(n.List.First(), init))
  2864		}
  2865		walkexprlistsafe(n.List.Slice()[1:], init)
  2866	
  2867		nsrc := n.List.First()
  2868	
  2869		// walkexprlistsafe will leave OINDEX (s[n]) alone if both s
  2870		// and n are names or literals, but those may index the slice we're
  2871		// modifying here. Fix explicitly.
  2872		// Using cheapexpr also makes sure that the evaluation
  2873		// of all arguments (and especially any panics) happen
  2874		// before we begin to modify the slice in a visible way.
  2875		ls := n.List.Slice()[1:]
  2876		for i, n := range ls {
  2877			n = cheapexpr(n, init)
  2878			if !types.Identical(n.Type, nsrc.Type.Elem()) {
  2879				n = assignconv(n, nsrc.Type.Elem(), "append")
  2880				n = walkexpr(n, init)
  2881			}
  2882			ls[i] = n
  2883		}
  2884	
  2885		argc := n.List.Len() - 1
  2886		if argc < 1 {
  2887			return nsrc
  2888		}
  2889	
  2890		// General case, with no function calls left as arguments.
  2891		// Leave for gen, except that instrumentation requires old form.
  2892		if !instrumenting || compiling_runtime {
  2893			return n
  2894		}
  2895	
  2896		var l []*Node
  2897	
  2898		ns := temp(nsrc.Type)
  2899		l = append(l, nod(OAS, ns, nsrc)) // s = src
  2900	
  2901		na := nodintconst(int64(argc)) // const argc
  2902		nx := nod(OIF, nil, nil)       // if cap(s) - len(s) < argc
  2903		nx.Left = nod(OLT, nod(OSUB, nod(OCAP, ns, nil), nod(OLEN, ns, nil)), na)
  2904	
  2905		fn := syslook("growslice") //   growslice(<type>, old []T, mincap int) (ret []T)
  2906		fn = substArgTypes(fn, ns.Type.Elem(), ns.Type.Elem())
  2907	
  2908		nx.Nbody.Set1(nod(OAS, ns,
  2909			mkcall1(fn, ns.Type, &nx.Ninit, typename(ns.Type.Elem()), ns,
  2910				nod(OADD, nod(OLEN, ns, nil), na))))
  2911	
  2912		l = append(l, nx)
  2913	
  2914		nn := temp(types.Types[TINT])
  2915		l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s)
  2916	
  2917		nx = nod(OSLICE, ns, nil) // ...s[:n+argc]
  2918		nx.SetSliceBounds(nil, nod(OADD, nn, na), nil)
  2919		nx.SetBounded(true)
  2920		l = append(l, nod(OAS, ns, nx)) // s = s[:n+argc]
  2921	
  2922		ls = n.List.Slice()[1:]
  2923		for i, n := range ls {
  2924			nx = nod(OINDEX, ns, nn) // s[n] ...
  2925			nx.SetBounded(true)
  2926			l = append(l, nod(OAS, nx, n)) // s[n] = arg
  2927			if i+1 < len(ls) {
  2928				l = append(l, nod(OAS, nn, nod(OADD, nn, nodintconst(1)))) // n = n + 1
  2929			}
  2930		}
  2931	
  2932		typecheckslice(l, ctxStmt)
  2933		walkstmtlist(l)
  2934		init.Append(l...)
  2935		return ns
  2936	}
  2937	
  2938	// Lower copy(a, b) to a memmove call or a runtime call.
  2939	//
  2940	// init {
  2941	//   n := len(a)
  2942	//   if n > len(b) { n = len(b) }
  2943	//   if a.ptr != b.ptr { memmove(a.ptr, b.ptr, n*sizeof(elem(a))) }
  2944	// }
  2945	// n;
  2946	//
  2947	// Also works if b is a string.
  2948	//
  2949	func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
  2950		if n.Left.Type.Elem().HasHeapPointer() {
  2951			Curfn.Func.setWBPos(n.Pos)
  2952			fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type)
  2953			return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right)
  2954		}
  2955	
  2956		if runtimecall {
  2957			if n.Right.Type.IsString() {
  2958				fn := syslook("slicestringcopy")
  2959				fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
  2960				return mkcall1(fn, n.Type, init, n.Left, n.Right)
  2961			}
  2962	
  2963			fn := syslook("slicecopy")
  2964			fn = substArgTypes(fn, n.Left.Type, n.Right.Type)
  2965			return mkcall1(fn, n.Type, init, n.Left, n.Right, nodintconst(n.Left.Type.Elem().Width))
  2966		}
  2967	
  2968		n.Left = walkexpr(n.Left, init)
  2969		n.Right = walkexpr(n.Right, init)
  2970		nl := temp(n.Left.Type)
  2971		nr := temp(n.Right.Type)
  2972		var l []*Node
  2973		l = append(l, nod(OAS, nl, n.Left))
  2974		l = append(l, nod(OAS, nr, n.Right))
  2975	
  2976		nfrm := nod(OSPTR, nr, nil)
  2977		nto := nod(OSPTR, nl, nil)
  2978	
  2979		nlen := temp(types.Types[TINT])
  2980	
  2981		// n = len(to)
  2982		l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil)))
  2983	
  2984		// if n > len(frm) { n = len(frm) }
  2985		nif := nod(OIF, nil, nil)
  2986	
  2987		nif.Left = nod(OGT, nlen, nod(OLEN, nr, nil))
  2988		nif.Nbody.Append(nod(OAS, nlen, nod(OLEN, nr, nil)))
  2989		l = append(l, nif)
  2990	
  2991		// if to.ptr != frm.ptr { memmove( ... ) }
  2992		ne := nod(OIF, nod(ONE, nto, nfrm), nil)
  2993		ne.SetLikely(true)
  2994		l = append(l, ne)
  2995	
  2996		fn := syslook("memmove")
  2997		fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem())
  2998		nwid := temp(types.Types[TUINTPTR])
  2999		setwid := nod(OAS, nwid, conv(nlen, types.Types[TUINTPTR]))
  3000		ne.Nbody.Append(setwid)
  3001		nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width))
  3002		call := mkcall1(fn, nil, init, nto, nfrm, nwid)
  3003		ne.Nbody.Append(call)
  3004	
  3005		typecheckslice(l, ctxStmt)
  3006		walkstmtlist(l)
  3007		init.Append(l...)
  3008		return nlen
  3009	}
  3010	
  3011	func eqfor(t *types.Type) (n *Node, needsize bool) {
  3012		// Should only arrive here with large memory or
  3013		// a struct/array containing a non-memory field/element.
  3014		// Small memory is handled inline, and single non-memory
  3015		// is handled by walkcompare.
  3016		switch a, _ := algtype1(t); a {
  3017		case AMEM:
  3018			n := syslook("memequal")
  3019			n = substArgTypes(n, t, t)
  3020			return n, true
  3021		case ASPECIAL:
  3022			sym := typesymprefix(".eq", t)
  3023			n := newname(sym)
  3024			n.SetClass(PFUNC)
  3025			n.Sym.SetFunc(true)
  3026			n.Type = functype(nil, []*Node{
  3027				anonfield(types.NewPtr(t)),
  3028				anonfield(types.NewPtr(t)),
  3029			}, []*Node{
  3030				anonfield(types.Types[TBOOL]),
  3031			})
  3032			return n, false
  3033		}
  3034		Fatalf("eqfor %v", t)
  3035		return nil, false
  3036	}
  3037	
  3038	// The result of walkcompare MUST be assigned back to n, e.g.
  3039	// 	n.Left = walkcompare(n.Left, init)
  3040	func walkcompare(n *Node, init *Nodes) *Node {
  3041		if n.Left.Type.IsInterface() && n.Right.Type.IsInterface() && n.Left.Op != OLITERAL && n.Right.Op != OLITERAL {
  3042			return walkcompareInterface(n, init)
  3043		}
  3044	
  3045		if n.Left.Type.IsString() && n.Right.Type.IsString() {
  3046			return walkcompareString(n, init)
  3047		}
  3048	
  3049		n.Left = walkexpr(n.Left, init)
  3050		n.Right = walkexpr(n.Right, init)
  3051	
  3052		// Given interface value l and concrete value r, rewrite
  3053		//   l == r
  3054		// into types-equal && data-equal.
  3055		// This is efficient, avoids allocations, and avoids runtime calls.
  3056		var l, r *Node
  3057		if n.Left.Type.IsInterface() && !n.Right.Type.IsInterface() {
  3058			l = n.Left
  3059			r = n.Right
  3060		} else if !n.Left.Type.IsInterface() && n.Right.Type.IsInterface() {
  3061			l = n.Right
  3062			r = n.Left
  3063		}
  3064	
  3065		if l != nil {
  3066			// Handle both == and !=.
  3067			eq := n.Op
  3068			andor := OOROR
  3069			if eq == OEQ {
  3070				andor = OANDAND
  3071			}
  3072			// Check for types equal.
  3073			// For empty interface, this is:
  3074			//   l.tab == type(r)
  3075			// For non-empty interface, this is:
  3076			//   l.tab != nil && l.tab._type == type(r)
  3077			var eqtype *Node
  3078			tab := nod(OITAB, l, nil)
  3079			rtyp := typename(r.Type)
  3080			if l.Type.IsEmptyInterface() {
  3081				tab.Type = types.NewPtr(types.Types[TUINT8])
  3082				tab.SetTypecheck(1)
  3083				eqtype = nod(eq, tab, rtyp)
  3084			} else {
  3085				nonnil := nod(brcom(eq), nodnil(), tab)
  3086				match := nod(eq, itabType(tab), rtyp)
  3087				eqtype = nod(andor, nonnil, match)
  3088			}
  3089			// Check for data equal.
  3090			eqdata := nod(eq, ifaceData(l, r.Type), r)
  3091			// Put it all together.
  3092			expr := nod(andor, eqtype, eqdata)
  3093			n = finishcompare(n, expr, init)
  3094			return n
  3095		}
  3096	
  3097		// Must be comparison of array or struct.
  3098		// Otherwise back end handles it.
  3099		// While we're here, decide whether to
  3100		// inline or call an eq alg.
  3101		t := n.Left.Type
  3102		var inline bool
  3103	
  3104		maxcmpsize := int64(4)
  3105		unalignedLoad := canMergeLoads()
  3106		if unalignedLoad {
  3107			// Keep this low enough to generate less code than a function call.
  3108			maxcmpsize = 2 * int64(thearch.LinkArch.RegSize)
  3109		}
  3110	
  3111		switch t.Etype {
  3112		default:
  3113			return n
  3114		case TARRAY:
  3115			// We can compare several elements at once with 2/4/8 byte integer compares
  3116			inline = t.NumElem() <= 1 || (issimple[t.Elem().Etype] && (t.NumElem() <= 4 || t.Elem().Width*t.NumElem() <= maxcmpsize))
  3117		case TSTRUCT:
  3118			inline = t.NumComponents(types.IgnoreBlankFields) <= 4
  3119		}
  3120	
  3121		cmpl := n.Left
  3122		for cmpl != nil && cmpl.Op == OCONVNOP {
  3123			cmpl = cmpl.Left
  3124		}
  3125		cmpr := n.Right
  3126		for cmpr != nil && cmpr.Op == OCONVNOP {
  3127			cmpr = cmpr.Left
  3128		}
  3129	
  3130		// We chose not to inline. Call the equality function directly.
  3131		if !inline {
  3132			if isvaluelit(cmpl) {
  3133				var_ := temp(cmpl.Type)
  3134				anylit(cmpl, var_, init)
  3135				cmpl = var_
  3136			}
  3137			if isvaluelit(cmpr) {
  3138				var_ := temp(cmpr.Type)
  3139				anylit(cmpr, var_, init)
  3140				cmpr = var_
  3141			}
  3142			if !islvalue(cmpl) || !islvalue(cmpr) {
  3143				Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr)
  3144			}
  3145	
  3146			// eq algs take pointers
  3147			pl := temp(types.NewPtr(t))
  3148			al := nod(OAS, pl, nod(OADDR, cmpl, nil))
  3149			al = typecheck(al, ctxStmt)
  3150			init.Append(al)
  3151	
  3152			pr := temp(types.NewPtr(t))
  3153			ar := nod(OAS, pr, nod(OADDR, cmpr, nil))
  3154			ar = typecheck(ar, ctxStmt)
  3155			init.Append(ar)
  3156	
  3157			fn, needsize := eqfor(t)
  3158			call := nod(OCALL, fn, nil)
  3159			call.List.Append(pl)
  3160			call.List.Append(pr)
  3161			if needsize {
  3162				call.List.Append(nodintconst(t.Width))
  3163			}
  3164			res := call
  3165			if n.Op != OEQ {
  3166				res = nod(ONOT, res, nil)
  3167			}
  3168			n = finishcompare(n, res, init)
  3169			return n
  3170		}
  3171	
  3172		// inline: build boolean expression comparing element by element
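		// For example (roughly), comparing two [4]byte arrays a == b can be done
		// with a single uint32 comparison built from shifted byte loads (which
		// ssa merges into one 32-bit load) instead of four byte comparisons.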
  3173		andor := OANDAND
  3174		if n.Op == ONE {
  3175			andor = OOROR
  3176		}
  3177		var expr *Node
  3178		compare := func(el, er *Node) {
  3179			a := nod(n.Op, el, er)
  3180			if expr == nil {
  3181				expr = a
  3182			} else {
  3183				expr = nod(andor, expr, a)
  3184			}
  3185		}
  3186		cmpl = safeexpr(cmpl, init)
  3187		cmpr = safeexpr(cmpr, init)
  3188		if t.IsStruct() {
  3189			for _, f := range t.Fields().Slice() {
  3190				sym := f.Sym
  3191				if sym.IsBlank() {
  3192					continue
  3193				}
  3194				compare(
  3195					nodSym(OXDOT, cmpl, sym),
  3196					nodSym(OXDOT, cmpr, sym),
  3197				)
  3198			}
  3199		} else {
  3200			step := int64(1)
  3201			remains := t.NumElem() * t.Elem().Width
  3202			combine64bit := unalignedLoad && Widthreg == 8 && t.Elem().Width <= 4 && t.Elem().IsInteger()
  3203			combine32bit := unalignedLoad && t.Elem().Width <= 2 && t.Elem().IsInteger()
  3204			combine16bit := unalignedLoad && t.Elem().Width == 1 && t.Elem().IsInteger()
  3205			for i := int64(0); remains > 0; {
  3206				var convType *types.Type
  3207				switch {
  3208				case remains >= 8 && combine64bit:
  3209					convType = types.Types[TINT64]
  3210					step = 8 / t.Elem().Width
  3211				case remains >= 4 && combine32bit:
  3212					convType = types.Types[TUINT32]
  3213					step = 4 / t.Elem().Width
  3214				case remains >= 2 && combine16bit:
  3215					convType = types.Types[TUINT16]
  3216					step = 2 / t.Elem().Width
  3217				default:
  3218					step = 1
  3219				}
  3220				if step == 1 {
  3221					compare(
  3222						nod(OINDEX, cmpl, nodintconst(i)),
  3223						nod(OINDEX, cmpr, nodintconst(i)),
  3224					)
  3225					i++
  3226					remains -= t.Elem().Width
  3227				} else {
  3228					elemType := t.Elem().ToUnsigned()
  3229					cmplw := nod(OINDEX, cmpl, nodintconst(i))
  3230					cmplw = conv(cmplw, elemType) // convert to unsigned
  3231					cmplw = conv(cmplw, convType) // widen
  3232					cmprw := nod(OINDEX, cmpr, nodintconst(i))
  3233					cmprw = conv(cmprw, elemType)
  3234					cmprw = conv(cmprw, convType)
  3235					// For code like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  3236					// ssa will generate a single large load.
  3237					for offset := int64(1); offset < step; offset++ {
  3238						lb := nod(OINDEX, cmpl, nodintconst(i+offset))
  3239						lb = conv(lb, elemType)
  3240						lb = conv(lb, convType)
  3241						lb = nod(OLSH, lb, nodintconst(8*t.Elem().Width*offset))
  3242						cmplw = nod(OOR, cmplw, lb)
  3243						rb := nod(OINDEX, cmpr, nodintconst(i+offset))
  3244						rb = conv(rb, elemType)
  3245						rb = conv(rb, convType)
  3246						rb = nod(OLSH, rb, nodintconst(8*t.Elem().Width*offset))
  3247						cmprw = nod(OOR, cmprw, rb)
  3248					}
  3249					compare(cmplw, cmprw)
  3250					i += step
  3251					remains -= step * t.Elem().Width
  3252				}
  3253			}
  3254		}
  3255		if expr == nil {
  3256			expr = nodbool(n.Op == OEQ)
  3257			// We still need to use cmpl and cmpr, in case they contain
  3258			// an expression which might panic. See issue 23837.
  3259			t := temp(cmpl.Type)
  3260			a1 := nod(OAS, t, cmpl)
  3261			a1 = typecheck(a1, ctxStmt)
  3262			a2 := nod(OAS, t, cmpr)
  3263			a2 = typecheck(a2, ctxStmt)
  3264			init.Append(a1, a2)
  3265		}
  3266		n = finishcompare(n, expr, init)
  3267		return n
  3268	}
  3269	
  3270	func walkcompareInterface(n *Node, init *Nodes) *Node {
  3271		// ifaceeq(i1 any-1, i2 any-2) (ret bool);
  3272		if !types.Identical(n.Left.Type, n.Right.Type) {
  3273			Fatalf("ifaceeq %v %v %v", n.Op, n.Left.Type, n.Right.Type)
  3274		}
  3275		var fn *Node
  3276		if n.Left.Type.IsEmptyInterface() {
  3277			fn = syslook("efaceeq")
  3278		} else {
  3279			fn = syslook("ifaceeq")
  3280		}
  3281	
  3282		n.Right = cheapexpr(n.Right, init)
  3283		n.Left = cheapexpr(n.Left, init)
  3284		lt := nod(OITAB, n.Left, nil)
  3285		rt := nod(OITAB, n.Right, nil)
  3286		ld := nod(OIDATA, n.Left, nil)
  3287		rd := nod(OIDATA, n.Right, nil)
  3288		ld.Type = types.Types[TUNSAFEPTR]
  3289		rd.Type = types.Types[TUNSAFEPTR]
  3290		ld.SetTypecheck(1)
  3291		rd.SetTypecheck(1)
  3292		call := mkcall1(fn, n.Type, init, lt, ld, rd)
  3293	
  3294		// Check itable/type before full compare.
  3295		// Note: short-circuited because order matters.
  3296		var cmp *Node
  3297		if n.Op == OEQ {
  3298			cmp = nod(OANDAND, nod(OEQ, lt, rt), call)
  3299		} else {
  3300			cmp = nod(OOROR, nod(ONE, lt, rt), nod(ONOT, call, nil))
  3301		}
  3302		return finishcompare(n, cmp, init)
  3303	}
  3304	
  3305	func walkcompareString(n *Node, init *Nodes) *Node {
  3306		// Rewrite comparisons to short constant strings as length+byte-wise comparisons.
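		// For example (roughly),
		//	s == "ab"
		// becomes len(s) == 2 && s[0] == 'a' && s[1] == 'b', and on architectures
		// that can merge loads the two byte comparisons become a single uint16
		// comparison against 'a' | 'b'<<8.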
  3307		var cs, ncs *Node // const string, non-const string
  3308		switch {
  3309		case Isconst(n.Left, CTSTR) && Isconst(n.Right, CTSTR):
  3310			// ignore; will be constant evaluated
  3311		case Isconst(n.Left, CTSTR):
  3312			cs = n.Left
  3313			ncs = n.Right
  3314		case Isconst(n.Right, CTSTR):
  3315			cs = n.Right
  3316			ncs = n.Left
  3317		}
  3318		if cs != nil {
  3319			cmp := n.Op
  3320			// Our comparison below assumes that the non-constant string
  3321			// is on the left hand side, so rewrite "" cmp x to x cmp "".
  3322			// See issue 24817.
  3323			if Isconst(n.Left, CTSTR) {
  3324				cmp = brrev(cmp)
  3325			}
  3326	
  3327			// maxRewriteLen was chosen empirically.
  3328			// It is the value that minimizes cmd/go file size
  3329			// across most architectures.
  3330			// See the commit description for CL 26758 for details.
  3331			maxRewriteLen := 6
  3332			// Some architectures can load unaligned byte sequence as 1 word.
  3333			// So we can cover longer strings with the same amount of code.
  3334			canCombineLoads := canMergeLoads()
  3335			combine64bit := false
  3336			if canCombineLoads {
  3337				// Keep this low enough to generate less code than a function call.
  3338				maxRewriteLen = 2 * thearch.LinkArch.RegSize
  3339				combine64bit = thearch.LinkArch.RegSize >= 8
  3340			}
  3341	
  3342			var and Op
  3343			switch cmp {
  3344			case OEQ:
  3345				and = OANDAND
  3346			case ONE:
  3347				and = OOROR
  3348			default:
  3349				// Don't do byte-wise comparisons for <, <=, etc.
  3350				// They're fairly complicated.
  3351				// Length-only checks are ok, though.
  3352				maxRewriteLen = 0
  3353			}
  3354			if s := cs.Val().U.(string); len(s) <= maxRewriteLen {
  3355				if len(s) > 0 {
  3356					ncs = safeexpr(ncs, init)
  3357				}
  3358				r := nod(cmp, nod(OLEN, ncs, nil), nodintconst(int64(len(s))))
  3359				remains := len(s)
  3360				for i := 0; remains > 0; {
  3361					if remains == 1 || !canCombineLoads {
  3362						cb := nodintconst(int64(s[i]))
  3363						ncb := nod(OINDEX, ncs, nodintconst(int64(i)))
  3364						r = nod(and, r, nod(cmp, ncb, cb))
  3365						remains--
  3366						i++
  3367						continue
  3368					}
  3369					var step int
  3370					var convType *types.Type
  3371					switch {
  3372					case remains >= 8 && combine64bit:
  3373						convType = types.Types[TINT64]
  3374						step = 8
  3375					case remains >= 4:
  3376						convType = types.Types[TUINT32]
  3377						step = 4
  3378					case remains >= 2:
  3379						convType = types.Types[TUINT16]
  3380						step = 2
  3381					}
  3382					ncsubstr := nod(OINDEX, ncs, nodintconst(int64(i)))
  3383					ncsubstr = conv(ncsubstr, convType)
  3384					csubstr := int64(s[i])
  3385					// Calculate large constant from bytes as sequence of shifts and ors.
  3386					// Like this:  uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
  3387					// ssa will combine this into a single large load.
  3388					for offset := 1; offset < step; offset++ {
  3389						b := nod(OINDEX, ncs, nodintconst(int64(i+offset)))
  3390						b = conv(b, convType)
  3391						b = nod(OLSH, b, nodintconst(int64(8*offset)))
  3392						ncsubstr = nod(OOR, ncsubstr, b)
  3393						csubstr |= int64(s[i+offset]) << uint8(8*offset)
  3394					}
  3395					csubstrPart := nodintconst(csubstr)
  3396					// Compare "step" bytes at once.
  3397					r = nod(and, r, nod(cmp, csubstrPart, ncsubstr))
  3398					remains -= step
  3399					i += step
  3400				}
  3401				return finishcompare(n, r, init)
  3402			}
  3403		}
  3404	
  3405		var r *Node
  3406		if n.Op == OEQ || n.Op == ONE {
  3407			// prepare for rewrite below
  3408			n.Left = cheapexpr(n.Left, init)
  3409			n.Right = cheapexpr(n.Right, init)
  3410	
  3411			lstr := conv(n.Left, types.Types[TSTRING])
  3412			rstr := conv(n.Right, types.Types[TSTRING])
  3413			lptr := nod(OSPTR, lstr, nil)
  3414			rptr := nod(OSPTR, rstr, nil)
  3415			llen := conv(nod(OLEN, lstr, nil), types.Types[TUINTPTR])
  3416			rlen := conv(nod(OLEN, rstr, nil), types.Types[TUINTPTR])
  3417	
  3418			fn := syslook("memequal")
  3419			fn = substArgTypes(fn, types.Types[TUINT8], types.Types[TUINT8])
  3420			r = mkcall1(fn, types.Types[TBOOL], init, lptr, rptr, llen)
  3421	
  3422			// quick check of len before full compare for == or !=.
  3423			// memequal then tests equality up to length len.
  3424			if n.Op == OEQ {
  3425				// len(left) == len(right) && memequal(left, right, len)
  3426				r = nod(OANDAND, nod(OEQ, llen, rlen), r)
  3427			} else {
  3428				// len(left) != len(right) || !memequal(left, right, len)
  3429				r = nod(ONOT, r, nil)
  3430				r = nod(OOROR, nod(ONE, llen, rlen), r)
  3431			}
  3432		} else {
  3433			// cmpstring(s1, s2) compared against 0 with the original operator
  3434			r = mkcall("cmpstring", types.Types[TINT], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING]))
  3435			r = nod(n.Op, r, nodintconst(0))
  3436		}
  3437	
  3438		return finishcompare(n, r, init)
  3439	}
  3440	
  3441	// The result of finishcompare MUST be assigned back to n, e.g.
  3442	// 	n.Left = finishcompare(n.Left, r, init)
  3443	func finishcompare(n, r *Node, init *Nodes) *Node {
  3444		r = typecheck(r, ctxExpr)
  3445		r = conv(r, n.Type)
  3446		r = walkexpr(r, init)
  3447		return r
  3448	}
  3449	
  3450	// isIntOrdering reports whether n is a <, ≤, >, or ≥ ordering between integers.
  3451	func (n *Node) isIntOrdering() bool {
  3452		switch n.Op {
  3453		case OLE, OLT, OGE, OGT:
  3454		default:
  3455			return false
  3456		}
  3457		return n.Left.Type.IsInteger() && n.Right.Type.IsInteger()
  3458	}
  3459	
  3460	// walkinrange optimizes integer-in-range checks, such as 4 <= x && x < 10.
  3461	// n must be an OANDAND or OOROR node.
  3462	// The result of walkinrange MUST be assigned back to n, e.g.
  3463	// 	n.Left = walkinrange(n.Left)
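	// For example (roughly), for an integer x,
	//	4 <= x && x < 10
	// becomes the single unsigned comparison
	//	uint(x-4) < 6
	// (using the unsigned type corresponding to x's type).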
  3464	func walkinrange(n *Node, init *Nodes) *Node {
  3465		// We are looking for something equivalent to a opl b OP b opr c, where:
  3466		// * a, b, and c have integer type
  3467		// * b is side-effect-free
  3468		// * opl and opr are each < or ≤
  3469		// * OP is &&
  3470		l := n.Left
  3471		r := n.Right
  3472		if !l.isIntOrdering() || !r.isIntOrdering() {
  3473			return n
  3474		}
  3475	
  3476		// Find b, if it exists, and rename appropriately.
  3477		// Input is: l.Left l.Op l.Right ANDAND/OROR r.Left r.Op r.Right
  3478		// Output is: a opl b(==x) ANDAND/OROR b(==x) opr c
  3479		a, opl, b := l.Left, l.Op, l.Right
  3480		x, opr, c := r.Left, r.Op, r.Right
  3481		for i := 0; ; i++ {
  3482			if samesafeexpr(b, x) {
  3483				break
  3484			}
  3485			if i == 3 {
  3486				// Tried all permutations and couldn't find an appropriate b == x.
  3487				return n
  3488			}
  3489			if i&1 == 0 {
  3490				a, opl, b = b, brrev(opl), a
  3491			} else {
  3492				x, opr, c = c, brrev(opr), x
  3493			}
  3494		}
  3495	
  3496		// If n.Op is ||, apply de Morgan.
  3497		// Negate the internal ops now; we'll negate the top level op at the end.
  3498		// Henceforth assume &&.
  3499		negateResult := n.Op == OOROR
  3500		if negateResult {
  3501			opl = brcom(opl)
  3502			opr = brcom(opr)
  3503		}
  3504	
  3505		cmpdir := func(o Op) int {
  3506			switch o {
  3507			case OLE, OLT:
  3508				return -1
  3509			case OGE, OGT:
  3510				return +1
  3511			}
  3512			Fatalf("walkinrange cmpdir %v", o)
  3513			return 0
  3514		}
  3515		if cmpdir(opl) != cmpdir(opr) {
  3516			// Not a range check; something like b < a && b < c.
  3517			return n
  3518		}
  3519	
  3520		switch opl {
  3521		case OGE, OGT:
  3522			// We have something like a > b && b ≥ c.
  3523			// Switch and reverse ops and rename constants,
  3524			// to make it look like a ≤ b && b < c.
  3525			a, c = c, a
  3526			opl, opr = brrev(opr), brrev(opl)
  3527		}
  3528	
  3529		// We must ensure that c-a is non-negative.
  3530		// For now, require a and c to be constants.
  3531		// In the future, we could also support a == 0 and c == len/cap(...).
  3532		// Unfortunately, by this point, most len/cap expressions have been
  3533		// stored into temporary variables.
  3534		if !Isconst(a, CTINT) || !Isconst(c, CTINT) {
  3535			return n
  3536		}
  3537	
  3538		// Ensure that Int64() does not overflow on a and c (it'll happen
  3539		// for any const above 2**63; see issue #27143).
  3540		if !a.CanInt64() || !c.CanInt64() {
  3541			return n
  3542		}
  3543	
  3544		if opl == OLT {
  3545			// We have a < b && ...
  3546			// We need a ≤ b && ... to safely use unsigned comparison tricks.
  3547			// If a is not the maximum constant for b's type,
  3548			// we can increment a and switch to ≤.
  3549			if a.Int64() >= maxintval[b.Type.Etype].Int64() {
  3550				return n
  3551			}
  3552			a = nodintconst(a.Int64() + 1)
  3553			opl = OLE
  3554		}
  3555	
  3556		bound := c.Int64() - a.Int64()
  3557		if bound < 0 {
  3558			// Bad news. Something like 5 <= x && x < 3.
  3559			// Rare in practice, and we still need to generate side-effects,
  3560			// so just leave it alone.
  3561			return n
  3562		}
  3563	
  3564		// We have a ≤ b && b < c (or a ≤ b && b ≤ c).
  3565		// This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a),
  3566		// which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a),
  3567		// which is equivalent to uint(b-a) < uint(c-a).
  3568		ut := b.Type.ToUnsigned()
  3569		lhs := conv(nod(OSUB, b, a), ut)
  3570		rhs := nodintconst(bound)
  3571		if negateResult {
  3572			// Negate top level.
  3573			opr = brcom(opr)
  3574		}
  3575		cmp := nod(opr, lhs, rhs)
  3576		cmp.Pos = n.Pos
  3577		cmp = addinit(cmp, l.Ninit.Slice())
  3578		cmp = addinit(cmp, r.Ninit.Slice())
  3579		// Typecheck the AST rooted at cmp...
  3580		cmp = typecheck(cmp, ctxExpr)
  3581		// ...but then reset cmp's type to match n's type.
  3582		cmp.Type = n.Type
  3583		cmp = walkexpr(cmp, init)
  3584		return cmp
  3585	}
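
// A concrete sketch of the transformation, assuming x has type int:
//
//	4 <= x && x < 10    becomes    uint(x-4) < 6
//	x < 4 || 10 <= x    becomes    uint(x-4) >= 6   (the || form via De Morgan's law)
//
// A single unsigned comparison replaces two comparisons and the
// short-circuit evaluation between them.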
  3586	
  3587	// bounded reports whether the integer expression n is known to be in the range [0, max).
  3588	func bounded(n *Node, max int64) bool {
  3589		if n.Type == nil || !n.Type.IsInteger() {
  3590			return false
  3591		}
  3592	
  3593		sign := n.Type.IsSigned()
  3594		bits := int32(8 * n.Type.Width)
  3595	
  3596		if smallintconst(n) {
  3597			v := n.Int64()
  3598			return 0 <= v && v < max
  3599		}
  3600	
  3601		switch n.Op {
  3602		case OAND:
  3603			v := int64(-1)
  3604			if smallintconst(n.Left) {
  3605				v = n.Left.Int64()
  3606			} else if smallintconst(n.Right) {
  3607				v = n.Right.Int64()
  3608			}
  3609	
  3610			if 0 <= v && v < max {
  3611				return true
  3612			}
  3613	
  3614		case OMOD:
  3615			if !sign && smallintconst(n.Right) {
  3616				v := n.Right.Int64()
  3617				if 0 <= v && v <= max {
  3618					return true
  3619				}
  3620			}
  3621	
  3622		case ODIV:
  3623			if !sign && smallintconst(n.Right) {
  3624				v := n.Right.Int64()
  3625				for bits > 0 && v >= 2 {
  3626					bits--
  3627					v >>= 1
  3628				}
  3629			}
  3630	
  3631		case ORSH:
  3632			if !sign && smallintconst(n.Right) {
  3633				v := n.Right.Int64()
  3634				if v > int64(bits) {
  3635					return true
  3636				}
  3637				bits -= int32(v)
  3638			}
  3639		}
  3640	
  3641		if !sign && bits <= 62 && 1<<uint(bits) <= max {
  3642			return true
  3643		}
  3644	
  3645		return false
  3646	}
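
// A few examples of what bounded can prove (sketch; u is unsigned, x may be signed):
//
//	bounded(x&7, 8)    == true   // masking with a constant bounds the result
//	bounded(u%10, 10)  == true   // unsigned mod by a constant
//	bounded(u>>60, 16) == true   // for 64-bit u, only 4 bits remain
//
// Callers can use such facts, for example, to mark index and shift operations
// whose runtime checks are unnecessary.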
  3647	
  3648	// usemethod checks interface method calls for uses of reflect.Type.Method and reflect.Type.MethodByName.
  3649	func usemethod(n *Node) {
  3650		t := n.Left.Type
  3651	
  3652		// Looking for either of:
  3653		//	Method(int) reflect.Method
  3654		//	MethodByName(string) (reflect.Method, bool)
  3655		//
  3656		// TODO(crawshaw): improve precision of match by working out
  3657		//                 how to check the method name.
  3658		if n := t.NumParams(); n != 1 {
  3659			return
  3660		}
  3661		if n := t.NumResults(); n != 1 && n != 2 {
  3662			return
  3663		}
  3664		p0 := t.Params().Field(0)
  3665		res0 := t.Results().Field(0)
  3666		var res1 *types.Field
  3667		if t.NumResults() == 2 {
  3668			res1 = t.Results().Field(1)
  3669		}
  3670	
  3671		if res1 == nil {
  3672			if p0.Type.Etype != TINT {
  3673				return
  3674			}
  3675		} else {
  3676			if !p0.Type.IsString() {
  3677				return
  3678			}
  3679			if !res1.Type.IsBoolean() {
  3680				return
  3681			}
  3682		}
  3683	
  3684		// Note: Don't rely on res0.Type.String() since its formatting depends on multiple factors
  3685		//       (including global variables such as numImports - was issue #19028).
  3686		if s := res0.Type.Sym; s != nil && s.Name == "Method" && s.Pkg != nil && s.Pkg.Path == "reflect" {
  3687			Curfn.Func.SetReflectMethod(true)
  3688		}
  3689	}
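
// For illustration, the signature pattern matched above corresponds to calls
// such as
//
//	reflect.TypeOf(v).Method(0)
//	reflect.TypeOf(v).MethodByName("Foo")
//
// Marking the enclosing function with SetReflectMethod records, roughly, that
// methods may be reached through reflection here, so the linker must be
// conservative about discarding method metadata.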
  3690	
  3691	func usefield(n *Node) {
  3692		if objabi.Fieldtrack_enabled == 0 {
  3693			return
  3694		}
  3695	
  3696		switch n.Op {
  3697		default:
  3698			Fatalf("usefield %v", n.Op)
  3699	
  3700		case ODOT, ODOTPTR:
  3701			break
  3702		}
  3703		if n.Sym == nil {
  3704			// No field name.  This DOTPTR was built by the compiler for access
  3705			// to runtime data structures.  Ignore.
  3706			return
  3707		}
  3708	
  3709		t := n.Left.Type
  3710		if t.IsPtr() {
  3711			t = t.Elem()
  3712		}
  3713		field := dotField[typeSymKey{t.Orig, n.Sym}]
  3714		if field == nil {
  3715			Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
  3716		}
  3717		if !strings.Contains(field.Note, "go:\"track\"") {
  3718			return
  3719		}
  3720	
  3721		outer := n.Left.Type
  3722		if outer.IsPtr() {
  3723			outer = outer.Elem()
  3724		}
  3725		if outer.Sym == nil {
  3726			yyerror("tracked field must be in named struct type")
  3727		}
  3728		if !types.IsExported(field.Sym.Name) {
  3729			yyerror("tracked field must be exported (upper case)")
  3730		}
  3731	
  3732		sym := tracksym(outer, field)
  3733		if Curfn.Func.FieldTrack == nil {
  3734			Curfn.Func.FieldTrack = make(map[*types.Sym]struct{})
  3735		}
  3736		Curfn.Func.FieldTrack[sym] = struct{}{}
  3737	}
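
// A sketch of what field tracking records, assuming -fieldtrack is enabled
// and the field carries a track tag:
//
//	type T struct {
//		F int `go:"track"`
//	}
//
//	func f(t *T) int { return t.F } // t.F is added to f's FieldTrack set
//
// The checks above require the tracked field to live in a named struct type
// and to be exported.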
  3738	
  3739	func candiscardlist(l Nodes) bool {
  3740		for _, n := range l.Slice() {
  3741			if !candiscard(n) {
  3742				return false
  3743			}
  3744		}
  3745		return true
  3746	}
  3747	
  3748	func candiscard(n *Node) bool {
  3749		if n == nil {
  3750			return true
  3751		}
  3752	
  3753		switch n.Op {
  3754		default:
  3755			return false
  3756	
  3757			// Discardable as long as the subpieces are.
  3758		case ONAME,
  3759			ONONAME,
  3760			OTYPE,
  3761			OPACK,
  3762			OLITERAL,
  3763			OADD,
  3764			OSUB,
  3765			OOR,
  3766			OXOR,
  3767			OADDSTR,
  3768			OADDR,
  3769			OANDAND,
  3770			OBYTES2STR,
  3771			ORUNES2STR,
  3772			OSTR2BYTES,
  3773			OSTR2RUNES,
  3774			OCAP,
  3775			OCOMPLIT,
  3776			OMAPLIT,
  3777			OSTRUCTLIT,
  3778			OARRAYLIT,
  3779			OSLICELIT,
  3780			OPTRLIT,
  3781			OCONV,
  3782			OCONVIFACE,
  3783			OCONVNOP,
  3784			ODOT,
  3785			OEQ,
  3786			ONE,
  3787			OLT,
  3788			OLE,
  3789			OGT,
  3790			OGE,
  3791			OKEY,
  3792			OSTRUCTKEY,
  3793			OLEN,
  3794			OMUL,
  3795			OLSH,
  3796			ORSH,
  3797			OAND,
  3798			OANDNOT,
  3799			ONEW,
  3800			ONOT,
  3801			OBITNOT,
  3802			OPLUS,
  3803			ONEG,
  3804			OOROR,
  3805			OPAREN,
  3806			ORUNESTR,
  3807			OREAL,
  3808			OIMAG,
  3809			OCOMPLEX:
  3810			break
  3811	
  3812			// Discardable as long as we know it's not division by zero.
  3813		case ODIV, OMOD:
  3814			if Isconst(n.Right, CTINT) && n.Right.Val().U.(*Mpint).CmpInt64(0) != 0 {
  3815				break
  3816			}
  3817			if Isconst(n.Right, CTFLT) && n.Right.Val().U.(*Mpflt).CmpFloat64(0) != 0 {
  3818				break
  3819			}
  3820			return false
  3821	
  3822			// Discardable as long as we know it won't fail because of a bad size.
  3823		case OMAKECHAN, OMAKEMAP:
  3824			if Isconst(n.Left, CTINT) && n.Left.Val().U.(*Mpint).CmpInt64(0) == 0 {
  3825				break
  3826			}
  3827			return false
  3828	
  3829			// Difficult to tell what sizes are okay.
  3830		case OMAKESLICE:
  3831			return false
  3832		}
  3833	
  3834		if !candiscard(n.Left) || !candiscard(n.Right) || !candiscardlist(n.Ninit) || !candiscardlist(n.Nbody) || !candiscardlist(n.List) || !candiscardlist(n.Rlist) {
  3835			return false
  3836		}
  3837	
  3838		return true
  3839	}
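
// A few examples of how candiscard decides (sketch):
//
//	x + y          discardable (pure arithmetic over discardable operands)
//	x / 3          discardable (constant, non-zero divisor)
//	x / y          not discardable (y may be zero; the panic must be kept)
//	f(x)           not discardable (calls fall into the default case)
//	make([]T, n)   not discardable (the size may be bad)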
  3840	
  3841	// Rewrite
  3842	//	go builtin(x, y, z)
  3843	// into
  3844	//	go func(a1, a2, a3) {
  3845	//		builtin(a1, a2, a3)
  3846	//	}(x, y, z)
  3847	// for print, println, and delete.
  3848	
  3849	var wrapCall_prgen int
  3850	
  3851	// The result of wrapCall MUST be assigned back to n, e.g.
  3852	// 	n.Left = wrapCall(n.Left, init)
  3853	func wrapCall(n *Node, init *Nodes) *Node {
  3854		if n.Ninit.Len() != 0 {
  3855			walkstmtlist(n.Ninit.Slice())
  3856			init.AppendNodes(&n.Ninit)
  3857		}
  3858	
  3859		t := nod(OTFUNC, nil, nil)
  3860		for i, arg := range n.List.Slice() {
  3861			s := lookupN("a", i)
  3862			t.List.Append(symfield(s, arg.Type))
  3863		}
  3864	
  3865		wrapCall_prgen++
  3866		sym := lookupN("wrap·", wrapCall_prgen)
  3867		fn := dclfunc(sym, t)
  3868	
  3869		a := nod(n.Op, nil, nil)
  3870		a.List.Set(paramNnames(t.Type))
  3871		a = typecheck(a, ctxStmt)
  3872		fn.Nbody.Set1(a)
  3873	
  3874		funcbody()
  3875	
  3876		fn = typecheck(fn, ctxStmt)
  3877		typecheckslice(fn.Nbody.Slice(), ctxStmt)
  3878		xtop = append(xtop, fn)
  3879	
  3880		a = nod(OCALL, nil, nil)
  3881		a.Left = fn.Func.Nname
  3882		a.List.Set(n.List.Slice())
  3883		a = typecheck(a, ctxStmt)
  3884		a = walkexpr(a, init)
  3885		return a
  3886	}
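
// As a sketch (parameter names follow lookupN("a", i)), a statement such as
//
//	go println(x, y)
//
// ends up approximately as
//
//	func wrap·N(a0 T0, a1 T1) { println(a0, a1) }
//	go wrap·N(x, y)
//
// so the arguments are evaluated at the go (or defer) statement, while the
// builtin's expansion runs inside the wrapper.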
  3887	
  3888	// substArgTypes substitutes the given list of types for
  3889	// successive occurrences of the "any" placeholder in the
  3890	// type syntax expression n.Type.
  3891	// The result of substArgTypes MUST be assigned back to old, e.g.
  3892	// 	n.Left = substArgTypes(n.Left, t1, t2)
  3893	func substArgTypes(old *Node, types_ ...*types.Type) *Node {
  3894		n := old.copy()
  3895	
  3896		for _, t := range types_ {
  3897			dowidth(t)
  3898		}
  3899		n.Type = types.SubstAny(n.Type, &types_)
  3900		if len(types_) > 0 {
  3901			Fatalf("substArgTypes: too many argument types")
  3902		}
  3903		return n
  3904	}
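
// For example, the string comparison above does, in effect,
//
//	fn := syslook("memequal") // assumed builtin declaration: func memequal(x, y *any, size uintptr) bool
//	fn = substArgTypes(fn, types.Types[TUINT8], types.Types[TUINT8])
//
// which leaves fn typed as func(x, y *uint8, size uintptr) bool, with both
// "any" placeholders consumed; leftover argument types trigger the Fatalf above.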
  3905	
  3906	// canMergeLoads reports whether the backend optimization passes for
  3907	// the current architecture can combine adjacent loads into a single
  3908	// larger, possibly unaligned, load. Note that currently the
  3909	// optimizations must be able to handle little endian byte order.
  3910	func canMergeLoads() bool {
  3911		switch thearch.LinkArch.Family {
  3912		case sys.ARM64, sys.AMD64, sys.I386, sys.S390X:
  3913			return true
  3914		case sys.PPC64:
  3915			// Load combining only supported on ppc64le.
  3916			return thearch.LinkArch.ByteOrder == binary.LittleEndian
  3917		}
  3918		return false
  3919	}
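
// This gates the constant-string fast path above: when loads can be merged,
// a comparison against a short constant string is checked several bytes at a
// time (via the OR/shift construction) rather than one byte at a time.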
  3920	
  3921	// isRuneCount reports whether n is of the form len([]rune(string)).
  3922	// These are optimized into a call to runtime.countrunes.
  3923	func isRuneCount(n *Node) bool {
  3924		return Debug['N'] == 0 && !instrumenting && n.Op == OLEN && n.Left.Op == OSTR2RUNES
  3925	}
  3926	
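
// For illustration: with optimizations on, a count such as
//
//	n := len([]rune(s))
//
// is lowered to a runtime.countrunes(s) call, so the temporary []rune slice
// is never materialized.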