...

Source file src/pkg/cmd/compile/internal/ssa/rewriteMIPS.go
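This is the machine-generated MIPS lowering pass of the SSA backend: rewriteValueMIPS dispatches on the generic op of each *Value, and the per-op rewriteValueMIPS_Op*_0 helpers rewrite the value in place into MIPS machine ops, returning true when a rule fired. Each helper is produced from one rule in gen/MIPS.rules, and its match/cond/result comments reproduce that rule. As a rough sketch, reconstructed here from those comments rather than quoted from gen/MIPS.rules itself, rules for this vintage of the generator take a form along the lines of

    (Add16 x y) -> (ADD x y)
    (Add32F x y) -> (ADDF x y)
    (Eq32 x y) -> (SGTUconst [1] (XOR x y))

where the left-hand side is the generic SSA pattern being matched, an optional condition after && guards the rewrite (compare the config.BigEndian checks in the AtomicAnd8 and AtomicOr8 helpers below), and the right-hand side is the MIPS-specific replacement that the generated code builds with v.reset, AddArg, and b.NewValue0.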

     1	// Code generated from gen/MIPS.rules; DO NOT EDIT.
     2	// generated with: cd gen; go run *.go
     3	
     4	package ssa
     5	
     6	import "fmt"
     7	import "math"
     8	import "cmd/internal/obj"
     9	import "cmd/internal/objabi"
    10	import "cmd/compile/internal/types"
    11	
    12	var _ = fmt.Println   // in case not otherwise used
    13	var _ = math.MinInt8  // in case not otherwise used
    14	var _ = obj.ANOP      // in case not otherwise used
    15	var _ = objabi.GOROOT // in case not otherwise used
    16	var _ = types.TypeMem // in case not otherwise used
    17	
    18	func rewriteValueMIPS(v *Value) bool {
    19		switch v.Op {
    20		case OpAdd16:
    21			return rewriteValueMIPS_OpAdd16_0(v)
    22		case OpAdd32:
    23			return rewriteValueMIPS_OpAdd32_0(v)
    24		case OpAdd32F:
    25			return rewriteValueMIPS_OpAdd32F_0(v)
    26		case OpAdd32withcarry:
    27			return rewriteValueMIPS_OpAdd32withcarry_0(v)
    28		case OpAdd64F:
    29			return rewriteValueMIPS_OpAdd64F_0(v)
    30		case OpAdd8:
    31			return rewriteValueMIPS_OpAdd8_0(v)
    32		case OpAddPtr:
    33			return rewriteValueMIPS_OpAddPtr_0(v)
    34		case OpAddr:
    35			return rewriteValueMIPS_OpAddr_0(v)
    36		case OpAnd16:
    37			return rewriteValueMIPS_OpAnd16_0(v)
    38		case OpAnd32:
    39			return rewriteValueMIPS_OpAnd32_0(v)
    40		case OpAnd8:
    41			return rewriteValueMIPS_OpAnd8_0(v)
    42		case OpAndB:
    43			return rewriteValueMIPS_OpAndB_0(v)
    44		case OpAtomicAdd32:
    45			return rewriteValueMIPS_OpAtomicAdd32_0(v)
    46		case OpAtomicAnd8:
    47			return rewriteValueMIPS_OpAtomicAnd8_0(v)
    48		case OpAtomicCompareAndSwap32:
    49			return rewriteValueMIPS_OpAtomicCompareAndSwap32_0(v)
    50		case OpAtomicExchange32:
    51			return rewriteValueMIPS_OpAtomicExchange32_0(v)
    52		case OpAtomicLoad32:
    53			return rewriteValueMIPS_OpAtomicLoad32_0(v)
    54		case OpAtomicLoadPtr:
    55			return rewriteValueMIPS_OpAtomicLoadPtr_0(v)
    56		case OpAtomicOr8:
    57			return rewriteValueMIPS_OpAtomicOr8_0(v)
    58		case OpAtomicStore32:
    59			return rewriteValueMIPS_OpAtomicStore32_0(v)
    60		case OpAtomicStorePtrNoWB:
    61			return rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v)
    62		case OpAvg32u:
    63			return rewriteValueMIPS_OpAvg32u_0(v)
    64		case OpBitLen32:
    65			return rewriteValueMIPS_OpBitLen32_0(v)
    66		case OpClosureCall:
    67			return rewriteValueMIPS_OpClosureCall_0(v)
    68		case OpCom16:
    69			return rewriteValueMIPS_OpCom16_0(v)
    70		case OpCom32:
    71			return rewriteValueMIPS_OpCom32_0(v)
    72		case OpCom8:
    73			return rewriteValueMIPS_OpCom8_0(v)
    74		case OpConst16:
    75			return rewriteValueMIPS_OpConst16_0(v)
    76		case OpConst32:
    77			return rewriteValueMIPS_OpConst32_0(v)
    78		case OpConst32F:
    79			return rewriteValueMIPS_OpConst32F_0(v)
    80		case OpConst64F:
    81			return rewriteValueMIPS_OpConst64F_0(v)
    82		case OpConst8:
    83			return rewriteValueMIPS_OpConst8_0(v)
    84		case OpConstBool:
    85			return rewriteValueMIPS_OpConstBool_0(v)
    86		case OpConstNil:
    87			return rewriteValueMIPS_OpConstNil_0(v)
    88		case OpCtz32:
    89			return rewriteValueMIPS_OpCtz32_0(v)
    90		case OpCtz32NonZero:
    91			return rewriteValueMIPS_OpCtz32NonZero_0(v)
    92		case OpCvt32Fto32:
    93			return rewriteValueMIPS_OpCvt32Fto32_0(v)
    94		case OpCvt32Fto64F:
    95			return rewriteValueMIPS_OpCvt32Fto64F_0(v)
    96		case OpCvt32to32F:
    97			return rewriteValueMIPS_OpCvt32to32F_0(v)
    98		case OpCvt32to64F:
    99			return rewriteValueMIPS_OpCvt32to64F_0(v)
   100		case OpCvt64Fto32:
   101			return rewriteValueMIPS_OpCvt64Fto32_0(v)
   102		case OpCvt64Fto32F:
   103			return rewriteValueMIPS_OpCvt64Fto32F_0(v)
   104		case OpDiv16:
   105			return rewriteValueMIPS_OpDiv16_0(v)
   106		case OpDiv16u:
   107			return rewriteValueMIPS_OpDiv16u_0(v)
   108		case OpDiv32:
   109			return rewriteValueMIPS_OpDiv32_0(v)
   110		case OpDiv32F:
   111			return rewriteValueMIPS_OpDiv32F_0(v)
   112		case OpDiv32u:
   113			return rewriteValueMIPS_OpDiv32u_0(v)
   114		case OpDiv64F:
   115			return rewriteValueMIPS_OpDiv64F_0(v)
   116		case OpDiv8:
   117			return rewriteValueMIPS_OpDiv8_0(v)
   118		case OpDiv8u:
   119			return rewriteValueMIPS_OpDiv8u_0(v)
   120		case OpEq16:
   121			return rewriteValueMIPS_OpEq16_0(v)
   122		case OpEq32:
   123			return rewriteValueMIPS_OpEq32_0(v)
   124		case OpEq32F:
   125			return rewriteValueMIPS_OpEq32F_0(v)
   126		case OpEq64F:
   127			return rewriteValueMIPS_OpEq64F_0(v)
   128		case OpEq8:
   129			return rewriteValueMIPS_OpEq8_0(v)
   130		case OpEqB:
   131			return rewriteValueMIPS_OpEqB_0(v)
   132		case OpEqPtr:
   133			return rewriteValueMIPS_OpEqPtr_0(v)
   134		case OpGeq16:
   135			return rewriteValueMIPS_OpGeq16_0(v)
   136		case OpGeq16U:
   137			return rewriteValueMIPS_OpGeq16U_0(v)
   138		case OpGeq32:
   139			return rewriteValueMIPS_OpGeq32_0(v)
   140		case OpGeq32F:
   141			return rewriteValueMIPS_OpGeq32F_0(v)
   142		case OpGeq32U:
   143			return rewriteValueMIPS_OpGeq32U_0(v)
   144		case OpGeq64F:
   145			return rewriteValueMIPS_OpGeq64F_0(v)
   146		case OpGeq8:
   147			return rewriteValueMIPS_OpGeq8_0(v)
   148		case OpGeq8U:
   149			return rewriteValueMIPS_OpGeq8U_0(v)
   150		case OpGetCallerPC:
   151			return rewriteValueMIPS_OpGetCallerPC_0(v)
   152		case OpGetCallerSP:
   153			return rewriteValueMIPS_OpGetCallerSP_0(v)
   154		case OpGetClosurePtr:
   155			return rewriteValueMIPS_OpGetClosurePtr_0(v)
   156		case OpGreater16:
   157			return rewriteValueMIPS_OpGreater16_0(v)
   158		case OpGreater16U:
   159			return rewriteValueMIPS_OpGreater16U_0(v)
   160		case OpGreater32:
   161			return rewriteValueMIPS_OpGreater32_0(v)
   162		case OpGreater32F:
   163			return rewriteValueMIPS_OpGreater32F_0(v)
   164		case OpGreater32U:
   165			return rewriteValueMIPS_OpGreater32U_0(v)
   166		case OpGreater64F:
   167			return rewriteValueMIPS_OpGreater64F_0(v)
   168		case OpGreater8:
   169			return rewriteValueMIPS_OpGreater8_0(v)
   170		case OpGreater8U:
   171			return rewriteValueMIPS_OpGreater8U_0(v)
   172		case OpHmul32:
   173			return rewriteValueMIPS_OpHmul32_0(v)
   174		case OpHmul32u:
   175			return rewriteValueMIPS_OpHmul32u_0(v)
   176		case OpInterCall:
   177			return rewriteValueMIPS_OpInterCall_0(v)
   178		case OpIsInBounds:
   179			return rewriteValueMIPS_OpIsInBounds_0(v)
   180		case OpIsNonNil:
   181			return rewriteValueMIPS_OpIsNonNil_0(v)
   182		case OpIsSliceInBounds:
   183			return rewriteValueMIPS_OpIsSliceInBounds_0(v)
   184		case OpLeq16:
   185			return rewriteValueMIPS_OpLeq16_0(v)
   186		case OpLeq16U:
   187			return rewriteValueMIPS_OpLeq16U_0(v)
   188		case OpLeq32:
   189			return rewriteValueMIPS_OpLeq32_0(v)
   190		case OpLeq32F:
   191			return rewriteValueMIPS_OpLeq32F_0(v)
   192		case OpLeq32U:
   193			return rewriteValueMIPS_OpLeq32U_0(v)
   194		case OpLeq64F:
   195			return rewriteValueMIPS_OpLeq64F_0(v)
   196		case OpLeq8:
   197			return rewriteValueMIPS_OpLeq8_0(v)
   198		case OpLeq8U:
   199			return rewriteValueMIPS_OpLeq8U_0(v)
   200		case OpLess16:
   201			return rewriteValueMIPS_OpLess16_0(v)
   202		case OpLess16U:
   203			return rewriteValueMIPS_OpLess16U_0(v)
   204		case OpLess32:
   205			return rewriteValueMIPS_OpLess32_0(v)
   206		case OpLess32F:
   207			return rewriteValueMIPS_OpLess32F_0(v)
   208		case OpLess32U:
   209			return rewriteValueMIPS_OpLess32U_0(v)
   210		case OpLess64F:
   211			return rewriteValueMIPS_OpLess64F_0(v)
   212		case OpLess8:
   213			return rewriteValueMIPS_OpLess8_0(v)
   214		case OpLess8U:
   215			return rewriteValueMIPS_OpLess8U_0(v)
   216		case OpLoad:
   217			return rewriteValueMIPS_OpLoad_0(v)
   218		case OpLocalAddr:
   219			return rewriteValueMIPS_OpLocalAddr_0(v)
   220		case OpLsh16x16:
   221			return rewriteValueMIPS_OpLsh16x16_0(v)
   222		case OpLsh16x32:
   223			return rewriteValueMIPS_OpLsh16x32_0(v)
   224		case OpLsh16x64:
   225			return rewriteValueMIPS_OpLsh16x64_0(v)
   226		case OpLsh16x8:
   227			return rewriteValueMIPS_OpLsh16x8_0(v)
   228		case OpLsh32x16:
   229			return rewriteValueMIPS_OpLsh32x16_0(v)
   230		case OpLsh32x32:
   231			return rewriteValueMIPS_OpLsh32x32_0(v)
   232		case OpLsh32x64:
   233			return rewriteValueMIPS_OpLsh32x64_0(v)
   234		case OpLsh32x8:
   235			return rewriteValueMIPS_OpLsh32x8_0(v)
   236		case OpLsh8x16:
   237			return rewriteValueMIPS_OpLsh8x16_0(v)
   238		case OpLsh8x32:
   239			return rewriteValueMIPS_OpLsh8x32_0(v)
   240		case OpLsh8x64:
   241			return rewriteValueMIPS_OpLsh8x64_0(v)
   242		case OpLsh8x8:
   243			return rewriteValueMIPS_OpLsh8x8_0(v)
   244		case OpMIPSADD:
   245			return rewriteValueMIPS_OpMIPSADD_0(v)
   246		case OpMIPSADDconst:
   247			return rewriteValueMIPS_OpMIPSADDconst_0(v)
   248		case OpMIPSAND:
   249			return rewriteValueMIPS_OpMIPSAND_0(v)
   250		case OpMIPSANDconst:
   251			return rewriteValueMIPS_OpMIPSANDconst_0(v)
   252		case OpMIPSCMOVZ:
   253			return rewriteValueMIPS_OpMIPSCMOVZ_0(v)
   254		case OpMIPSCMOVZzero:
   255			return rewriteValueMIPS_OpMIPSCMOVZzero_0(v)
   256		case OpMIPSLoweredAtomicAdd:
   257			return rewriteValueMIPS_OpMIPSLoweredAtomicAdd_0(v)
   258		case OpMIPSLoweredAtomicStore:
   259			return rewriteValueMIPS_OpMIPSLoweredAtomicStore_0(v)
   260		case OpMIPSMOVBUload:
   261			return rewriteValueMIPS_OpMIPSMOVBUload_0(v)
   262		case OpMIPSMOVBUreg:
   263			return rewriteValueMIPS_OpMIPSMOVBUreg_0(v)
   264		case OpMIPSMOVBload:
   265			return rewriteValueMIPS_OpMIPSMOVBload_0(v)
   266		case OpMIPSMOVBreg:
   267			return rewriteValueMIPS_OpMIPSMOVBreg_0(v)
   268		case OpMIPSMOVBstore:
   269			return rewriteValueMIPS_OpMIPSMOVBstore_0(v)
   270		case OpMIPSMOVBstorezero:
   271			return rewriteValueMIPS_OpMIPSMOVBstorezero_0(v)
   272		case OpMIPSMOVDload:
   273			return rewriteValueMIPS_OpMIPSMOVDload_0(v)
   274		case OpMIPSMOVDstore:
   275			return rewriteValueMIPS_OpMIPSMOVDstore_0(v)
   276		case OpMIPSMOVFload:
   277			return rewriteValueMIPS_OpMIPSMOVFload_0(v)
   278		case OpMIPSMOVFstore:
   279			return rewriteValueMIPS_OpMIPSMOVFstore_0(v)
   280		case OpMIPSMOVHUload:
   281			return rewriteValueMIPS_OpMIPSMOVHUload_0(v)
   282		case OpMIPSMOVHUreg:
   283			return rewriteValueMIPS_OpMIPSMOVHUreg_0(v)
   284		case OpMIPSMOVHload:
   285			return rewriteValueMIPS_OpMIPSMOVHload_0(v)
   286		case OpMIPSMOVHreg:
   287			return rewriteValueMIPS_OpMIPSMOVHreg_0(v)
   288		case OpMIPSMOVHstore:
   289			return rewriteValueMIPS_OpMIPSMOVHstore_0(v)
   290		case OpMIPSMOVHstorezero:
   291			return rewriteValueMIPS_OpMIPSMOVHstorezero_0(v)
   292		case OpMIPSMOVWload:
   293			return rewriteValueMIPS_OpMIPSMOVWload_0(v)
   294		case OpMIPSMOVWreg:
   295			return rewriteValueMIPS_OpMIPSMOVWreg_0(v)
   296		case OpMIPSMOVWstore:
   297			return rewriteValueMIPS_OpMIPSMOVWstore_0(v)
   298		case OpMIPSMOVWstorezero:
   299			return rewriteValueMIPS_OpMIPSMOVWstorezero_0(v)
   300		case OpMIPSMUL:
   301			return rewriteValueMIPS_OpMIPSMUL_0(v)
   302		case OpMIPSNEG:
   303			return rewriteValueMIPS_OpMIPSNEG_0(v)
   304		case OpMIPSNOR:
   305			return rewriteValueMIPS_OpMIPSNOR_0(v)
   306		case OpMIPSNORconst:
   307			return rewriteValueMIPS_OpMIPSNORconst_0(v)
   308		case OpMIPSOR:
   309			return rewriteValueMIPS_OpMIPSOR_0(v)
   310		case OpMIPSORconst:
   311			return rewriteValueMIPS_OpMIPSORconst_0(v)
   312		case OpMIPSSGT:
   313			return rewriteValueMIPS_OpMIPSSGT_0(v)
   314		case OpMIPSSGTU:
   315			return rewriteValueMIPS_OpMIPSSGTU_0(v)
   316		case OpMIPSSGTUconst:
   317			return rewriteValueMIPS_OpMIPSSGTUconst_0(v)
   318		case OpMIPSSGTUzero:
   319			return rewriteValueMIPS_OpMIPSSGTUzero_0(v)
   320		case OpMIPSSGTconst:
   321			return rewriteValueMIPS_OpMIPSSGTconst_0(v) || rewriteValueMIPS_OpMIPSSGTconst_10(v)
   322		case OpMIPSSGTzero:
   323			return rewriteValueMIPS_OpMIPSSGTzero_0(v)
   324		case OpMIPSSLL:
   325			return rewriteValueMIPS_OpMIPSSLL_0(v)
   326		case OpMIPSSLLconst:
   327			return rewriteValueMIPS_OpMIPSSLLconst_0(v)
   328		case OpMIPSSRA:
   329			return rewriteValueMIPS_OpMIPSSRA_0(v)
   330		case OpMIPSSRAconst:
   331			return rewriteValueMIPS_OpMIPSSRAconst_0(v)
   332		case OpMIPSSRL:
   333			return rewriteValueMIPS_OpMIPSSRL_0(v)
   334		case OpMIPSSRLconst:
   335			return rewriteValueMIPS_OpMIPSSRLconst_0(v)
   336		case OpMIPSSUB:
   337			return rewriteValueMIPS_OpMIPSSUB_0(v)
   338		case OpMIPSSUBconst:
   339			return rewriteValueMIPS_OpMIPSSUBconst_0(v)
   340		case OpMIPSXOR:
   341			return rewriteValueMIPS_OpMIPSXOR_0(v)
   342		case OpMIPSXORconst:
   343			return rewriteValueMIPS_OpMIPSXORconst_0(v)
   344		case OpMod16:
   345			return rewriteValueMIPS_OpMod16_0(v)
   346		case OpMod16u:
   347			return rewriteValueMIPS_OpMod16u_0(v)
   348		case OpMod32:
   349			return rewriteValueMIPS_OpMod32_0(v)
   350		case OpMod32u:
   351			return rewriteValueMIPS_OpMod32u_0(v)
   352		case OpMod8:
   353			return rewriteValueMIPS_OpMod8_0(v)
   354		case OpMod8u:
   355			return rewriteValueMIPS_OpMod8u_0(v)
   356		case OpMove:
   357			return rewriteValueMIPS_OpMove_0(v) || rewriteValueMIPS_OpMove_10(v)
   358		case OpMul16:
   359			return rewriteValueMIPS_OpMul16_0(v)
   360		case OpMul32:
   361			return rewriteValueMIPS_OpMul32_0(v)
   362		case OpMul32F:
   363			return rewriteValueMIPS_OpMul32F_0(v)
   364		case OpMul32uhilo:
   365			return rewriteValueMIPS_OpMul32uhilo_0(v)
   366		case OpMul64F:
   367			return rewriteValueMIPS_OpMul64F_0(v)
   368		case OpMul8:
   369			return rewriteValueMIPS_OpMul8_0(v)
   370		case OpNeg16:
   371			return rewriteValueMIPS_OpNeg16_0(v)
   372		case OpNeg32:
   373			return rewriteValueMIPS_OpNeg32_0(v)
   374		case OpNeg32F:
   375			return rewriteValueMIPS_OpNeg32F_0(v)
   376		case OpNeg64F:
   377			return rewriteValueMIPS_OpNeg64F_0(v)
   378		case OpNeg8:
   379			return rewriteValueMIPS_OpNeg8_0(v)
   380		case OpNeq16:
   381			return rewriteValueMIPS_OpNeq16_0(v)
   382		case OpNeq32:
   383			return rewriteValueMIPS_OpNeq32_0(v)
   384		case OpNeq32F:
   385			return rewriteValueMIPS_OpNeq32F_0(v)
   386		case OpNeq64F:
   387			return rewriteValueMIPS_OpNeq64F_0(v)
   388		case OpNeq8:
   389			return rewriteValueMIPS_OpNeq8_0(v)
   390		case OpNeqB:
   391			return rewriteValueMIPS_OpNeqB_0(v)
   392		case OpNeqPtr:
   393			return rewriteValueMIPS_OpNeqPtr_0(v)
   394		case OpNilCheck:
   395			return rewriteValueMIPS_OpNilCheck_0(v)
   396		case OpNot:
   397			return rewriteValueMIPS_OpNot_0(v)
   398		case OpOffPtr:
   399			return rewriteValueMIPS_OpOffPtr_0(v)
   400		case OpOr16:
   401			return rewriteValueMIPS_OpOr16_0(v)
   402		case OpOr32:
   403			return rewriteValueMIPS_OpOr32_0(v)
   404		case OpOr8:
   405			return rewriteValueMIPS_OpOr8_0(v)
   406		case OpOrB:
   407			return rewriteValueMIPS_OpOrB_0(v)
   408		case OpPanicBounds:
   409			return rewriteValueMIPS_OpPanicBounds_0(v)
   410		case OpPanicExtend:
   411			return rewriteValueMIPS_OpPanicExtend_0(v)
   412		case OpRotateLeft16:
   413			return rewriteValueMIPS_OpRotateLeft16_0(v)
   414		case OpRotateLeft32:
   415			return rewriteValueMIPS_OpRotateLeft32_0(v)
   416		case OpRotateLeft64:
   417			return rewriteValueMIPS_OpRotateLeft64_0(v)
   418		case OpRotateLeft8:
   419			return rewriteValueMIPS_OpRotateLeft8_0(v)
   420		case OpRound32F:
   421			return rewriteValueMIPS_OpRound32F_0(v)
   422		case OpRound64F:
   423			return rewriteValueMIPS_OpRound64F_0(v)
   424		case OpRsh16Ux16:
   425			return rewriteValueMIPS_OpRsh16Ux16_0(v)
   426		case OpRsh16Ux32:
   427			return rewriteValueMIPS_OpRsh16Ux32_0(v)
   428		case OpRsh16Ux64:
   429			return rewriteValueMIPS_OpRsh16Ux64_0(v)
   430		case OpRsh16Ux8:
   431			return rewriteValueMIPS_OpRsh16Ux8_0(v)
   432		case OpRsh16x16:
   433			return rewriteValueMIPS_OpRsh16x16_0(v)
   434		case OpRsh16x32:
   435			return rewriteValueMIPS_OpRsh16x32_0(v)
   436		case OpRsh16x64:
   437			return rewriteValueMIPS_OpRsh16x64_0(v)
   438		case OpRsh16x8:
   439			return rewriteValueMIPS_OpRsh16x8_0(v)
   440		case OpRsh32Ux16:
   441			return rewriteValueMIPS_OpRsh32Ux16_0(v)
   442		case OpRsh32Ux32:
   443			return rewriteValueMIPS_OpRsh32Ux32_0(v)
   444		case OpRsh32Ux64:
   445			return rewriteValueMIPS_OpRsh32Ux64_0(v)
   446		case OpRsh32Ux8:
   447			return rewriteValueMIPS_OpRsh32Ux8_0(v)
   448		case OpRsh32x16:
   449			return rewriteValueMIPS_OpRsh32x16_0(v)
   450		case OpRsh32x32:
   451			return rewriteValueMIPS_OpRsh32x32_0(v)
   452		case OpRsh32x64:
   453			return rewriteValueMIPS_OpRsh32x64_0(v)
   454		case OpRsh32x8:
   455			return rewriteValueMIPS_OpRsh32x8_0(v)
   456		case OpRsh8Ux16:
   457			return rewriteValueMIPS_OpRsh8Ux16_0(v)
   458		case OpRsh8Ux32:
   459			return rewriteValueMIPS_OpRsh8Ux32_0(v)
   460		case OpRsh8Ux64:
   461			return rewriteValueMIPS_OpRsh8Ux64_0(v)
   462		case OpRsh8Ux8:
   463			return rewriteValueMIPS_OpRsh8Ux8_0(v)
   464		case OpRsh8x16:
   465			return rewriteValueMIPS_OpRsh8x16_0(v)
   466		case OpRsh8x32:
   467			return rewriteValueMIPS_OpRsh8x32_0(v)
   468		case OpRsh8x64:
   469			return rewriteValueMIPS_OpRsh8x64_0(v)
   470		case OpRsh8x8:
   471			return rewriteValueMIPS_OpRsh8x8_0(v)
   472		case OpSelect0:
   473			return rewriteValueMIPS_OpSelect0_0(v) || rewriteValueMIPS_OpSelect0_10(v)
   474		case OpSelect1:
   475			return rewriteValueMIPS_OpSelect1_0(v) || rewriteValueMIPS_OpSelect1_10(v)
   476		case OpSignExt16to32:
   477			return rewriteValueMIPS_OpSignExt16to32_0(v)
   478		case OpSignExt8to16:
   479			return rewriteValueMIPS_OpSignExt8to16_0(v)
   480		case OpSignExt8to32:
   481			return rewriteValueMIPS_OpSignExt8to32_0(v)
   482		case OpSignmask:
   483			return rewriteValueMIPS_OpSignmask_0(v)
   484		case OpSlicemask:
   485			return rewriteValueMIPS_OpSlicemask_0(v)
   486		case OpSqrt:
   487			return rewriteValueMIPS_OpSqrt_0(v)
   488		case OpStaticCall:
   489			return rewriteValueMIPS_OpStaticCall_0(v)
   490		case OpStore:
   491			return rewriteValueMIPS_OpStore_0(v)
   492		case OpSub16:
   493			return rewriteValueMIPS_OpSub16_0(v)
   494		case OpSub32:
   495			return rewriteValueMIPS_OpSub32_0(v)
   496		case OpSub32F:
   497			return rewriteValueMIPS_OpSub32F_0(v)
   498		case OpSub32withcarry:
   499			return rewriteValueMIPS_OpSub32withcarry_0(v)
   500		case OpSub64F:
   501			return rewriteValueMIPS_OpSub64F_0(v)
   502		case OpSub8:
   503			return rewriteValueMIPS_OpSub8_0(v)
   504		case OpSubPtr:
   505			return rewriteValueMIPS_OpSubPtr_0(v)
   506		case OpTrunc16to8:
   507			return rewriteValueMIPS_OpTrunc16to8_0(v)
   508		case OpTrunc32to16:
   509			return rewriteValueMIPS_OpTrunc32to16_0(v)
   510		case OpTrunc32to8:
   511			return rewriteValueMIPS_OpTrunc32to8_0(v)
   512		case OpWB:
   513			return rewriteValueMIPS_OpWB_0(v)
   514		case OpXor16:
   515			return rewriteValueMIPS_OpXor16_0(v)
   516		case OpXor32:
   517			return rewriteValueMIPS_OpXor32_0(v)
   518		case OpXor8:
   519			return rewriteValueMIPS_OpXor8_0(v)
   520		case OpZero:
   521			return rewriteValueMIPS_OpZero_0(v) || rewriteValueMIPS_OpZero_10(v)
   522		case OpZeroExt16to32:
   523			return rewriteValueMIPS_OpZeroExt16to32_0(v)
   524		case OpZeroExt8to16:
   525			return rewriteValueMIPS_OpZeroExt8to16_0(v)
   526		case OpZeroExt8to32:
   527			return rewriteValueMIPS_OpZeroExt8to32_0(v)
   528		case OpZeromask:
   529			return rewriteValueMIPS_OpZeromask_0(v)
   530		}
   531		return false
   532	}
   533	func rewriteValueMIPS_OpAdd16_0(v *Value) bool {
   534		// match: (Add16 x y)
   535		// cond:
   536		// result: (ADD x y)
   537		for {
   538			y := v.Args[1]
   539			x := v.Args[0]
   540			v.reset(OpMIPSADD)
   541			v.AddArg(x)
   542			v.AddArg(y)
   543			return true
   544		}
   545	}
   546	func rewriteValueMIPS_OpAdd32_0(v *Value) bool {
   547		// match: (Add32 x y)
   548		// cond:
   549		// result: (ADD x y)
   550		for {
   551			y := v.Args[1]
   552			x := v.Args[0]
   553			v.reset(OpMIPSADD)
   554			v.AddArg(x)
   555			v.AddArg(y)
   556			return true
   557		}
   558	}
   559	func rewriteValueMIPS_OpAdd32F_0(v *Value) bool {
   560		// match: (Add32F x y)
   561		// cond:
   562		// result: (ADDF x y)
   563		for {
   564			y := v.Args[1]
   565			x := v.Args[0]
   566			v.reset(OpMIPSADDF)
   567			v.AddArg(x)
   568			v.AddArg(y)
   569			return true
   570		}
   571	}
   572	func rewriteValueMIPS_OpAdd32withcarry_0(v *Value) bool {
   573		b := v.Block
   574		// match: (Add32withcarry <t> x y c)
   575		// cond:
   576		// result: (ADD c (ADD <t> x y))
   577		for {
   578			t := v.Type
   579			c := v.Args[2]
   580			x := v.Args[0]
   581			y := v.Args[1]
   582			v.reset(OpMIPSADD)
   583			v.AddArg(c)
   584			v0 := b.NewValue0(v.Pos, OpMIPSADD, t)
   585			v0.AddArg(x)
   586			v0.AddArg(y)
   587			v.AddArg(v0)
   588			return true
   589		}
   590	}
   591	func rewriteValueMIPS_OpAdd64F_0(v *Value) bool {
   592		// match: (Add64F x y)
   593		// cond:
   594		// result: (ADDD x y)
   595		for {
   596			y := v.Args[1]
   597			x := v.Args[0]
   598			v.reset(OpMIPSADDD)
   599			v.AddArg(x)
   600			v.AddArg(y)
   601			return true
   602		}
   603	}
   604	func rewriteValueMIPS_OpAdd8_0(v *Value) bool {
   605		// match: (Add8 x y)
   606		// cond:
   607		// result: (ADD x y)
   608		for {
   609			y := v.Args[1]
   610			x := v.Args[0]
   611			v.reset(OpMIPSADD)
   612			v.AddArg(x)
   613			v.AddArg(y)
   614			return true
   615		}
   616	}
   617	func rewriteValueMIPS_OpAddPtr_0(v *Value) bool {
   618		// match: (AddPtr x y)
   619		// cond:
   620		// result: (ADD x y)
   621		for {
   622			y := v.Args[1]
   623			x := v.Args[0]
   624			v.reset(OpMIPSADD)
   625			v.AddArg(x)
   626			v.AddArg(y)
   627			return true
   628		}
   629	}
   630	func rewriteValueMIPS_OpAddr_0(v *Value) bool {
   631		// match: (Addr {sym} base)
   632		// cond:
   633		// result: (MOVWaddr {sym} base)
   634		for {
   635			sym := v.Aux
   636			base := v.Args[0]
   637			v.reset(OpMIPSMOVWaddr)
   638			v.Aux = sym
   639			v.AddArg(base)
   640			return true
   641		}
   642	}
   643	func rewriteValueMIPS_OpAnd16_0(v *Value) bool {
   644		// match: (And16 x y)
   645		// cond:
   646		// result: (AND x y)
   647		for {
   648			y := v.Args[1]
   649			x := v.Args[0]
   650			v.reset(OpMIPSAND)
   651			v.AddArg(x)
   652			v.AddArg(y)
   653			return true
   654		}
   655	}
   656	func rewriteValueMIPS_OpAnd32_0(v *Value) bool {
   657		// match: (And32 x y)
   658		// cond:
   659		// result: (AND x y)
   660		for {
   661			y := v.Args[1]
   662			x := v.Args[0]
   663			v.reset(OpMIPSAND)
   664			v.AddArg(x)
   665			v.AddArg(y)
   666			return true
   667		}
   668	}
   669	func rewriteValueMIPS_OpAnd8_0(v *Value) bool {
   670		// match: (And8 x y)
   671		// cond:
   672		// result: (AND x y)
   673		for {
   674			y := v.Args[1]
   675			x := v.Args[0]
   676			v.reset(OpMIPSAND)
   677			v.AddArg(x)
   678			v.AddArg(y)
   679			return true
   680		}
   681	}
   682	func rewriteValueMIPS_OpAndB_0(v *Value) bool {
   683		// match: (AndB x y)
   684		// cond:
   685		// result: (AND x y)
   686		for {
   687			y := v.Args[1]
   688			x := v.Args[0]
   689			v.reset(OpMIPSAND)
   690			v.AddArg(x)
   691			v.AddArg(y)
   692			return true
   693		}
   694	}
   695	func rewriteValueMIPS_OpAtomicAdd32_0(v *Value) bool {
   696		// match: (AtomicAdd32 ptr val mem)
   697		// cond:
   698		// result: (LoweredAtomicAdd ptr val mem)
   699		for {
   700			mem := v.Args[2]
   701			ptr := v.Args[0]
   702			val := v.Args[1]
   703			v.reset(OpMIPSLoweredAtomicAdd)
   704			v.AddArg(ptr)
   705			v.AddArg(val)
   706			v.AddArg(mem)
   707			return true
   708		}
   709	}
   710	func rewriteValueMIPS_OpAtomicAnd8_0(v *Value) bool {
   711		b := v.Block
   712		config := b.Func.Config
   713		typ := &b.Func.Config.Types
   714		// match: (AtomicAnd8 ptr val mem)
   715		// cond: !config.BigEndian
   716		// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) (NORconst [0] <typ.UInt32> (SLL <typ.UInt32> (MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))))) mem)
   717		for {
   718			mem := v.Args[2]
   719			ptr := v.Args[0]
   720			val := v.Args[1]
   721			if !(!config.BigEndian) {
   722				break
   723			}
   724			v.reset(OpMIPSLoweredAtomicAnd)
   725			v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   726			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   727			v1.AuxInt = ^3
   728			v0.AddArg(v1)
   729			v0.AddArg(ptr)
   730			v.AddArg(v0)
   731			v2 := b.NewValue0(v.Pos, OpMIPSOR, typ.UInt32)
   732			v3 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   733			v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   734			v4.AddArg(val)
   735			v3.AddArg(v4)
   736			v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   737			v5.AuxInt = 3
   738			v6 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   739			v6.AuxInt = 3
   740			v6.AddArg(ptr)
   741			v5.AddArg(v6)
   742			v3.AddArg(v5)
   743			v2.AddArg(v3)
   744			v7 := b.NewValue0(v.Pos, OpMIPSNORconst, typ.UInt32)
   745			v7.AuxInt = 0
   746			v8 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   747			v9 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   748			v9.AuxInt = 0xff
   749			v8.AddArg(v9)
   750			v10 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   751			v10.AuxInt = 3
   752			v11 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   753			v11.AuxInt = 3
   754			v11.AddArg(ptr)
   755			v10.AddArg(v11)
   756			v8.AddArg(v10)
   757			v7.AddArg(v8)
   758			v2.AddArg(v7)
   759			v.AddArg(v2)
   760			v.AddArg(mem)
   761			return true
   762		}
   763		// match: (AtomicAnd8 ptr val mem)
   764		// cond: config.BigEndian
   765		// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))) (NORconst [0] <typ.UInt32> (SLL <typ.UInt32> (MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))))) mem)
   766		for {
   767			mem := v.Args[2]
   768			ptr := v.Args[0]
   769			val := v.Args[1]
   770			if !(config.BigEndian) {
   771				break
   772			}
   773			v.reset(OpMIPSLoweredAtomicAnd)
   774			v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   775			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   776			v1.AuxInt = ^3
   777			v0.AddArg(v1)
   778			v0.AddArg(ptr)
   779			v.AddArg(v0)
   780			v2 := b.NewValue0(v.Pos, OpMIPSOR, typ.UInt32)
   781			v3 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   782			v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   783			v4.AddArg(val)
   784			v3.AddArg(v4)
   785			v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   786			v5.AuxInt = 3
   787			v6 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   788			v6.AuxInt = 3
   789			v7 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
   790			v7.AuxInt = 3
   791			v7.AddArg(ptr)
   792			v6.AddArg(v7)
   793			v5.AddArg(v6)
   794			v3.AddArg(v5)
   795			v2.AddArg(v3)
   796			v8 := b.NewValue0(v.Pos, OpMIPSNORconst, typ.UInt32)
   797			v8.AuxInt = 0
   798			v9 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   799			v10 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   800			v10.AuxInt = 0xff
   801			v9.AddArg(v10)
   802			v11 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   803			v11.AuxInt = 3
   804			v12 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   805			v12.AuxInt = 3
   806			v13 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
   807			v13.AuxInt = 3
   808			v13.AddArg(ptr)
   809			v12.AddArg(v13)
   810			v11.AddArg(v12)
   811			v9.AddArg(v11)
   812			v8.AddArg(v9)
   813			v2.AddArg(v8)
   814			v.AddArg(v2)
   815			v.AddArg(mem)
   816			return true
   817		}
   818		return false
   819	}
   820	func rewriteValueMIPS_OpAtomicCompareAndSwap32_0(v *Value) bool {
   821		// match: (AtomicCompareAndSwap32 ptr old new_ mem)
   822		// cond:
   823		// result: (LoweredAtomicCas ptr old new_ mem)
   824		for {
   825			mem := v.Args[3]
   826			ptr := v.Args[0]
   827			old := v.Args[1]
   828			new_ := v.Args[2]
   829			v.reset(OpMIPSLoweredAtomicCas)
   830			v.AddArg(ptr)
   831			v.AddArg(old)
   832			v.AddArg(new_)
   833			v.AddArg(mem)
   834			return true
   835		}
   836	}
   837	func rewriteValueMIPS_OpAtomicExchange32_0(v *Value) bool {
   838		// match: (AtomicExchange32 ptr val mem)
   839		// cond:
   840		// result: (LoweredAtomicExchange ptr val mem)
   841		for {
   842			mem := v.Args[2]
   843			ptr := v.Args[0]
   844			val := v.Args[1]
   845			v.reset(OpMIPSLoweredAtomicExchange)
   846			v.AddArg(ptr)
   847			v.AddArg(val)
   848			v.AddArg(mem)
   849			return true
   850		}
   851	}
   852	func rewriteValueMIPS_OpAtomicLoad32_0(v *Value) bool {
   853		// match: (AtomicLoad32 ptr mem)
   854		// cond:
   855		// result: (LoweredAtomicLoad ptr mem)
   856		for {
   857			mem := v.Args[1]
   858			ptr := v.Args[0]
   859			v.reset(OpMIPSLoweredAtomicLoad)
   860			v.AddArg(ptr)
   861			v.AddArg(mem)
   862			return true
   863		}
   864	}
   865	func rewriteValueMIPS_OpAtomicLoadPtr_0(v *Value) bool {
   866		// match: (AtomicLoadPtr ptr mem)
   867		// cond:
   868		// result: (LoweredAtomicLoad ptr mem)
   869		for {
   870			mem := v.Args[1]
   871			ptr := v.Args[0]
   872			v.reset(OpMIPSLoweredAtomicLoad)
   873			v.AddArg(ptr)
   874			v.AddArg(mem)
   875			return true
   876		}
   877	}
   878	func rewriteValueMIPS_OpAtomicOr8_0(v *Value) bool {
   879		b := v.Block
   880		config := b.Func.Config
   881		typ := &b.Func.Config.Types
   882		// match: (AtomicOr8 ptr val mem)
   883		// cond: !config.BigEndian
   884		// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) mem)
   885		for {
   886			mem := v.Args[2]
   887			ptr := v.Args[0]
   888			val := v.Args[1]
   889			if !(!config.BigEndian) {
   890				break
   891			}
   892			v.reset(OpMIPSLoweredAtomicOr)
   893			v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   894			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   895			v1.AuxInt = ^3
   896			v0.AddArg(v1)
   897			v0.AddArg(ptr)
   898			v.AddArg(v0)
   899			v2 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   900			v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   901			v3.AddArg(val)
   902			v2.AddArg(v3)
   903			v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   904			v4.AuxInt = 3
   905			v5 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   906			v5.AuxInt = 3
   907			v5.AddArg(ptr)
   908			v4.AddArg(v5)
   909			v2.AddArg(v4)
   910			v.AddArg(v2)
   911			v.AddArg(mem)
   912			return true
   913		}
   914		// match: (AtomicOr8 ptr val mem)
   915		// cond: config.BigEndian
   916		// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))) mem)
   917		for {
   918			mem := v.Args[2]
   919			ptr := v.Args[0]
   920			val := v.Args[1]
   921			if !(config.BigEndian) {
   922				break
   923			}
   924			v.reset(OpMIPSLoweredAtomicOr)
   925			v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
   926			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
   927			v1.AuxInt = ^3
   928			v0.AddArg(v1)
   929			v0.AddArg(ptr)
   930			v.AddArg(v0)
   931			v2 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
   932			v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
   933			v3.AddArg(val)
   934			v2.AddArg(v3)
   935			v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
   936			v4.AuxInt = 3
   937			v5 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
   938			v5.AuxInt = 3
   939			v6 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
   940			v6.AuxInt = 3
   941			v6.AddArg(ptr)
   942			v5.AddArg(v6)
   943			v4.AddArg(v5)
   944			v2.AddArg(v4)
   945			v.AddArg(v2)
   946			v.AddArg(mem)
   947			return true
   948		}
   949		return false
   950	}
   951	func rewriteValueMIPS_OpAtomicStore32_0(v *Value) bool {
   952		// match: (AtomicStore32 ptr val mem)
   953		// cond:
   954		// result: (LoweredAtomicStore ptr val mem)
   955		for {
   956			mem := v.Args[2]
   957			ptr := v.Args[0]
   958			val := v.Args[1]
   959			v.reset(OpMIPSLoweredAtomicStore)
   960			v.AddArg(ptr)
   961			v.AddArg(val)
   962			v.AddArg(mem)
   963			return true
   964		}
   965	}
   966	func rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v *Value) bool {
   967		// match: (AtomicStorePtrNoWB ptr val mem)
   968		// cond:
   969		// result: (LoweredAtomicStore ptr val mem)
   970		for {
   971			mem := v.Args[2]
   972			ptr := v.Args[0]
   973			val := v.Args[1]
   974			v.reset(OpMIPSLoweredAtomicStore)
   975			v.AddArg(ptr)
   976			v.AddArg(val)
   977			v.AddArg(mem)
   978			return true
   979		}
   980	}
   981	func rewriteValueMIPS_OpAvg32u_0(v *Value) bool {
   982		b := v.Block
   983		// match: (Avg32u <t> x y)
   984		// cond:
   985		// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
   986		for {
   987			t := v.Type
   988			y := v.Args[1]
   989			x := v.Args[0]
   990			v.reset(OpMIPSADD)
   991			v0 := b.NewValue0(v.Pos, OpMIPSSRLconst, t)
   992			v0.AuxInt = 1
   993			v1 := b.NewValue0(v.Pos, OpMIPSSUB, t)
   994			v1.AddArg(x)
   995			v1.AddArg(y)
   996			v0.AddArg(v1)
   997			v.AddArg(v0)
   998			v.AddArg(y)
   999			return true
  1000		}
  1001	}
  1002	func rewriteValueMIPS_OpBitLen32_0(v *Value) bool {
  1003		b := v.Block
  1004		typ := &b.Func.Config.Types
  1005		// match: (BitLen32 <t> x)
  1006		// cond:
  1007		// result: (SUB (MOVWconst [32]) (CLZ <t> x))
  1008		for {
  1009			t := v.Type
  1010			x := v.Args[0]
  1011			v.reset(OpMIPSSUB)
  1012			v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  1013			v0.AuxInt = 32
  1014			v.AddArg(v0)
  1015			v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
  1016			v1.AddArg(x)
  1017			v.AddArg(v1)
  1018			return true
  1019		}
  1020	}
  1021	func rewriteValueMIPS_OpClosureCall_0(v *Value) bool {
  1022		// match: (ClosureCall [argwid] entry closure mem)
  1023		// cond:
  1024		// result: (CALLclosure [argwid] entry closure mem)
  1025		for {
  1026			argwid := v.AuxInt
  1027			mem := v.Args[2]
  1028			entry := v.Args[0]
  1029			closure := v.Args[1]
  1030			v.reset(OpMIPSCALLclosure)
  1031			v.AuxInt = argwid
  1032			v.AddArg(entry)
  1033			v.AddArg(closure)
  1034			v.AddArg(mem)
  1035			return true
  1036		}
  1037	}
  1038	func rewriteValueMIPS_OpCom16_0(v *Value) bool {
  1039		// match: (Com16 x)
  1040		// cond:
  1041		// result: (NORconst [0] x)
  1042		for {
  1043			x := v.Args[0]
  1044			v.reset(OpMIPSNORconst)
  1045			v.AuxInt = 0
  1046			v.AddArg(x)
  1047			return true
  1048		}
  1049	}
  1050	func rewriteValueMIPS_OpCom32_0(v *Value) bool {
  1051		// match: (Com32 x)
  1052		// cond:
  1053		// result: (NORconst [0] x)
  1054		for {
  1055			x := v.Args[0]
  1056			v.reset(OpMIPSNORconst)
  1057			v.AuxInt = 0
  1058			v.AddArg(x)
  1059			return true
  1060		}
  1061	}
  1062	func rewriteValueMIPS_OpCom8_0(v *Value) bool {
  1063		// match: (Com8 x)
  1064		// cond:
  1065		// result: (NORconst [0] x)
  1066		for {
  1067			x := v.Args[0]
  1068			v.reset(OpMIPSNORconst)
  1069			v.AuxInt = 0
  1070			v.AddArg(x)
  1071			return true
  1072		}
  1073	}
  1074	func rewriteValueMIPS_OpConst16_0(v *Value) bool {
  1075		// match: (Const16 [val])
  1076		// cond:
  1077		// result: (MOVWconst [val])
  1078		for {
  1079			val := v.AuxInt
  1080			v.reset(OpMIPSMOVWconst)
  1081			v.AuxInt = val
  1082			return true
  1083		}
  1084	}
  1085	func rewriteValueMIPS_OpConst32_0(v *Value) bool {
  1086		// match: (Const32 [val])
  1087		// cond:
  1088		// result: (MOVWconst [val])
  1089		for {
  1090			val := v.AuxInt
  1091			v.reset(OpMIPSMOVWconst)
  1092			v.AuxInt = val
  1093			return true
  1094		}
  1095	}
  1096	func rewriteValueMIPS_OpConst32F_0(v *Value) bool {
  1097		// match: (Const32F [val])
  1098		// cond:
  1099		// result: (MOVFconst [val])
  1100		for {
  1101			val := v.AuxInt
  1102			v.reset(OpMIPSMOVFconst)
  1103			v.AuxInt = val
  1104			return true
  1105		}
  1106	}
  1107	func rewriteValueMIPS_OpConst64F_0(v *Value) bool {
  1108		// match: (Const64F [val])
  1109		// cond:
  1110		// result: (MOVDconst [val])
  1111		for {
  1112			val := v.AuxInt
  1113			v.reset(OpMIPSMOVDconst)
  1114			v.AuxInt = val
  1115			return true
  1116		}
  1117	}
  1118	func rewriteValueMIPS_OpConst8_0(v *Value) bool {
  1119		// match: (Const8 [val])
  1120		// cond:
  1121		// result: (MOVWconst [val])
  1122		for {
  1123			val := v.AuxInt
  1124			v.reset(OpMIPSMOVWconst)
  1125			v.AuxInt = val
  1126			return true
  1127		}
  1128	}
  1129	func rewriteValueMIPS_OpConstBool_0(v *Value) bool {
  1130		// match: (ConstBool [b])
  1131		// cond:
  1132		// result: (MOVWconst [b])
  1133		for {
  1134			b := v.AuxInt
  1135			v.reset(OpMIPSMOVWconst)
  1136			v.AuxInt = b
  1137			return true
  1138		}
  1139	}
  1140	func rewriteValueMIPS_OpConstNil_0(v *Value) bool {
  1141		// match: (ConstNil)
  1142		// cond:
  1143		// result: (MOVWconst [0])
  1144		for {
  1145			v.reset(OpMIPSMOVWconst)
  1146			v.AuxInt = 0
  1147			return true
  1148		}
  1149	}
  1150	func rewriteValueMIPS_OpCtz32_0(v *Value) bool {
  1151		b := v.Block
  1152		typ := &b.Func.Config.Types
  1153		// match: (Ctz32 <t> x)
  1154		// cond:
  1155		// result: (SUB (MOVWconst [32]) (CLZ <t> (SUBconst <t> [1] (AND <t> x (NEG <t> x)))))
  1156		for {
  1157			t := v.Type
  1158			x := v.Args[0]
  1159			v.reset(OpMIPSSUB)
  1160			v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  1161			v0.AuxInt = 32
  1162			v.AddArg(v0)
  1163			v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
  1164			v2 := b.NewValue0(v.Pos, OpMIPSSUBconst, t)
  1165			v2.AuxInt = 1
  1166			v3 := b.NewValue0(v.Pos, OpMIPSAND, t)
  1167			v3.AddArg(x)
  1168			v4 := b.NewValue0(v.Pos, OpMIPSNEG, t)
  1169			v4.AddArg(x)
  1170			v3.AddArg(v4)
  1171			v2.AddArg(v3)
  1172			v1.AddArg(v2)
  1173			v.AddArg(v1)
  1174			return true
  1175		}
  1176	}
  1177	func rewriteValueMIPS_OpCtz32NonZero_0(v *Value) bool {
  1178		// match: (Ctz32NonZero x)
  1179		// cond:
  1180		// result: (Ctz32 x)
  1181		for {
  1182			x := v.Args[0]
  1183			v.reset(OpCtz32)
  1184			v.AddArg(x)
  1185			return true
  1186		}
  1187	}
  1188	func rewriteValueMIPS_OpCvt32Fto32_0(v *Value) bool {
  1189		// match: (Cvt32Fto32 x)
  1190		// cond:
  1191		// result: (TRUNCFW x)
  1192		for {
  1193			x := v.Args[0]
  1194			v.reset(OpMIPSTRUNCFW)
  1195			v.AddArg(x)
  1196			return true
  1197		}
  1198	}
  1199	func rewriteValueMIPS_OpCvt32Fto64F_0(v *Value) bool {
  1200		// match: (Cvt32Fto64F x)
  1201		// cond:
  1202		// result: (MOVFD x)
  1203		for {
  1204			x := v.Args[0]
  1205			v.reset(OpMIPSMOVFD)
  1206			v.AddArg(x)
  1207			return true
  1208		}
  1209	}
  1210	func rewriteValueMIPS_OpCvt32to32F_0(v *Value) bool {
  1211		// match: (Cvt32to32F x)
  1212		// cond:
  1213		// result: (MOVWF x)
  1214		for {
  1215			x := v.Args[0]
  1216			v.reset(OpMIPSMOVWF)
  1217			v.AddArg(x)
  1218			return true
  1219		}
  1220	}
  1221	func rewriteValueMIPS_OpCvt32to64F_0(v *Value) bool {
  1222		// match: (Cvt32to64F x)
  1223		// cond:
  1224		// result: (MOVWD x)
  1225		for {
  1226			x := v.Args[0]
  1227			v.reset(OpMIPSMOVWD)
  1228			v.AddArg(x)
  1229			return true
  1230		}
  1231	}
  1232	func rewriteValueMIPS_OpCvt64Fto32_0(v *Value) bool {
  1233		// match: (Cvt64Fto32 x)
  1234		// cond:
  1235		// result: (TRUNCDW x)
  1236		for {
  1237			x := v.Args[0]
  1238			v.reset(OpMIPSTRUNCDW)
  1239			v.AddArg(x)
  1240			return true
  1241		}
  1242	}
  1243	func rewriteValueMIPS_OpCvt64Fto32F_0(v *Value) bool {
  1244		// match: (Cvt64Fto32F x)
  1245		// cond:
  1246		// result: (MOVDF x)
  1247		for {
  1248			x := v.Args[0]
  1249			v.reset(OpMIPSMOVDF)
  1250			v.AddArg(x)
  1251			return true
  1252		}
  1253	}
  1254	func rewriteValueMIPS_OpDiv16_0(v *Value) bool {
  1255		b := v.Block
  1256		typ := &b.Func.Config.Types
  1257		// match: (Div16 x y)
  1258		// cond:
  1259		// result: (Select1 (DIV (SignExt16to32 x) (SignExt16to32 y)))
  1260		for {
  1261			y := v.Args[1]
  1262			x := v.Args[0]
  1263			v.reset(OpSelect1)
  1264			v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  1265			v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1266			v1.AddArg(x)
  1267			v0.AddArg(v1)
  1268			v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1269			v2.AddArg(y)
  1270			v0.AddArg(v2)
  1271			v.AddArg(v0)
  1272			return true
  1273		}
  1274	}
  1275	func rewriteValueMIPS_OpDiv16u_0(v *Value) bool {
  1276		b := v.Block
  1277		typ := &b.Func.Config.Types
  1278		// match: (Div16u x y)
  1279		// cond:
  1280		// result: (Select1 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  1281		for {
  1282			y := v.Args[1]
  1283			x := v.Args[0]
  1284			v.reset(OpSelect1)
  1285			v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  1286			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1287			v1.AddArg(x)
  1288			v0.AddArg(v1)
  1289			v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1290			v2.AddArg(y)
  1291			v0.AddArg(v2)
  1292			v.AddArg(v0)
  1293			return true
  1294		}
  1295	}
  1296	func rewriteValueMIPS_OpDiv32_0(v *Value) bool {
  1297		b := v.Block
  1298		typ := &b.Func.Config.Types
  1299		// match: (Div32 x y)
  1300		// cond:
  1301		// result: (Select1 (DIV x y))
  1302		for {
  1303			y := v.Args[1]
  1304			x := v.Args[0]
  1305			v.reset(OpSelect1)
  1306			v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  1307			v0.AddArg(x)
  1308			v0.AddArg(y)
  1309			v.AddArg(v0)
  1310			return true
  1311		}
  1312	}
  1313	func rewriteValueMIPS_OpDiv32F_0(v *Value) bool {
  1314		// match: (Div32F x y)
  1315		// cond:
  1316		// result: (DIVF x y)
  1317		for {
  1318			y := v.Args[1]
  1319			x := v.Args[0]
  1320			v.reset(OpMIPSDIVF)
  1321			v.AddArg(x)
  1322			v.AddArg(y)
  1323			return true
  1324		}
  1325	}
  1326	func rewriteValueMIPS_OpDiv32u_0(v *Value) bool {
  1327		b := v.Block
  1328		typ := &b.Func.Config.Types
  1329		// match: (Div32u x y)
  1330		// cond:
  1331		// result: (Select1 (DIVU x y))
  1332		for {
  1333			y := v.Args[1]
  1334			x := v.Args[0]
  1335			v.reset(OpSelect1)
  1336			v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  1337			v0.AddArg(x)
  1338			v0.AddArg(y)
  1339			v.AddArg(v0)
  1340			return true
  1341		}
  1342	}
  1343	func rewriteValueMIPS_OpDiv64F_0(v *Value) bool {
  1344		// match: (Div64F x y)
  1345		// cond:
  1346		// result: (DIVD x y)
  1347		for {
  1348			y := v.Args[1]
  1349			x := v.Args[0]
  1350			v.reset(OpMIPSDIVD)
  1351			v.AddArg(x)
  1352			v.AddArg(y)
  1353			return true
  1354		}
  1355	}
  1356	func rewriteValueMIPS_OpDiv8_0(v *Value) bool {
  1357		b := v.Block
  1358		typ := &b.Func.Config.Types
  1359		// match: (Div8 x y)
  1360		// cond:
  1361		// result: (Select1 (DIV (SignExt8to32 x) (SignExt8to32 y)))
  1362		for {
  1363			y := v.Args[1]
  1364			x := v.Args[0]
  1365			v.reset(OpSelect1)
  1366			v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  1367			v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1368			v1.AddArg(x)
  1369			v0.AddArg(v1)
  1370			v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1371			v2.AddArg(y)
  1372			v0.AddArg(v2)
  1373			v.AddArg(v0)
  1374			return true
  1375		}
  1376	}
  1377	func rewriteValueMIPS_OpDiv8u_0(v *Value) bool {
  1378		b := v.Block
  1379		typ := &b.Func.Config.Types
  1380		// match: (Div8u x y)
  1381		// cond:
  1382		// result: (Select1 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  1383		for {
  1384			y := v.Args[1]
  1385			x := v.Args[0]
  1386			v.reset(OpSelect1)
  1387			v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  1388			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1389			v1.AddArg(x)
  1390			v0.AddArg(v1)
  1391			v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1392			v2.AddArg(y)
  1393			v0.AddArg(v2)
  1394			v.AddArg(v0)
  1395			return true
  1396		}
  1397	}
  1398	func rewriteValueMIPS_OpEq16_0(v *Value) bool {
  1399		b := v.Block
  1400		typ := &b.Func.Config.Types
  1401		// match: (Eq16 x y)
  1402		// cond:
  1403		// result: (SGTUconst [1] (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)))
  1404		for {
  1405			y := v.Args[1]
  1406			x := v.Args[0]
  1407			v.reset(OpMIPSSGTUconst)
  1408			v.AuxInt = 1
  1409			v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1410			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1411			v1.AddArg(x)
  1412			v0.AddArg(v1)
  1413			v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1414			v2.AddArg(y)
  1415			v0.AddArg(v2)
  1416			v.AddArg(v0)
  1417			return true
  1418		}
  1419	}
  1420	func rewriteValueMIPS_OpEq32_0(v *Value) bool {
  1421		b := v.Block
  1422		typ := &b.Func.Config.Types
  1423		// match: (Eq32 x y)
  1424		// cond:
  1425		// result: (SGTUconst [1] (XOR x y))
  1426		for {
  1427			y := v.Args[1]
  1428			x := v.Args[0]
  1429			v.reset(OpMIPSSGTUconst)
  1430			v.AuxInt = 1
  1431			v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1432			v0.AddArg(x)
  1433			v0.AddArg(y)
  1434			v.AddArg(v0)
  1435			return true
  1436		}
  1437	}
  1438	func rewriteValueMIPS_OpEq32F_0(v *Value) bool {
  1439		b := v.Block
  1440		// match: (Eq32F x y)
  1441		// cond:
  1442		// result: (FPFlagTrue (CMPEQF x y))
  1443		for {
  1444			y := v.Args[1]
  1445			x := v.Args[0]
  1446			v.reset(OpMIPSFPFlagTrue)
  1447			v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, types.TypeFlags)
  1448			v0.AddArg(x)
  1449			v0.AddArg(y)
  1450			v.AddArg(v0)
  1451			return true
  1452		}
  1453	}
  1454	func rewriteValueMIPS_OpEq64F_0(v *Value) bool {
  1455		b := v.Block
  1456		// match: (Eq64F x y)
  1457		// cond:
  1458		// result: (FPFlagTrue (CMPEQD x y))
  1459		for {
  1460			y := v.Args[1]
  1461			x := v.Args[0]
  1462			v.reset(OpMIPSFPFlagTrue)
  1463			v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, types.TypeFlags)
  1464			v0.AddArg(x)
  1465			v0.AddArg(y)
  1466			v.AddArg(v0)
  1467			return true
  1468		}
  1469	}
  1470	func rewriteValueMIPS_OpEq8_0(v *Value) bool {
  1471		b := v.Block
  1472		typ := &b.Func.Config.Types
  1473		// match: (Eq8 x y)
  1474		// cond:
  1475		// result: (SGTUconst [1] (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)))
  1476		for {
  1477			y := v.Args[1]
  1478			x := v.Args[0]
  1479			v.reset(OpMIPSSGTUconst)
  1480			v.AuxInt = 1
  1481			v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1482			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1483			v1.AddArg(x)
  1484			v0.AddArg(v1)
  1485			v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1486			v2.AddArg(y)
  1487			v0.AddArg(v2)
  1488			v.AddArg(v0)
  1489			return true
  1490		}
  1491	}
  1492	func rewriteValueMIPS_OpEqB_0(v *Value) bool {
  1493		b := v.Block
  1494		typ := &b.Func.Config.Types
  1495		// match: (EqB x y)
  1496		// cond:
  1497		// result: (XORconst [1] (XOR <typ.Bool> x y))
  1498		for {
  1499			y := v.Args[1]
  1500			x := v.Args[0]
  1501			v.reset(OpMIPSXORconst)
  1502			v.AuxInt = 1
  1503			v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.Bool)
  1504			v0.AddArg(x)
  1505			v0.AddArg(y)
  1506			v.AddArg(v0)
  1507			return true
  1508		}
  1509	}
  1510	func rewriteValueMIPS_OpEqPtr_0(v *Value) bool {
  1511		b := v.Block
  1512		typ := &b.Func.Config.Types
  1513		// match: (EqPtr x y)
  1514		// cond:
  1515		// result: (SGTUconst [1] (XOR x y))
  1516		for {
  1517			y := v.Args[1]
  1518			x := v.Args[0]
  1519			v.reset(OpMIPSSGTUconst)
  1520			v.AuxInt = 1
  1521			v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  1522			v0.AddArg(x)
  1523			v0.AddArg(y)
  1524			v.AddArg(v0)
  1525			return true
  1526		}
  1527	}
  1528	func rewriteValueMIPS_OpGeq16_0(v *Value) bool {
  1529		b := v.Block
  1530		typ := &b.Func.Config.Types
  1531		// match: (Geq16 x y)
  1532		// cond:
  1533		// result: (XORconst [1] (SGT (SignExt16to32 y) (SignExt16to32 x)))
  1534		for {
  1535			y := v.Args[1]
  1536			x := v.Args[0]
  1537			v.reset(OpMIPSXORconst)
  1538			v.AuxInt = 1
  1539			v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  1540			v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1541			v1.AddArg(y)
  1542			v0.AddArg(v1)
  1543			v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1544			v2.AddArg(x)
  1545			v0.AddArg(v2)
  1546			v.AddArg(v0)
  1547			return true
  1548		}
  1549	}
  1550	func rewriteValueMIPS_OpGeq16U_0(v *Value) bool {
  1551		b := v.Block
  1552		typ := &b.Func.Config.Types
  1553		// match: (Geq16U x y)
  1554		// cond:
  1555		// result: (XORconst [1] (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x)))
  1556		for {
  1557			y := v.Args[1]
  1558			x := v.Args[0]
  1559			v.reset(OpMIPSXORconst)
  1560			v.AuxInt = 1
  1561			v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  1562			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1563			v1.AddArg(y)
  1564			v0.AddArg(v1)
  1565			v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1566			v2.AddArg(x)
  1567			v0.AddArg(v2)
  1568			v.AddArg(v0)
  1569			return true
  1570		}
  1571	}
  1572	func rewriteValueMIPS_OpGeq32_0(v *Value) bool {
  1573		b := v.Block
  1574		typ := &b.Func.Config.Types
  1575		// match: (Geq32 x y)
  1576		// cond:
  1577		// result: (XORconst [1] (SGT y x))
  1578		for {
  1579			y := v.Args[1]
  1580			x := v.Args[0]
  1581			v.reset(OpMIPSXORconst)
  1582			v.AuxInt = 1
  1583			v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  1584			v0.AddArg(y)
  1585			v0.AddArg(x)
  1586			v.AddArg(v0)
  1587			return true
  1588		}
  1589	}
  1590	func rewriteValueMIPS_OpGeq32F_0(v *Value) bool {
  1591		b := v.Block
  1592		// match: (Geq32F x y)
  1593		// cond:
  1594		// result: (FPFlagTrue (CMPGEF x y))
  1595		for {
  1596			y := v.Args[1]
  1597			x := v.Args[0]
  1598			v.reset(OpMIPSFPFlagTrue)
  1599			v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, types.TypeFlags)
  1600			v0.AddArg(x)
  1601			v0.AddArg(y)
  1602			v.AddArg(v0)
  1603			return true
  1604		}
  1605	}
  1606	func rewriteValueMIPS_OpGeq32U_0(v *Value) bool {
  1607		b := v.Block
  1608		typ := &b.Func.Config.Types
  1609		// match: (Geq32U x y)
  1610		// cond:
  1611		// result: (XORconst [1] (SGTU y x))
  1612		for {
  1613			y := v.Args[1]
  1614			x := v.Args[0]
  1615			v.reset(OpMIPSXORconst)
  1616			v.AuxInt = 1
  1617			v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  1618			v0.AddArg(y)
  1619			v0.AddArg(x)
  1620			v.AddArg(v0)
  1621			return true
  1622		}
  1623	}
  1624	func rewriteValueMIPS_OpGeq64F_0(v *Value) bool {
  1625		b := v.Block
  1626		// match: (Geq64F x y)
  1627		// cond:
  1628		// result: (FPFlagTrue (CMPGED x y))
  1629		for {
  1630			y := v.Args[1]
  1631			x := v.Args[0]
  1632			v.reset(OpMIPSFPFlagTrue)
  1633			v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, types.TypeFlags)
  1634			v0.AddArg(x)
  1635			v0.AddArg(y)
  1636			v.AddArg(v0)
  1637			return true
  1638		}
  1639	}
  1640	func rewriteValueMIPS_OpGeq8_0(v *Value) bool {
  1641		b := v.Block
  1642		typ := &b.Func.Config.Types
  1643		// match: (Geq8 x y)
  1644		// cond:
  1645		// result: (XORconst [1] (SGT (SignExt8to32 y) (SignExt8to32 x)))
  1646		for {
  1647			y := v.Args[1]
  1648			x := v.Args[0]
  1649			v.reset(OpMIPSXORconst)
  1650			v.AuxInt = 1
  1651			v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  1652			v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1653			v1.AddArg(y)
  1654			v0.AddArg(v1)
  1655			v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1656			v2.AddArg(x)
  1657			v0.AddArg(v2)
  1658			v.AddArg(v0)
  1659			return true
  1660		}
  1661	}
  1662	func rewriteValueMIPS_OpGeq8U_0(v *Value) bool {
  1663		b := v.Block
  1664		typ := &b.Func.Config.Types
  1665		// match: (Geq8U x y)
  1666		// cond:
  1667		// result: (XORconst [1] (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x)))
  1668		for {
  1669			y := v.Args[1]
  1670			x := v.Args[0]
  1671			v.reset(OpMIPSXORconst)
  1672			v.AuxInt = 1
  1673			v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  1674			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1675			v1.AddArg(y)
  1676			v0.AddArg(v1)
  1677			v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1678			v2.AddArg(x)
  1679			v0.AddArg(v2)
  1680			v.AddArg(v0)
  1681			return true
  1682		}
  1683	}
  1684	func rewriteValueMIPS_OpGetCallerPC_0(v *Value) bool {
  1685		// match: (GetCallerPC)
  1686		// cond:
  1687		// result: (LoweredGetCallerPC)
  1688		for {
  1689			v.reset(OpMIPSLoweredGetCallerPC)
  1690			return true
  1691		}
  1692	}
  1693	func rewriteValueMIPS_OpGetCallerSP_0(v *Value) bool {
  1694		// match: (GetCallerSP)
  1695		// cond:
  1696		// result: (LoweredGetCallerSP)
  1697		for {
  1698			v.reset(OpMIPSLoweredGetCallerSP)
  1699			return true
  1700		}
  1701	}
  1702	func rewriteValueMIPS_OpGetClosurePtr_0(v *Value) bool {
  1703		// match: (GetClosurePtr)
  1704		// cond:
  1705		// result: (LoweredGetClosurePtr)
  1706		for {
  1707			v.reset(OpMIPSLoweredGetClosurePtr)
  1708			return true
  1709		}
  1710	}
  1711	func rewriteValueMIPS_OpGreater16_0(v *Value) bool {
  1712		b := v.Block
  1713		typ := &b.Func.Config.Types
  1714		// match: (Greater16 x y)
  1715		// cond:
  1716		// result: (SGT (SignExt16to32 x) (SignExt16to32 y))
  1717		for {
  1718			y := v.Args[1]
  1719			x := v.Args[0]
  1720			v.reset(OpMIPSSGT)
  1721			v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1722			v0.AddArg(x)
  1723			v.AddArg(v0)
  1724			v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1725			v1.AddArg(y)
  1726			v.AddArg(v1)
  1727			return true
  1728		}
  1729	}
  1730	func rewriteValueMIPS_OpGreater16U_0(v *Value) bool {
  1731		b := v.Block
  1732		typ := &b.Func.Config.Types
  1733		// match: (Greater16U x y)
  1734		// cond:
  1735		// result: (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y))
  1736		for {
  1737			y := v.Args[1]
  1738			x := v.Args[0]
  1739			v.reset(OpMIPSSGTU)
  1740			v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1741			v0.AddArg(x)
  1742			v.AddArg(v0)
  1743			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1744			v1.AddArg(y)
  1745			v.AddArg(v1)
  1746			return true
  1747		}
  1748	}
  1749	func rewriteValueMIPS_OpGreater32_0(v *Value) bool {
  1750		// match: (Greater32 x y)
  1751		// cond:
  1752		// result: (SGT x y)
  1753		for {
  1754			y := v.Args[1]
  1755			x := v.Args[0]
  1756			v.reset(OpMIPSSGT)
  1757			v.AddArg(x)
  1758			v.AddArg(y)
  1759			return true
  1760		}
  1761	}
  1762	func rewriteValueMIPS_OpGreater32F_0(v *Value) bool {
  1763		b := v.Block
  1764		// match: (Greater32F x y)
  1765		// cond:
  1766		// result: (FPFlagTrue (CMPGTF x y))
  1767		for {
  1768			y := v.Args[1]
  1769			x := v.Args[0]
  1770			v.reset(OpMIPSFPFlagTrue)
  1771			v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, types.TypeFlags)
  1772			v0.AddArg(x)
  1773			v0.AddArg(y)
  1774			v.AddArg(v0)
  1775			return true
  1776		}
  1777	}
  1778	func rewriteValueMIPS_OpGreater32U_0(v *Value) bool {
  1779		// match: (Greater32U x y)
  1780		// cond:
  1781		// result: (SGTU x y)
  1782		for {
  1783			y := v.Args[1]
  1784			x := v.Args[0]
  1785			v.reset(OpMIPSSGTU)
  1786			v.AddArg(x)
  1787			v.AddArg(y)
  1788			return true
  1789		}
  1790	}
  1791	func rewriteValueMIPS_OpGreater64F_0(v *Value) bool {
  1792		b := v.Block
  1793		// match: (Greater64F x y)
  1794		// cond:
  1795		// result: (FPFlagTrue (CMPGTD x y))
  1796		for {
  1797			y := v.Args[1]
  1798			x := v.Args[0]
  1799			v.reset(OpMIPSFPFlagTrue)
  1800			v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, types.TypeFlags)
  1801			v0.AddArg(x)
  1802			v0.AddArg(y)
  1803			v.AddArg(v0)
  1804			return true
  1805		}
  1806	}
  1807	func rewriteValueMIPS_OpGreater8_0(v *Value) bool {
  1808		b := v.Block
  1809		typ := &b.Func.Config.Types
  1810		// match: (Greater8 x y)
  1811		// cond:
  1812		// result: (SGT (SignExt8to32 x) (SignExt8to32 y))
  1813		for {
  1814			y := v.Args[1]
  1815			x := v.Args[0]
  1816			v.reset(OpMIPSSGT)
  1817			v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1818			v0.AddArg(x)
  1819			v.AddArg(v0)
  1820			v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  1821			v1.AddArg(y)
  1822			v.AddArg(v1)
  1823			return true
  1824		}
  1825	}
  1826	func rewriteValueMIPS_OpGreater8U_0(v *Value) bool {
  1827		b := v.Block
  1828		typ := &b.Func.Config.Types
  1829		// match: (Greater8U x y)
  1830		// cond:
  1831		// result: (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y))
  1832		for {
  1833			y := v.Args[1]
  1834			x := v.Args[0]
  1835			v.reset(OpMIPSSGTU)
  1836			v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1837			v0.AddArg(x)
  1838			v.AddArg(v0)
  1839			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  1840			v1.AddArg(y)
  1841			v.AddArg(v1)
  1842			return true
  1843		}
  1844	}
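// Hmul32/Hmul32u return the upper half of the 64-bit product: MULT/MULTU
// yield a two-element tuple and Select0 extracts element 0, the high word
// (the HI register).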
  1845	func rewriteValueMIPS_OpHmul32_0(v *Value) bool {
  1846		b := v.Block
  1847		typ := &b.Func.Config.Types
  1848		// match: (Hmul32 x y)
  1849		// cond:
  1850		// result: (Select0 (MULT x y))
  1851		for {
  1852			y := v.Args[1]
  1853			x := v.Args[0]
  1854			v.reset(OpSelect0)
  1855			v0 := b.NewValue0(v.Pos, OpMIPSMULT, types.NewTuple(typ.Int32, typ.Int32))
  1856			v0.AddArg(x)
  1857			v0.AddArg(y)
  1858			v.AddArg(v0)
  1859			return true
  1860		}
  1861	}
  1862	func rewriteValueMIPS_OpHmul32u_0(v *Value) bool {
  1863		b := v.Block
  1864		typ := &b.Func.Config.Types
  1865		// match: (Hmul32u x y)
  1866		// cond:
  1867		// result: (Select0 (MULTU x y))
  1868		for {
  1869			y := v.Args[1]
  1870			x := v.Args[0]
  1871			v.reset(OpSelect0)
  1872			v0 := b.NewValue0(v.Pos, OpMIPSMULTU, types.NewTuple(typ.UInt32, typ.UInt32))
  1873			v0.AddArg(x)
  1874			v0.AddArg(y)
  1875			v.AddArg(v0)
  1876			return true
  1877		}
  1878	}
  1879	func rewriteValueMIPS_OpInterCall_0(v *Value) bool {
  1880		// match: (InterCall [argwid] entry mem)
  1881		// cond:
  1882		// result: (CALLinter [argwid] entry mem)
  1883		for {
  1884			argwid := v.AuxInt
  1885			mem := v.Args[1]
  1886			entry := v.Args[0]
  1887			v.reset(OpMIPSCALLinter)
  1888			v.AuxInt = argwid
  1889			v.AddArg(entry)
  1890			v.AddArg(mem)
  1891			return true
  1892		}
  1893	}
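// Bounds and nil checks lower to unsigned compares: IsInBounds is len >u idx,
// IsNonNil compares the pointer against a zero constant, and IsSliceInBounds
// is the negation of idx >u len, expressed as XORconst [1] of the SGTU result.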
  1894	func rewriteValueMIPS_OpIsInBounds_0(v *Value) bool {
  1895		// match: (IsInBounds idx len)
  1896		// cond:
  1897		// result: (SGTU len idx)
  1898		for {
  1899			len := v.Args[1]
  1900			idx := v.Args[0]
  1901			v.reset(OpMIPSSGTU)
  1902			v.AddArg(len)
  1903			v.AddArg(idx)
  1904			return true
  1905		}
  1906	}
  1907	func rewriteValueMIPS_OpIsNonNil_0(v *Value) bool {
  1908		b := v.Block
  1909		typ := &b.Func.Config.Types
  1910		// match: (IsNonNil ptr)
  1911		// cond:
  1912		// result: (SGTU ptr (MOVWconst [0]))
  1913		for {
  1914			ptr := v.Args[0]
  1915			v.reset(OpMIPSSGTU)
  1916			v.AddArg(ptr)
  1917			v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  1918			v0.AuxInt = 0
  1919			v.AddArg(v0)
  1920			return true
  1921		}
  1922	}
  1923	func rewriteValueMIPS_OpIsSliceInBounds_0(v *Value) bool {
  1924		b := v.Block
  1925		typ := &b.Func.Config.Types
  1926		// match: (IsSliceInBounds idx len)
  1927		// cond:
  1928		// result: (XORconst [1] (SGTU idx len))
  1929		for {
  1930			len := v.Args[1]
  1931			idx := v.Args[0]
  1932			v.reset(OpMIPSXORconst)
  1933			v.AuxInt = 1
  1934			v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  1935			v0.AddArg(idx)
  1936			v0.AddArg(len)
  1937			v.AddArg(v0)
  1938			return true
  1939		}
  1940	}
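// Leq* has no direct instruction, so x <= y is computed as !(x > y) by
// XOR-ing the SGT/SGTU result with 1; the floating-point variants instead
// swap the operands into CMPGEF/CMPGED (y >= x).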
  1941	func rewriteValueMIPS_OpLeq16_0(v *Value) bool {
  1942		b := v.Block
  1943		typ := &b.Func.Config.Types
  1944		// match: (Leq16 x y)
  1945		// cond:
  1946		// result: (XORconst [1] (SGT (SignExt16to32 x) (SignExt16to32 y)))
  1947		for {
  1948			y := v.Args[1]
  1949			x := v.Args[0]
  1950			v.reset(OpMIPSXORconst)
  1951			v.AuxInt = 1
  1952			v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  1953			v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1954			v1.AddArg(x)
  1955			v0.AddArg(v1)
  1956			v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  1957			v2.AddArg(y)
  1958			v0.AddArg(v2)
  1959			v.AddArg(v0)
  1960			return true
  1961		}
  1962	}
  1963	func rewriteValueMIPS_OpLeq16U_0(v *Value) bool {
  1964		b := v.Block
  1965		typ := &b.Func.Config.Types
  1966		// match: (Leq16U x y)
  1967		// cond:
  1968		// result: (XORconst [1] (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  1969		for {
  1970			y := v.Args[1]
  1971			x := v.Args[0]
  1972			v.reset(OpMIPSXORconst)
  1973			v.AuxInt = 1
  1974			v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  1975			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1976			v1.AddArg(x)
  1977			v0.AddArg(v1)
  1978			v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  1979			v2.AddArg(y)
  1980			v0.AddArg(v2)
  1981			v.AddArg(v0)
  1982			return true
  1983		}
  1984	}
  1985	func rewriteValueMIPS_OpLeq32_0(v *Value) bool {
  1986		b := v.Block
  1987		typ := &b.Func.Config.Types
  1988		// match: (Leq32 x y)
  1989		// cond:
  1990		// result: (XORconst [1] (SGT x y))
  1991		for {
  1992			y := v.Args[1]
  1993			x := v.Args[0]
  1994			v.reset(OpMIPSXORconst)
  1995			v.AuxInt = 1
  1996			v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  1997			v0.AddArg(x)
  1998			v0.AddArg(y)
  1999			v.AddArg(v0)
  2000			return true
  2001		}
  2002	}
  2003	func rewriteValueMIPS_OpLeq32F_0(v *Value) bool {
  2004		b := v.Block
  2005		// match: (Leq32F x y)
  2006		// cond:
  2007		// result: (FPFlagTrue (CMPGEF y x))
  2008		for {
  2009			y := v.Args[1]
  2010			x := v.Args[0]
  2011			v.reset(OpMIPSFPFlagTrue)
  2012			v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, types.TypeFlags)
  2013			v0.AddArg(y)
  2014			v0.AddArg(x)
  2015			v.AddArg(v0)
  2016			return true
  2017		}
  2018	}
  2019	func rewriteValueMIPS_OpLeq32U_0(v *Value) bool {
  2020		b := v.Block
  2021		typ := &b.Func.Config.Types
  2022		// match: (Leq32U x y)
  2023		// cond:
  2024		// result: (XORconst [1] (SGTU x y))
  2025		for {
  2026			y := v.Args[1]
  2027			x := v.Args[0]
  2028			v.reset(OpMIPSXORconst)
  2029			v.AuxInt = 1
  2030			v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  2031			v0.AddArg(x)
  2032			v0.AddArg(y)
  2033			v.AddArg(v0)
  2034			return true
  2035		}
  2036	}
  2037	func rewriteValueMIPS_OpLeq64F_0(v *Value) bool {
  2038		b := v.Block
  2039		// match: (Leq64F x y)
  2040		// cond:
  2041		// result: (FPFlagTrue (CMPGED y x))
  2042		for {
  2043			y := v.Args[1]
  2044			x := v.Args[0]
  2045			v.reset(OpMIPSFPFlagTrue)
  2046			v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, types.TypeFlags)
  2047			v0.AddArg(y)
  2048			v0.AddArg(x)
  2049			v.AddArg(v0)
  2050			return true
  2051		}
  2052	}
  2053	func rewriteValueMIPS_OpLeq8_0(v *Value) bool {
  2054		b := v.Block
  2055		typ := &b.Func.Config.Types
  2056		// match: (Leq8 x y)
  2057		// cond:
  2058		// result: (XORconst [1] (SGT (SignExt8to32 x) (SignExt8to32 y)))
  2059		for {
  2060			y := v.Args[1]
  2061			x := v.Args[0]
  2062			v.reset(OpMIPSXORconst)
  2063			v.AuxInt = 1
  2064			v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool)
  2065			v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2066			v1.AddArg(x)
  2067			v0.AddArg(v1)
  2068			v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2069			v2.AddArg(y)
  2070			v0.AddArg(v2)
  2071			v.AddArg(v0)
  2072			return true
  2073		}
  2074	}
  2075	func rewriteValueMIPS_OpLeq8U_0(v *Value) bool {
  2076		b := v.Block
  2077		typ := &b.Func.Config.Types
  2078		// match: (Leq8U x y)
  2079		// cond:
  2080		// result: (XORconst [1] (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  2081		for {
  2082			y := v.Args[1]
  2083			x := v.Args[0]
  2084			v.reset(OpMIPSXORconst)
  2085			v.AuxInt = 1
  2086			v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  2087			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2088			v1.AddArg(x)
  2089			v0.AddArg(v1)
  2090			v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2091			v2.AddArg(y)
  2092			v0.AddArg(v2)
  2093			v.AddArg(v0)
  2094			return true
  2095		}
  2096	}
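// Less* reuses the Greater* lowering with the operands swapped: x < y
// becomes SGT/SGTU y x, and the floating-point variants become
// CMPGTF/CMPGTD y x.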
  2097	func rewriteValueMIPS_OpLess16_0(v *Value) bool {
  2098		b := v.Block
  2099		typ := &b.Func.Config.Types
  2100		// match: (Less16 x y)
  2101		// cond:
  2102		// result: (SGT (SignExt16to32 y) (SignExt16to32 x))
  2103		for {
  2104			y := v.Args[1]
  2105			x := v.Args[0]
  2106			v.reset(OpMIPSSGT)
  2107			v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2108			v0.AddArg(y)
  2109			v.AddArg(v0)
  2110			v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  2111			v1.AddArg(x)
  2112			v.AddArg(v1)
  2113			return true
  2114		}
  2115	}
  2116	func rewriteValueMIPS_OpLess16U_0(v *Value) bool {
  2117		b := v.Block
  2118		typ := &b.Func.Config.Types
  2119		// match: (Less16U x y)
  2120		// cond:
  2121		// result: (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x))
  2122		for {
  2123			y := v.Args[1]
  2124			x := v.Args[0]
  2125			v.reset(OpMIPSSGTU)
  2126			v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2127			v0.AddArg(y)
  2128			v.AddArg(v0)
  2129			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2130			v1.AddArg(x)
  2131			v.AddArg(v1)
  2132			return true
  2133		}
  2134	}
  2135	func rewriteValueMIPS_OpLess32_0(v *Value) bool {
  2136		// match: (Less32 x y)
  2137		// cond:
  2138		// result: (SGT y x)
  2139		for {
  2140			y := v.Args[1]
  2141			x := v.Args[0]
  2142			v.reset(OpMIPSSGT)
  2143			v.AddArg(y)
  2144			v.AddArg(x)
  2145			return true
  2146		}
  2147	}
  2148	func rewriteValueMIPS_OpLess32F_0(v *Value) bool {
  2149		b := v.Block
  2150		// match: (Less32F x y)
  2151		// cond:
  2152		// result: (FPFlagTrue (CMPGTF y x))
  2153		for {
  2154			y := v.Args[1]
  2155			x := v.Args[0]
  2156			v.reset(OpMIPSFPFlagTrue)
  2157			v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, types.TypeFlags)
  2158			v0.AddArg(y)
  2159			v0.AddArg(x)
  2160			v.AddArg(v0)
  2161			return true
  2162		}
  2163	}
  2164	func rewriteValueMIPS_OpLess32U_0(v *Value) bool {
  2165		// match: (Less32U x y)
  2166		// cond:
  2167		// result: (SGTU y x)
  2168		for {
  2169			y := v.Args[1]
  2170			x := v.Args[0]
  2171			v.reset(OpMIPSSGTU)
  2172			v.AddArg(y)
  2173			v.AddArg(x)
  2174			return true
  2175		}
  2176	}
  2177	func rewriteValueMIPS_OpLess64F_0(v *Value) bool {
  2178		b := v.Block
  2179		// match: (Less64F x y)
  2180		// cond:
  2181		// result: (FPFlagTrue (CMPGTD y x))
  2182		for {
  2183			y := v.Args[1]
  2184			x := v.Args[0]
  2185			v.reset(OpMIPSFPFlagTrue)
  2186			v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, types.TypeFlags)
  2187			v0.AddArg(y)
  2188			v0.AddArg(x)
  2189			v.AddArg(v0)
  2190			return true
  2191		}
  2192	}
  2193	func rewriteValueMIPS_OpLess8_0(v *Value) bool {
  2194		b := v.Block
  2195		typ := &b.Func.Config.Types
  2196		// match: (Less8 x y)
  2197		// cond:
  2198		// result: (SGT (SignExt8to32 y) (SignExt8to32 x))
  2199		for {
  2200			y := v.Args[1]
  2201			x := v.Args[0]
  2202			v.reset(OpMIPSSGT)
  2203			v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2204			v0.AddArg(y)
  2205			v.AddArg(v0)
  2206			v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  2207			v1.AddArg(x)
  2208			v.AddArg(v1)
  2209			return true
  2210		}
  2211	}
  2212	func rewriteValueMIPS_OpLess8U_0(v *Value) bool {
  2213		b := v.Block
  2214		typ := &b.Func.Config.Types
  2215		// match: (Less8U x y)
  2216		// cond:
  2217		// result: (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x))
  2218		for {
  2219			y := v.Args[1]
  2220			x := v.Args[0]
  2221			v.reset(OpMIPSSGTU)
  2222			v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2223			v0.AddArg(y)
  2224			v.AddArg(v0)
  2225			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2226			v1.AddArg(x)
  2227			v.AddArg(v1)
  2228			return true
  2229		}
  2230	}
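// The generic Load dispatches on the result type: booleans and unsigned
// sub-word integers use the zero-extending MOVBU/MOVHU loads, signed
// sub-word integers use MOVB/MOVH, 32-bit integers and pointers use MOVW,
// and floats use MOVF (32-bit) or MOVD (64-bit).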
  2231	func rewriteValueMIPS_OpLoad_0(v *Value) bool {
  2232		// match: (Load <t> ptr mem)
  2233		// cond: t.IsBoolean()
  2234		// result: (MOVBUload ptr mem)
  2235		for {
  2236			t := v.Type
  2237			mem := v.Args[1]
  2238			ptr := v.Args[0]
  2239			if !(t.IsBoolean()) {
  2240				break
  2241			}
  2242			v.reset(OpMIPSMOVBUload)
  2243			v.AddArg(ptr)
  2244			v.AddArg(mem)
  2245			return true
  2246		}
  2247		// match: (Load <t> ptr mem)
  2248		// cond: (is8BitInt(t) && isSigned(t))
  2249		// result: (MOVBload ptr mem)
  2250		for {
  2251			t := v.Type
  2252			mem := v.Args[1]
  2253			ptr := v.Args[0]
  2254			if !(is8BitInt(t) && isSigned(t)) {
  2255				break
  2256			}
  2257			v.reset(OpMIPSMOVBload)
  2258			v.AddArg(ptr)
  2259			v.AddArg(mem)
  2260			return true
  2261		}
  2262		// match: (Load <t> ptr mem)
  2263		// cond: (is8BitInt(t) && !isSigned(t))
  2264		// result: (MOVBUload ptr mem)
  2265		for {
  2266			t := v.Type
  2267			mem := v.Args[1]
  2268			ptr := v.Args[0]
  2269			if !(is8BitInt(t) && !isSigned(t)) {
  2270				break
  2271			}
  2272			v.reset(OpMIPSMOVBUload)
  2273			v.AddArg(ptr)
  2274			v.AddArg(mem)
  2275			return true
  2276		}
  2277		// match: (Load <t> ptr mem)
  2278		// cond: (is16BitInt(t) && isSigned(t))
  2279		// result: (MOVHload ptr mem)
  2280		for {
  2281			t := v.Type
  2282			mem := v.Args[1]
  2283			ptr := v.Args[0]
  2284			if !(is16BitInt(t) && isSigned(t)) {
  2285				break
  2286			}
  2287			v.reset(OpMIPSMOVHload)
  2288			v.AddArg(ptr)
  2289			v.AddArg(mem)
  2290			return true
  2291		}
  2292		// match: (Load <t> ptr mem)
  2293		// cond: (is16BitInt(t) && !isSigned(t))
  2294		// result: (MOVHUload ptr mem)
  2295		for {
  2296			t := v.Type
  2297			mem := v.Args[1]
  2298			ptr := v.Args[0]
  2299			if !(is16BitInt(t) && !isSigned(t)) {
  2300				break
  2301			}
  2302			v.reset(OpMIPSMOVHUload)
  2303			v.AddArg(ptr)
  2304			v.AddArg(mem)
  2305			return true
  2306		}
  2307		// match: (Load <t> ptr mem)
  2308		// cond: (is32BitInt(t) || isPtr(t))
  2309		// result: (MOVWload ptr mem)
  2310		for {
  2311			t := v.Type
  2312			mem := v.Args[1]
  2313			ptr := v.Args[0]
  2314			if !(is32BitInt(t) || isPtr(t)) {
  2315				break
  2316			}
  2317			v.reset(OpMIPSMOVWload)
  2318			v.AddArg(ptr)
  2319			v.AddArg(mem)
  2320			return true
  2321		}
  2322		// match: (Load <t> ptr mem)
  2323		// cond: is32BitFloat(t)
  2324		// result: (MOVFload ptr mem)
  2325		for {
  2326			t := v.Type
  2327			mem := v.Args[1]
  2328			ptr := v.Args[0]
  2329			if !(is32BitFloat(t)) {
  2330				break
  2331			}
  2332			v.reset(OpMIPSMOVFload)
  2333			v.AddArg(ptr)
  2334			v.AddArg(mem)
  2335			return true
  2336		}
  2337		// match: (Load <t> ptr mem)
  2338		// cond: is64BitFloat(t)
  2339		// result: (MOVDload ptr mem)
  2340		for {
  2341			t := v.Type
  2342			mem := v.Args[1]
  2343			ptr := v.Args[0]
  2344			if !(is64BitFloat(t)) {
  2345				break
  2346			}
  2347			v.reset(OpMIPSMOVDload)
  2348			v.AddArg(ptr)
  2349			v.AddArg(mem)
  2350			return true
  2351		}
  2352		return false
  2353	}
  2354	func rewriteValueMIPS_OpLocalAddr_0(v *Value) bool {
  2355		// match: (LocalAddr {sym} base _)
  2356		// cond:
  2357		// result: (MOVWaddr {sym} base)
  2358		for {
  2359			sym := v.Aux
  2360			_ = v.Args[1]
  2361			base := v.Args[0]
  2362			v.reset(OpMIPSMOVWaddr)
  2363			v.Aux = sym
  2364			v.AddArg(base)
  2365			return true
  2366		}
  2367	}
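// Shift lowering: Go requires a left shift by a count >= the operand width
// to produce 0, while the MIPS shift instructions use only the low 5 bits
// of the count. Variable shifts are therefore wrapped in CMOVZ with an
// SGTUconst [32] guard that forces the result to 0 once the count reaches
// 32 (for the narrower widths only the low bits of the result matter, so a
// single guard at 32 suffices); shifts by a 64-bit constant are resolved
// here to SLLconst or to a constant 0.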
  2368	func rewriteValueMIPS_OpLsh16x16_0(v *Value) bool {
  2369		b := v.Block
  2370		typ := &b.Func.Config.Types
  2371		// match: (Lsh16x16 <t> x y)
  2372		// cond:
  2373		// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2374		for {
  2375			t := v.Type
  2376			y := v.Args[1]
  2377			x := v.Args[0]
  2378			v.reset(OpMIPSCMOVZ)
  2379			v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2380			v0.AddArg(x)
  2381			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2382			v1.AddArg(y)
  2383			v0.AddArg(v1)
  2384			v.AddArg(v0)
  2385			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2386			v2.AuxInt = 0
  2387			v.AddArg(v2)
  2388			v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2389			v3.AuxInt = 32
  2390			v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2391			v4.AddArg(y)
  2392			v3.AddArg(v4)
  2393			v.AddArg(v3)
  2394			return true
  2395		}
  2396	}
  2397	func rewriteValueMIPS_OpLsh16x32_0(v *Value) bool {
  2398		b := v.Block
  2399		typ := &b.Func.Config.Types
  2400		// match: (Lsh16x32 <t> x y)
  2401		// cond:
  2402		// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2403		for {
  2404			t := v.Type
  2405			y := v.Args[1]
  2406			x := v.Args[0]
  2407			v.reset(OpMIPSCMOVZ)
  2408			v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2409			v0.AddArg(x)
  2410			v0.AddArg(y)
  2411			v.AddArg(v0)
  2412			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2413			v1.AuxInt = 0
  2414			v.AddArg(v1)
  2415			v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2416			v2.AuxInt = 32
  2417			v2.AddArg(y)
  2418			v.AddArg(v2)
  2419			return true
  2420		}
  2421	}
  2422	func rewriteValueMIPS_OpLsh16x64_0(v *Value) bool {
  2423		// match: (Lsh16x64 x (Const64 [c]))
  2424		// cond: uint32(c) < 16
  2425		// result: (SLLconst x [c])
  2426		for {
  2427			_ = v.Args[1]
  2428			x := v.Args[0]
  2429			v_1 := v.Args[1]
  2430			if v_1.Op != OpConst64 {
  2431				break
  2432			}
  2433			c := v_1.AuxInt
  2434			if !(uint32(c) < 16) {
  2435				break
  2436			}
  2437			v.reset(OpMIPSSLLconst)
  2438			v.AuxInt = c
  2439			v.AddArg(x)
  2440			return true
  2441		}
  2442		// match: (Lsh16x64 _ (Const64 [c]))
  2443		// cond: uint32(c) >= 16
  2444		// result: (MOVWconst [0])
  2445		for {
  2446			_ = v.Args[1]
  2447			v_1 := v.Args[1]
  2448			if v_1.Op != OpConst64 {
  2449				break
  2450			}
  2451			c := v_1.AuxInt
  2452			if !(uint32(c) >= 16) {
  2453				break
  2454			}
  2455			v.reset(OpMIPSMOVWconst)
  2456			v.AuxInt = 0
  2457			return true
  2458		}
  2459		return false
  2460	}
  2461	func rewriteValueMIPS_OpLsh16x8_0(v *Value) bool {
  2462		b := v.Block
  2463		typ := &b.Func.Config.Types
  2464		// match: (Lsh16x8 <t> x y)
  2465		// cond:
  2466		// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2467		for {
  2468			t := v.Type
  2469			y := v.Args[1]
  2470			x := v.Args[0]
  2471			v.reset(OpMIPSCMOVZ)
  2472			v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2473			v0.AddArg(x)
  2474			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2475			v1.AddArg(y)
  2476			v0.AddArg(v1)
  2477			v.AddArg(v0)
  2478			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2479			v2.AuxInt = 0
  2480			v.AddArg(v2)
  2481			v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2482			v3.AuxInt = 32
  2483			v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2484			v4.AddArg(y)
  2485			v3.AddArg(v4)
  2486			v.AddArg(v3)
  2487			return true
  2488		}
  2489	}
  2490	func rewriteValueMIPS_OpLsh32x16_0(v *Value) bool {
  2491		b := v.Block
  2492		typ := &b.Func.Config.Types
  2493		// match: (Lsh32x16 <t> x y)
  2494		// cond:
  2495		// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2496		for {
  2497			t := v.Type
  2498			y := v.Args[1]
  2499			x := v.Args[0]
  2500			v.reset(OpMIPSCMOVZ)
  2501			v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2502			v0.AddArg(x)
  2503			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2504			v1.AddArg(y)
  2505			v0.AddArg(v1)
  2506			v.AddArg(v0)
  2507			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2508			v2.AuxInt = 0
  2509			v.AddArg(v2)
  2510			v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2511			v3.AuxInt = 32
  2512			v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2513			v4.AddArg(y)
  2514			v3.AddArg(v4)
  2515			v.AddArg(v3)
  2516			return true
  2517		}
  2518	}
  2519	func rewriteValueMIPS_OpLsh32x32_0(v *Value) bool {
  2520		b := v.Block
  2521		typ := &b.Func.Config.Types
  2522		// match: (Lsh32x32 <t> x y)
  2523		// cond:
  2524		// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2525		for {
  2526			t := v.Type
  2527			y := v.Args[1]
  2528			x := v.Args[0]
  2529			v.reset(OpMIPSCMOVZ)
  2530			v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2531			v0.AddArg(x)
  2532			v0.AddArg(y)
  2533			v.AddArg(v0)
  2534			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2535			v1.AuxInt = 0
  2536			v.AddArg(v1)
  2537			v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2538			v2.AuxInt = 32
  2539			v2.AddArg(y)
  2540			v.AddArg(v2)
  2541			return true
  2542		}
  2543	}
  2544	func rewriteValueMIPS_OpLsh32x64_0(v *Value) bool {
  2545		// match: (Lsh32x64 x (Const64 [c]))
  2546		// cond: uint32(c) < 32
  2547		// result: (SLLconst x [c])
  2548		for {
  2549			_ = v.Args[1]
  2550			x := v.Args[0]
  2551			v_1 := v.Args[1]
  2552			if v_1.Op != OpConst64 {
  2553				break
  2554			}
  2555			c := v_1.AuxInt
  2556			if !(uint32(c) < 32) {
  2557				break
  2558			}
  2559			v.reset(OpMIPSSLLconst)
  2560			v.AuxInt = c
  2561			v.AddArg(x)
  2562			return true
  2563		}
  2564		// match: (Lsh32x64 _ (Const64 [c]))
  2565		// cond: uint32(c) >= 32
  2566		// result: (MOVWconst [0])
  2567		for {
  2568			_ = v.Args[1]
  2569			v_1 := v.Args[1]
  2570			if v_1.Op != OpConst64 {
  2571				break
  2572			}
  2573			c := v_1.AuxInt
  2574			if !(uint32(c) >= 32) {
  2575				break
  2576			}
  2577			v.reset(OpMIPSMOVWconst)
  2578			v.AuxInt = 0
  2579			return true
  2580		}
  2581		return false
  2582	}
  2583	func rewriteValueMIPS_OpLsh32x8_0(v *Value) bool {
  2584		b := v.Block
  2585		typ := &b.Func.Config.Types
  2586		// match: (Lsh32x8 <t> x y)
  2587		// cond:
  2588		// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2589		for {
  2590			t := v.Type
  2591			y := v.Args[1]
  2592			x := v.Args[0]
  2593			v.reset(OpMIPSCMOVZ)
  2594			v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2595			v0.AddArg(x)
  2596			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2597			v1.AddArg(y)
  2598			v0.AddArg(v1)
  2599			v.AddArg(v0)
  2600			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2601			v2.AuxInt = 0
  2602			v.AddArg(v2)
  2603			v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2604			v3.AuxInt = 32
  2605			v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2606			v4.AddArg(y)
  2607			v3.AddArg(v4)
  2608			v.AddArg(v3)
  2609			return true
  2610		}
  2611	}
  2612	func rewriteValueMIPS_OpLsh8x16_0(v *Value) bool {
  2613		b := v.Block
  2614		typ := &b.Func.Config.Types
  2615		// match: (Lsh8x16 <t> x y)
  2616		// cond:
  2617		// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  2618		for {
  2619			t := v.Type
  2620			y := v.Args[1]
  2621			x := v.Args[0]
  2622			v.reset(OpMIPSCMOVZ)
  2623			v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2624			v0.AddArg(x)
  2625			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2626			v1.AddArg(y)
  2627			v0.AddArg(v1)
  2628			v.AddArg(v0)
  2629			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2630			v2.AuxInt = 0
  2631			v.AddArg(v2)
  2632			v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2633			v3.AuxInt = 32
  2634			v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  2635			v4.AddArg(y)
  2636			v3.AddArg(v4)
  2637			v.AddArg(v3)
  2638			return true
  2639		}
  2640	}
  2641	func rewriteValueMIPS_OpLsh8x32_0(v *Value) bool {
  2642		b := v.Block
  2643		typ := &b.Func.Config.Types
  2644		// match: (Lsh8x32 <t> x y)
  2645		// cond:
  2646		// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  2647		for {
  2648			t := v.Type
  2649			y := v.Args[1]
  2650			x := v.Args[0]
  2651			v.reset(OpMIPSCMOVZ)
  2652			v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2653			v0.AddArg(x)
  2654			v0.AddArg(y)
  2655			v.AddArg(v0)
  2656			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2657			v1.AuxInt = 0
  2658			v.AddArg(v1)
  2659			v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2660			v2.AuxInt = 32
  2661			v2.AddArg(y)
  2662			v.AddArg(v2)
  2663			return true
  2664		}
  2665	}
  2666	func rewriteValueMIPS_OpLsh8x64_0(v *Value) bool {
  2667		// match: (Lsh8x64 x (Const64 [c]))
  2668		// cond: uint32(c) < 8
  2669		// result: (SLLconst x [c])
  2670		for {
  2671			_ = v.Args[1]
  2672			x := v.Args[0]
  2673			v_1 := v.Args[1]
  2674			if v_1.Op != OpConst64 {
  2675				break
  2676			}
  2677			c := v_1.AuxInt
  2678			if !(uint32(c) < 8) {
  2679				break
  2680			}
  2681			v.reset(OpMIPSSLLconst)
  2682			v.AuxInt = c
  2683			v.AddArg(x)
  2684			return true
  2685		}
  2686		// match: (Lsh8x64 _ (Const64 [c]))
  2687		// cond: uint32(c) >= 8
  2688		// result: (MOVWconst [0])
  2689		for {
  2690			_ = v.Args[1]
  2691			v_1 := v.Args[1]
  2692			if v_1.Op != OpConst64 {
  2693				break
  2694			}
  2695			c := v_1.AuxInt
  2696			if !(uint32(c) >= 8) {
  2697				break
  2698			}
  2699			v.reset(OpMIPSMOVWconst)
  2700			v.AuxInt = 0
  2701			return true
  2702		}
  2703		return false
  2704	}
  2705	func rewriteValueMIPS_OpLsh8x8_0(v *Value) bool {
  2706		b := v.Block
  2707		typ := &b.Func.Config.Types
  2708		// match: (Lsh8x8 <t> x y)
  2709		// cond:
  2710		// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  2711		for {
  2712			t := v.Type
  2713			y := v.Args[1]
  2714			x := v.Args[0]
  2715			v.reset(OpMIPSCMOVZ)
  2716			v0 := b.NewValue0(v.Pos, OpMIPSSLL, t)
  2717			v0.AddArg(x)
  2718			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2719			v1.AddArg(y)
  2720			v0.AddArg(v1)
  2721			v.AddArg(v0)
  2722			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  2723			v2.AuxInt = 0
  2724			v.AddArg(v2)
  2725			v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  2726			v3.AuxInt = 32
  2727			v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  2728			v4.AddArg(y)
  2729			v3.AddArg(v4)
  2730			v.AddArg(v3)
  2731			return true
  2732		}
  2733	}
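// The rules from here on rewrite MIPS ops themselves. For ADD: a constant
// operand (in either position) is folded into ADDconst, and adding a
// negation becomes SUB.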
  2734	func rewriteValueMIPS_OpMIPSADD_0(v *Value) bool {
  2735		// match: (ADD x (MOVWconst [c]))
  2736		// cond:
  2737		// result: (ADDconst [c] x)
  2738		for {
  2739			_ = v.Args[1]
  2740			x := v.Args[0]
  2741			v_1 := v.Args[1]
  2742			if v_1.Op != OpMIPSMOVWconst {
  2743				break
  2744			}
  2745			c := v_1.AuxInt
  2746			v.reset(OpMIPSADDconst)
  2747			v.AuxInt = c
  2748			v.AddArg(x)
  2749			return true
  2750		}
  2751		// match: (ADD (MOVWconst [c]) x)
  2752		// cond:
  2753		// result: (ADDconst [c] x)
  2754		for {
  2755			x := v.Args[1]
  2756			v_0 := v.Args[0]
  2757			if v_0.Op != OpMIPSMOVWconst {
  2758				break
  2759			}
  2760			c := v_0.AuxInt
  2761			v.reset(OpMIPSADDconst)
  2762			v.AuxInt = c
  2763			v.AddArg(x)
  2764			return true
  2765		}
  2766		// match: (ADD x (NEG y))
  2767		// cond:
  2768		// result: (SUB x y)
  2769		for {
  2770			_ = v.Args[1]
  2771			x := v.Args[0]
  2772			v_1 := v.Args[1]
  2773			if v_1.Op != OpMIPSNEG {
  2774				break
  2775			}
  2776			y := v_1.Args[0]
  2777			v.reset(OpMIPSSUB)
  2778			v.AddArg(x)
  2779			v.AddArg(y)
  2780			return true
  2781		}
  2782		// match: (ADD (NEG y) x)
  2783		// cond:
  2784		// result: (SUB x y)
  2785		for {
  2786			x := v.Args[1]
  2787			v_0 := v.Args[0]
  2788			if v_0.Op != OpMIPSNEG {
  2789				break
  2790			}
  2791			y := v_0.Args[0]
  2792			v.reset(OpMIPSSUB)
  2793			v.AddArg(x)
  2794			v.AddArg(y)
  2795			return true
  2796		}
  2797		return false
  2798	}
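// ADDconst folding keeps results in 32-bit range: constant arithmetic is
// wrapped in int64(int32(...)) so the AuxInt stays a sign-extended 32-bit
// value, and offsets are folded straight into MOVWaddr addresses.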
  2799	func rewriteValueMIPS_OpMIPSADDconst_0(v *Value) bool {
  2800		// match: (ADDconst [off1] (MOVWaddr [off2] {sym} ptr))
  2801		// cond:
  2802		// result: (MOVWaddr [off1+off2] {sym} ptr)
  2803		for {
  2804			off1 := v.AuxInt
  2805			v_0 := v.Args[0]
  2806			if v_0.Op != OpMIPSMOVWaddr {
  2807				break
  2808			}
  2809			off2 := v_0.AuxInt
  2810			sym := v_0.Aux
  2811			ptr := v_0.Args[0]
  2812			v.reset(OpMIPSMOVWaddr)
  2813			v.AuxInt = off1 + off2
  2814			v.Aux = sym
  2815			v.AddArg(ptr)
  2816			return true
  2817		}
  2818		// match: (ADDconst [0] x)
  2819		// cond:
  2820		// result: x
  2821		for {
  2822			if v.AuxInt != 0 {
  2823				break
  2824			}
  2825			x := v.Args[0]
  2826			v.reset(OpCopy)
  2827			v.Type = x.Type
  2828			v.AddArg(x)
  2829			return true
  2830		}
  2831		// match: (ADDconst [c] (MOVWconst [d]))
  2832		// cond:
  2833		// result: (MOVWconst [int64(int32(c+d))])
  2834		for {
  2835			c := v.AuxInt
  2836			v_0 := v.Args[0]
  2837			if v_0.Op != OpMIPSMOVWconst {
  2838				break
  2839			}
  2840			d := v_0.AuxInt
  2841			v.reset(OpMIPSMOVWconst)
  2842			v.AuxInt = int64(int32(c + d))
  2843			return true
  2844		}
  2845		// match: (ADDconst [c] (ADDconst [d] x))
  2846		// cond:
  2847		// result: (ADDconst [int64(int32(c+d))] x)
  2848		for {
  2849			c := v.AuxInt
  2850			v_0 := v.Args[0]
  2851			if v_0.Op != OpMIPSADDconst {
  2852				break
  2853			}
  2854			d := v_0.AuxInt
  2855			x := v_0.Args[0]
  2856			v.reset(OpMIPSADDconst)
  2857			v.AuxInt = int64(int32(c + d))
  2858			v.AddArg(x)
  2859			return true
  2860		}
  2861		// match: (ADDconst [c] (SUBconst [d] x))
  2862		// cond:
  2863		// result: (ADDconst [int64(int32(c-d))] x)
  2864		for {
  2865			c := v.AuxInt
  2866			v_0 := v.Args[0]
  2867			if v_0.Op != OpMIPSSUBconst {
  2868				break
  2869			}
  2870			d := v_0.AuxInt
  2871			x := v_0.Args[0]
  2872			v.reset(OpMIPSADDconst)
  2873			v.AuxInt = int64(int32(c - d))
  2874			v.AddArg(x)
  2875			return true
  2876		}
  2877		return false
  2878	}
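// AND folding mirrors ADD: constants move into ANDconst, AND of a value
// with itself is the value itself, and the AND of two SGTUconst [1] tests
// (x == 0 && y == 0) collapses into a single test of x OR y.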
  2879	func rewriteValueMIPS_OpMIPSAND_0(v *Value) bool {
  2880		b := v.Block
  2881		// match: (AND x (MOVWconst [c]))
  2882		// cond:
  2883		// result: (ANDconst [c] x)
  2884		for {
  2885			_ = v.Args[1]
  2886			x := v.Args[0]
  2887			v_1 := v.Args[1]
  2888			if v_1.Op != OpMIPSMOVWconst {
  2889				break
  2890			}
  2891			c := v_1.AuxInt
  2892			v.reset(OpMIPSANDconst)
  2893			v.AuxInt = c
  2894			v.AddArg(x)
  2895			return true
  2896		}
  2897		// match: (AND (MOVWconst [c]) x)
  2898		// cond:
  2899		// result: (ANDconst [c] x)
  2900		for {
  2901			x := v.Args[1]
  2902			v_0 := v.Args[0]
  2903			if v_0.Op != OpMIPSMOVWconst {
  2904				break
  2905			}
  2906			c := v_0.AuxInt
  2907			v.reset(OpMIPSANDconst)
  2908			v.AuxInt = c
  2909			v.AddArg(x)
  2910			return true
  2911		}
  2912		// match: (AND x x)
  2913		// cond:
  2914		// result: x
  2915		for {
  2916			x := v.Args[1]
  2917			if x != v.Args[0] {
  2918				break
  2919			}
  2920			v.reset(OpCopy)
  2921			v.Type = x.Type
  2922			v.AddArg(x)
  2923			return true
  2924		}
  2925		// match: (AND (SGTUconst [1] x) (SGTUconst [1] y))
  2926		// cond:
  2927		// result: (SGTUconst [1] (OR <x.Type> x y))
  2928		for {
  2929			_ = v.Args[1]
  2930			v_0 := v.Args[0]
  2931			if v_0.Op != OpMIPSSGTUconst {
  2932				break
  2933			}
  2934			if v_0.AuxInt != 1 {
  2935				break
  2936			}
  2937			x := v_0.Args[0]
  2938			v_1 := v.Args[1]
  2939			if v_1.Op != OpMIPSSGTUconst {
  2940				break
  2941			}
  2942			if v_1.AuxInt != 1 {
  2943				break
  2944			}
  2945			y := v_1.Args[0]
  2946			v.reset(OpMIPSSGTUconst)
  2947			v.AuxInt = 1
  2948			v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  2949			v0.AddArg(x)
  2950			v0.AddArg(y)
  2951			v.AddArg(v0)
  2952			return true
  2953		}
  2954		// match: (AND (SGTUconst [1] y) (SGTUconst [1] x))
  2955		// cond:
  2956		// result: (SGTUconst [1] (OR <x.Type> x y))
  2957		for {
  2958			_ = v.Args[1]
  2959			v_0 := v.Args[0]
  2960			if v_0.Op != OpMIPSSGTUconst {
  2961				break
  2962			}
  2963			if v_0.AuxInt != 1 {
  2964				break
  2965			}
  2966			y := v_0.Args[0]
  2967			v_1 := v.Args[1]
  2968			if v_1.Op != OpMIPSSGTUconst {
  2969				break
  2970			}
  2971			if v_1.AuxInt != 1 {
  2972				break
  2973			}
  2974			x := v_1.Args[0]
  2975			v.reset(OpMIPSSGTUconst)
  2976			v.AuxInt = 1
  2977			v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  2978			v0.AddArg(x)
  2979			v0.AddArg(y)
  2980			v.AddArg(v0)
  2981			return true
  2982		}
  2983		return false
  2984	}
  2985	func rewriteValueMIPS_OpMIPSANDconst_0(v *Value) bool {
  2986		// match: (ANDconst [0] _)
  2987		// cond:
  2988		// result: (MOVWconst [0])
  2989		for {
  2990			if v.AuxInt != 0 {
  2991				break
  2992			}
  2993			v.reset(OpMIPSMOVWconst)
  2994			v.AuxInt = 0
  2995			return true
  2996		}
  2997		// match: (ANDconst [-1] x)
  2998		// cond:
  2999		// result: x
  3000		for {
  3001			if v.AuxInt != -1 {
  3002				break
  3003			}
  3004			x := v.Args[0]
  3005			v.reset(OpCopy)
  3006			v.Type = x.Type
  3007			v.AddArg(x)
  3008			return true
  3009		}
  3010		// match: (ANDconst [c] (MOVWconst [d]))
  3011		// cond:
  3012		// result: (MOVWconst [c&d])
  3013		for {
  3014			c := v.AuxInt
  3015			v_0 := v.Args[0]
  3016			if v_0.Op != OpMIPSMOVWconst {
  3017				break
  3018			}
  3019			d := v_0.AuxInt
  3020			v.reset(OpMIPSMOVWconst)
  3021			v.AuxInt = c & d
  3022			return true
  3023		}
  3024		// match: (ANDconst [c] (ANDconst [d] x))
  3025		// cond:
  3026		// result: (ANDconst [c&d] x)
  3027		for {
  3028			c := v.AuxInt
  3029			v_0 := v.Args[0]
  3030			if v_0.Op != OpMIPSANDconst {
  3031				break
  3032			}
  3033			d := v_0.AuxInt
  3034			x := v_0.Args[0]
  3035			v.reset(OpMIPSANDconst)
  3036			v.AuxInt = c & d
  3037			v.AddArg(x)
  3038			return true
  3039		}
  3040		return false
  3041	}
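// CMOVZ a b c selects a when c is nonzero and b when c is zero, so a
// constant selector resolves the whole op statically; a constant-zero
// second operand is turned into the cheaper CMOVZzero form.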
  3042	func rewriteValueMIPS_OpMIPSCMOVZ_0(v *Value) bool {
  3043		// match: (CMOVZ _ f (MOVWconst [0]))
  3044		// cond:
  3045		// result: f
  3046		for {
  3047			_ = v.Args[2]
  3048			f := v.Args[1]
  3049			v_2 := v.Args[2]
  3050			if v_2.Op != OpMIPSMOVWconst {
  3051				break
  3052			}
  3053			if v_2.AuxInt != 0 {
  3054				break
  3055			}
  3056			v.reset(OpCopy)
  3057			v.Type = f.Type
  3058			v.AddArg(f)
  3059			return true
  3060		}
  3061		// match: (CMOVZ a _ (MOVWconst [c]))
  3062		// cond: c!=0
  3063		// result: a
  3064		for {
  3065			_ = v.Args[2]
  3066			a := v.Args[0]
  3067			v_2 := v.Args[2]
  3068			if v_2.Op != OpMIPSMOVWconst {
  3069				break
  3070			}
  3071			c := v_2.AuxInt
  3072			if !(c != 0) {
  3073				break
  3074			}
  3075			v.reset(OpCopy)
  3076			v.Type = a.Type
  3077			v.AddArg(a)
  3078			return true
  3079		}
  3080		// match: (CMOVZ a (MOVWconst [0]) c)
  3081		// cond:
  3082		// result: (CMOVZzero a c)
  3083		for {
  3084			c := v.Args[2]
  3085			a := v.Args[0]
  3086			v_1 := v.Args[1]
  3087			if v_1.Op != OpMIPSMOVWconst {
  3088				break
  3089			}
  3090			if v_1.AuxInt != 0 {
  3091				break
  3092			}
  3093			v.reset(OpMIPSCMOVZzero)
  3094			v.AddArg(a)
  3095			v.AddArg(c)
  3096			return true
  3097		}
  3098		return false
  3099	}
  3100	func rewriteValueMIPS_OpMIPSCMOVZzero_0(v *Value) bool {
  3101		// match: (CMOVZzero _ (MOVWconst [0]))
  3102		// cond:
  3103		// result: (MOVWconst [0])
  3104		for {
  3105			_ = v.Args[1]
  3106			v_1 := v.Args[1]
  3107			if v_1.Op != OpMIPSMOVWconst {
  3108				break
  3109			}
  3110			if v_1.AuxInt != 0 {
  3111				break
  3112			}
  3113			v.reset(OpMIPSMOVWconst)
  3114			v.AuxInt = 0
  3115			return true
  3116		}
  3117		// match: (CMOVZzero a (MOVWconst [c]))
  3118		// cond: c!=0
  3119		// result: a
  3120		for {
  3121			_ = v.Args[1]
  3122			a := v.Args[0]
  3123			v_1 := v.Args[1]
  3124			if v_1.Op != OpMIPSMOVWconst {
  3125				break
  3126			}
  3127			c := v_1.AuxInt
  3128			if !(c != 0) {
  3129				break
  3130			}
  3131			v.reset(OpCopy)
  3132			v.Type = a.Type
  3133			v.AddArg(a)
  3134			return true
  3135		}
  3136		return false
  3137	}
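// Atomic ops with friendly operands get dedicated forms: an atomic add of a
// constant that fits in 16 bits becomes LoweredAtomicAddconst, and an
// atomic store of zero becomes LoweredAtomicStorezero.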
  3138	func rewriteValueMIPS_OpMIPSLoweredAtomicAdd_0(v *Value) bool {
  3139		// match: (LoweredAtomicAdd ptr (MOVWconst [c]) mem)
  3140		// cond: is16Bit(c)
  3141		// result: (LoweredAtomicAddconst [c] ptr mem)
  3142		for {
  3143			mem := v.Args[2]
  3144			ptr := v.Args[0]
  3145			v_1 := v.Args[1]
  3146			if v_1.Op != OpMIPSMOVWconst {
  3147				break
  3148			}
  3149			c := v_1.AuxInt
  3150			if !(is16Bit(c)) {
  3151				break
  3152			}
  3153			v.reset(OpMIPSLoweredAtomicAddconst)
  3154			v.AuxInt = c
  3155			v.AddArg(ptr)
  3156			v.AddArg(mem)
  3157			return true
  3158		}
  3159		return false
  3160	}
  3161	func rewriteValueMIPS_OpMIPSLoweredAtomicStore_0(v *Value) bool {
  3162		// match: (LoweredAtomicStore ptr (MOVWconst [0]) mem)
  3163		// cond:
  3164		// result: (LoweredAtomicStorezero ptr mem)
  3165		for {
  3166			mem := v.Args[2]
  3167			ptr := v.Args[0]
  3168			v_1 := v.Args[1]
  3169			if v_1.Op != OpMIPSMOVWconst {
  3170				break
  3171			}
  3172			if v_1.AuxInt != 0 {
  3173				break
  3174			}
  3175			v.reset(OpMIPSLoweredAtomicStorezero)
  3176			v.AddArg(ptr)
  3177			v.AddArg(mem)
  3178			return true
  3179		}
  3180		return false
  3181	}
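// The MOV*load/MOV*store rules below share three patterns: an ADDconst or
// MOVWaddr base is folded into the offset when the combined offset fits in
// 16 bits (or the address computation has no other uses), symbols are merged
// via canMergeSym/mergeSym, and a load that reads back a value just stored
// to the same address is replaced by a register extension of that value.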
  3182	func rewriteValueMIPS_OpMIPSMOVBUload_0(v *Value) bool {
  3183		// match: (MOVBUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3184		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3185		// result: (MOVBUload [off1+off2] {sym} ptr mem)
  3186		for {
  3187			off1 := v.AuxInt
  3188			sym := v.Aux
  3189			mem := v.Args[1]
  3190			x := v.Args[0]
  3191			if x.Op != OpMIPSADDconst {
  3192				break
  3193			}
  3194			off2 := x.AuxInt
  3195			ptr := x.Args[0]
  3196			if !(is16Bit(off1+off2) || x.Uses == 1) {
  3197				break
  3198			}
  3199			v.reset(OpMIPSMOVBUload)
  3200			v.AuxInt = off1 + off2
  3201			v.Aux = sym
  3202			v.AddArg(ptr)
  3203			v.AddArg(mem)
  3204			return true
  3205		}
  3206		// match: (MOVBUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3207		// cond: canMergeSym(sym1,sym2)
  3208		// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3209		for {
  3210			off1 := v.AuxInt
  3211			sym1 := v.Aux
  3212			mem := v.Args[1]
  3213			v_0 := v.Args[0]
  3214			if v_0.Op != OpMIPSMOVWaddr {
  3215				break
  3216			}
  3217			off2 := v_0.AuxInt
  3218			sym2 := v_0.Aux
  3219			ptr := v_0.Args[0]
  3220			if !(canMergeSym(sym1, sym2)) {
  3221				break
  3222			}
  3223			v.reset(OpMIPSMOVBUload)
  3224			v.AuxInt = off1 + off2
  3225			v.Aux = mergeSym(sym1, sym2)
  3226			v.AddArg(ptr)
  3227			v.AddArg(mem)
  3228			return true
  3229		}
  3230		// match: (MOVBUload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
  3231		// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3232		// result: (MOVBUreg x)
  3233		for {
  3234			off := v.AuxInt
  3235			sym := v.Aux
  3236			_ = v.Args[1]
  3237			ptr := v.Args[0]
  3238			v_1 := v.Args[1]
  3239			if v_1.Op != OpMIPSMOVBstore {
  3240				break
  3241			}
  3242			off2 := v_1.AuxInt
  3243			sym2 := v_1.Aux
  3244			_ = v_1.Args[2]
  3245			ptr2 := v_1.Args[0]
  3246			x := v_1.Args[1]
  3247			if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3248				break
  3249			}
  3250			v.reset(OpMIPSMOVBUreg)
  3251			v.AddArg(x)
  3252			return true
  3253		}
  3254		return false
  3255	}
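// MOVBUreg/MOVBreg eliminate redundant extensions: a value loaded with the
// matching extension only needs MOVWreg, a single-use load of the opposite
// signedness is replaced by the correctly-extending load in its own block,
// and extensions of ANDconst or MOVWconst are folded into the constant.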
  3256	func rewriteValueMIPS_OpMIPSMOVBUreg_0(v *Value) bool {
  3257		b := v.Block
  3258		// match: (MOVBUreg x:(MOVBUload _ _))
  3259		// cond:
  3260		// result: (MOVWreg x)
  3261		for {
  3262			x := v.Args[0]
  3263			if x.Op != OpMIPSMOVBUload {
  3264				break
  3265			}
  3266			_ = x.Args[1]
  3267			v.reset(OpMIPSMOVWreg)
  3268			v.AddArg(x)
  3269			return true
  3270		}
  3271		// match: (MOVBUreg x:(MOVBUreg _))
  3272		// cond:
  3273		// result: (MOVWreg x)
  3274		for {
  3275			x := v.Args[0]
  3276			if x.Op != OpMIPSMOVBUreg {
  3277				break
  3278			}
  3279			v.reset(OpMIPSMOVWreg)
  3280			v.AddArg(x)
  3281			return true
  3282		}
  3283		// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
  3284		// cond: x.Uses == 1 && clobber(x)
  3285		// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
  3286		for {
  3287			t := v.Type
  3288			x := v.Args[0]
  3289			if x.Op != OpMIPSMOVBload {
  3290				break
  3291			}
  3292			off := x.AuxInt
  3293			sym := x.Aux
  3294			mem := x.Args[1]
  3295			ptr := x.Args[0]
  3296			if !(x.Uses == 1 && clobber(x)) {
  3297				break
  3298			}
  3299			b = x.Block
  3300			v0 := b.NewValue0(x.Pos, OpMIPSMOVBUload, t)
  3301			v.reset(OpCopy)
  3302			v.AddArg(v0)
  3303			v0.AuxInt = off
  3304			v0.Aux = sym
  3305			v0.AddArg(ptr)
  3306			v0.AddArg(mem)
  3307			return true
  3308		}
  3309		// match: (MOVBUreg (ANDconst [c] x))
  3310		// cond:
  3311		// result: (ANDconst [c&0xff] x)
  3312		for {
  3313			v_0 := v.Args[0]
  3314			if v_0.Op != OpMIPSANDconst {
  3315				break
  3316			}
  3317			c := v_0.AuxInt
  3318			x := v_0.Args[0]
  3319			v.reset(OpMIPSANDconst)
  3320			v.AuxInt = c & 0xff
  3321			v.AddArg(x)
  3322			return true
  3323		}
  3324		// match: (MOVBUreg (MOVWconst [c]))
  3325		// cond:
  3326		// result: (MOVWconst [int64(uint8(c))])
  3327		for {
  3328			v_0 := v.Args[0]
  3329			if v_0.Op != OpMIPSMOVWconst {
  3330				break
  3331			}
  3332			c := v_0.AuxInt
  3333			v.reset(OpMIPSMOVWconst)
  3334			v.AuxInt = int64(uint8(c))
  3335			return true
  3336		}
  3337		return false
  3338	}
  3339	func rewriteValueMIPS_OpMIPSMOVBload_0(v *Value) bool {
  3340		// match: (MOVBload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3341		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3342		// result: (MOVBload [off1+off2] {sym} ptr mem)
  3343		for {
  3344			off1 := v.AuxInt
  3345			sym := v.Aux
  3346			mem := v.Args[1]
  3347			x := v.Args[0]
  3348			if x.Op != OpMIPSADDconst {
  3349				break
  3350			}
  3351			off2 := x.AuxInt
  3352			ptr := x.Args[0]
  3353			if !(is16Bit(off1+off2) || x.Uses == 1) {
  3354				break
  3355			}
  3356			v.reset(OpMIPSMOVBload)
  3357			v.AuxInt = off1 + off2
  3358			v.Aux = sym
  3359			v.AddArg(ptr)
  3360			v.AddArg(mem)
  3361			return true
  3362		}
  3363		// match: (MOVBload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3364		// cond: canMergeSym(sym1,sym2)
  3365		// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3366		for {
  3367			off1 := v.AuxInt
  3368			sym1 := v.Aux
  3369			mem := v.Args[1]
  3370			v_0 := v.Args[0]
  3371			if v_0.Op != OpMIPSMOVWaddr {
  3372				break
  3373			}
  3374			off2 := v_0.AuxInt
  3375			sym2 := v_0.Aux
  3376			ptr := v_0.Args[0]
  3377			if !(canMergeSym(sym1, sym2)) {
  3378				break
  3379			}
  3380			v.reset(OpMIPSMOVBload)
  3381			v.AuxInt = off1 + off2
  3382			v.Aux = mergeSym(sym1, sym2)
  3383			v.AddArg(ptr)
  3384			v.AddArg(mem)
  3385			return true
  3386		}
  3387		// match: (MOVBload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
  3388		// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3389		// result: (MOVBreg x)
  3390		for {
  3391			off := v.AuxInt
  3392			sym := v.Aux
  3393			_ = v.Args[1]
  3394			ptr := v.Args[0]
  3395			v_1 := v.Args[1]
  3396			if v_1.Op != OpMIPSMOVBstore {
  3397				break
  3398			}
  3399			off2 := v_1.AuxInt
  3400			sym2 := v_1.Aux
  3401			_ = v_1.Args[2]
  3402			ptr2 := v_1.Args[0]
  3403			x := v_1.Args[1]
  3404			if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3405				break
  3406			}
  3407			v.reset(OpMIPSMOVBreg)
  3408			v.AddArg(x)
  3409			return true
  3410		}
  3411		return false
  3412	}
  3413	func rewriteValueMIPS_OpMIPSMOVBreg_0(v *Value) bool {
  3414		b := v.Block
  3415		// match: (MOVBreg x:(MOVBload _ _))
  3416		// cond:
  3417		// result: (MOVWreg x)
  3418		for {
  3419			x := v.Args[0]
  3420			if x.Op != OpMIPSMOVBload {
  3421				break
  3422			}
  3423			_ = x.Args[1]
  3424			v.reset(OpMIPSMOVWreg)
  3425			v.AddArg(x)
  3426			return true
  3427		}
  3428		// match: (MOVBreg x:(MOVBreg _))
  3429		// cond:
  3430		// result: (MOVWreg x)
  3431		for {
  3432			x := v.Args[0]
  3433			if x.Op != OpMIPSMOVBreg {
  3434				break
  3435			}
  3436			v.reset(OpMIPSMOVWreg)
  3437			v.AddArg(x)
  3438			return true
  3439		}
  3440		// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
  3441		// cond: x.Uses == 1 && clobber(x)
  3442		// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
  3443		for {
  3444			t := v.Type
  3445			x := v.Args[0]
  3446			if x.Op != OpMIPSMOVBUload {
  3447				break
  3448			}
  3449			off := x.AuxInt
  3450			sym := x.Aux
  3451			mem := x.Args[1]
  3452			ptr := x.Args[0]
  3453			if !(x.Uses == 1 && clobber(x)) {
  3454				break
  3455			}
  3456			b = x.Block
  3457			v0 := b.NewValue0(x.Pos, OpMIPSMOVBload, t)
  3458			v.reset(OpCopy)
  3459			v.AddArg(v0)
  3460			v0.AuxInt = off
  3461			v0.Aux = sym
  3462			v0.AddArg(ptr)
  3463			v0.AddArg(mem)
  3464			return true
  3465		}
  3466		// match: (MOVBreg (ANDconst [c] x))
  3467		// cond: c & 0x80 == 0
  3468		// result: (ANDconst [c&0x7f] x)
  3469		for {
  3470			v_0 := v.Args[0]
  3471			if v_0.Op != OpMIPSANDconst {
  3472				break
  3473			}
  3474			c := v_0.AuxInt
  3475			x := v_0.Args[0]
  3476			if !(c&0x80 == 0) {
  3477				break
  3478			}
  3479			v.reset(OpMIPSANDconst)
  3480			v.AuxInt = c & 0x7f
  3481			v.AddArg(x)
  3482			return true
  3483		}
  3484		// match: (MOVBreg (MOVWconst [c]))
  3485		// cond:
  3486		// result: (MOVWconst [int64(int8(c))])
  3487		for {
  3488			v_0 := v.Args[0]
  3489			if v_0.Op != OpMIPSMOVWconst {
  3490				break
  3491			}
  3492			c := v_0.AuxInt
  3493			v.reset(OpMIPSMOVWconst)
  3494			v.AuxInt = int64(int8(c))
  3495			return true
  3496		}
  3497		return false
  3498	}
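// A byte store only writes the low 8 bits, so MOVBstore drops any MOV*reg
// extension wrapped around its value operand, and a stored constant zero
// becomes MOVBstorezero.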
  3499	func rewriteValueMIPS_OpMIPSMOVBstore_0(v *Value) bool {
  3500		// match: (MOVBstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  3501		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3502		// result: (MOVBstore [off1+off2] {sym} ptr val mem)
  3503		for {
  3504			off1 := v.AuxInt
  3505			sym := v.Aux
  3506			mem := v.Args[2]
  3507			x := v.Args[0]
  3508			if x.Op != OpMIPSADDconst {
  3509				break
  3510			}
  3511			off2 := x.AuxInt
  3512			ptr := x.Args[0]
  3513			val := v.Args[1]
  3514			if !(is16Bit(off1+off2) || x.Uses == 1) {
  3515				break
  3516			}
  3517			v.reset(OpMIPSMOVBstore)
  3518			v.AuxInt = off1 + off2
  3519			v.Aux = sym
  3520			v.AddArg(ptr)
  3521			v.AddArg(val)
  3522			v.AddArg(mem)
  3523			return true
  3524		}
  3525		// match: (MOVBstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  3526		// cond: canMergeSym(sym1,sym2)
  3527		// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  3528		for {
  3529			off1 := v.AuxInt
  3530			sym1 := v.Aux
  3531			mem := v.Args[2]
  3532			v_0 := v.Args[0]
  3533			if v_0.Op != OpMIPSMOVWaddr {
  3534				break
  3535			}
  3536			off2 := v_0.AuxInt
  3537			sym2 := v_0.Aux
  3538			ptr := v_0.Args[0]
  3539			val := v.Args[1]
  3540			if !(canMergeSym(sym1, sym2)) {
  3541				break
  3542			}
  3543			v.reset(OpMIPSMOVBstore)
  3544			v.AuxInt = off1 + off2
  3545			v.Aux = mergeSym(sym1, sym2)
  3546			v.AddArg(ptr)
  3547			v.AddArg(val)
  3548			v.AddArg(mem)
  3549			return true
  3550		}
  3551		// match: (MOVBstore [off] {sym} ptr (MOVWconst [0]) mem)
  3552		// cond:
  3553		// result: (MOVBstorezero [off] {sym} ptr mem)
  3554		for {
  3555			off := v.AuxInt
  3556			sym := v.Aux
  3557			mem := v.Args[2]
  3558			ptr := v.Args[0]
  3559			v_1 := v.Args[1]
  3560			if v_1.Op != OpMIPSMOVWconst {
  3561				break
  3562			}
  3563			if v_1.AuxInt != 0 {
  3564				break
  3565			}
  3566			v.reset(OpMIPSMOVBstorezero)
  3567			v.AuxInt = off
  3568			v.Aux = sym
  3569			v.AddArg(ptr)
  3570			v.AddArg(mem)
  3571			return true
  3572		}
  3573		// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  3574		// cond:
  3575		// result: (MOVBstore [off] {sym} ptr x mem)
  3576		for {
  3577			off := v.AuxInt
  3578			sym := v.Aux
  3579			mem := v.Args[2]
  3580			ptr := v.Args[0]
  3581			v_1 := v.Args[1]
  3582			if v_1.Op != OpMIPSMOVBreg {
  3583				break
  3584			}
  3585			x := v_1.Args[0]
  3586			v.reset(OpMIPSMOVBstore)
  3587			v.AuxInt = off
  3588			v.Aux = sym
  3589			v.AddArg(ptr)
  3590			v.AddArg(x)
  3591			v.AddArg(mem)
  3592			return true
  3593		}
  3594		// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  3595		// cond:
  3596		// result: (MOVBstore [off] {sym} ptr x mem)
  3597		for {
  3598			off := v.AuxInt
  3599			sym := v.Aux
  3600			mem := v.Args[2]
  3601			ptr := v.Args[0]
  3602			v_1 := v.Args[1]
  3603			if v_1.Op != OpMIPSMOVBUreg {
  3604				break
  3605			}
  3606			x := v_1.Args[0]
  3607			v.reset(OpMIPSMOVBstore)
  3608			v.AuxInt = off
  3609			v.Aux = sym
  3610			v.AddArg(ptr)
  3611			v.AddArg(x)
  3612			v.AddArg(mem)
  3613			return true
  3614		}
  3615		// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  3616		// cond:
  3617		// result: (MOVBstore [off] {sym} ptr x mem)
  3618		for {
  3619			off := v.AuxInt
  3620			sym := v.Aux
  3621			mem := v.Args[2]
  3622			ptr := v.Args[0]
  3623			v_1 := v.Args[1]
  3624			if v_1.Op != OpMIPSMOVHreg {
  3625				break
  3626			}
  3627			x := v_1.Args[0]
  3628			v.reset(OpMIPSMOVBstore)
  3629			v.AuxInt = off
  3630			v.Aux = sym
  3631			v.AddArg(ptr)
  3632			v.AddArg(x)
  3633			v.AddArg(mem)
  3634			return true
  3635		}
  3636		// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  3637		// cond:
  3638		// result: (MOVBstore [off] {sym} ptr x mem)
  3639		for {
  3640			off := v.AuxInt
  3641			sym := v.Aux
  3642			mem := v.Args[2]
  3643			ptr := v.Args[0]
  3644			v_1 := v.Args[1]
  3645			if v_1.Op != OpMIPSMOVHUreg {
  3646				break
  3647			}
  3648			x := v_1.Args[0]
  3649			v.reset(OpMIPSMOVBstore)
  3650			v.AuxInt = off
  3651			v.Aux = sym
  3652			v.AddArg(ptr)
  3653			v.AddArg(x)
  3654			v.AddArg(mem)
  3655			return true
  3656		}
  3657		// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  3658		// cond:
  3659		// result: (MOVBstore [off] {sym} ptr x mem)
  3660		for {
  3661			off := v.AuxInt
  3662			sym := v.Aux
  3663			mem := v.Args[2]
  3664			ptr := v.Args[0]
  3665			v_1 := v.Args[1]
  3666			if v_1.Op != OpMIPSMOVWreg {
  3667				break
  3668			}
  3669			x := v_1.Args[0]
  3670			v.reset(OpMIPSMOVBstore)
  3671			v.AuxInt = off
  3672			v.Aux = sym
  3673			v.AddArg(ptr)
  3674			v.AddArg(x)
  3675			v.AddArg(mem)
  3676			return true
  3677		}
  3678		return false
  3679	}
  3680	func rewriteValueMIPS_OpMIPSMOVBstorezero_0(v *Value) bool {
  3681		// match: (MOVBstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3682		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3683		// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
  3684		for {
  3685			off1 := v.AuxInt
  3686			sym := v.Aux
  3687			mem := v.Args[1]
  3688			x := v.Args[0]
  3689			if x.Op != OpMIPSADDconst {
  3690				break
  3691			}
  3692			off2 := x.AuxInt
  3693			ptr := x.Args[0]
  3694			if !(is16Bit(off1+off2) || x.Uses == 1) {
  3695				break
  3696			}
  3697			v.reset(OpMIPSMOVBstorezero)
  3698			v.AuxInt = off1 + off2
  3699			v.Aux = sym
  3700			v.AddArg(ptr)
  3701			v.AddArg(mem)
  3702			return true
  3703		}
  3704		// match: (MOVBstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3705		// cond: canMergeSym(sym1,sym2)
  3706		// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3707		for {
  3708			off1 := v.AuxInt
  3709			sym1 := v.Aux
  3710			mem := v.Args[1]
  3711			v_0 := v.Args[0]
  3712			if v_0.Op != OpMIPSMOVWaddr {
  3713				break
  3714			}
  3715			off2 := v_0.AuxInt
  3716			sym2 := v_0.Aux
  3717			ptr := v_0.Args[0]
  3718			if !(canMergeSym(sym1, sym2)) {
  3719				break
  3720			}
  3721			v.reset(OpMIPSMOVBstorezero)
  3722			v.AuxInt = off1 + off2
  3723			v.Aux = mergeSym(sym1, sym2)
  3724			v.AddArg(ptr)
  3725			v.AddArg(mem)
  3726			return true
  3727		}
  3728		return false
  3729	}
  3730	func rewriteValueMIPS_OpMIPSMOVDload_0(v *Value) bool {
  3731		// match: (MOVDload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3732		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3733		// result: (MOVDload [off1+off2] {sym} ptr mem)
  3734		for {
  3735			off1 := v.AuxInt
  3736			sym := v.Aux
  3737			mem := v.Args[1]
  3738			x := v.Args[0]
  3739			if x.Op != OpMIPSADDconst {
  3740				break
  3741			}
  3742			off2 := x.AuxInt
  3743			ptr := x.Args[0]
  3744			if !(is16Bit(off1+off2) || x.Uses == 1) {
  3745				break
  3746			}
  3747			v.reset(OpMIPSMOVDload)
  3748			v.AuxInt = off1 + off2
  3749			v.Aux = sym
  3750			v.AddArg(ptr)
  3751			v.AddArg(mem)
  3752			return true
  3753		}
  3754		// match: (MOVDload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3755		// cond: canMergeSym(sym1,sym2)
  3756		// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3757		for {
  3758			off1 := v.AuxInt
  3759			sym1 := v.Aux
  3760			mem := v.Args[1]
  3761			v_0 := v.Args[0]
  3762			if v_0.Op != OpMIPSMOVWaddr {
  3763				break
  3764			}
  3765			off2 := v_0.AuxInt
  3766			sym2 := v_0.Aux
  3767			ptr := v_0.Args[0]
  3768			if !(canMergeSym(sym1, sym2)) {
  3769				break
  3770			}
  3771			v.reset(OpMIPSMOVDload)
  3772			v.AuxInt = off1 + off2
  3773			v.Aux = mergeSym(sym1, sym2)
  3774			v.AddArg(ptr)
  3775			v.AddArg(mem)
  3776			return true
  3777		}
  3778		// match: (MOVDload [off] {sym} ptr (MOVDstore [off2] {sym2} ptr2 x _))
  3779		// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3780		// result: x
  3781		for {
  3782			off := v.AuxInt
  3783			sym := v.Aux
  3784			_ = v.Args[1]
  3785			ptr := v.Args[0]
  3786			v_1 := v.Args[1]
  3787			if v_1.Op != OpMIPSMOVDstore {
  3788				break
  3789			}
  3790			off2 := v_1.AuxInt
  3791			sym2 := v_1.Aux
  3792			_ = v_1.Args[2]
  3793			ptr2 := v_1.Args[0]
  3794			x := v_1.Args[1]
  3795			if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3796				break
  3797			}
  3798			v.reset(OpCopy)
  3799			v.Type = x.Type
  3800			v.AddArg(x)
  3801			return true
  3802		}
  3803		return false
  3804	}
  3805	func rewriteValueMIPS_OpMIPSMOVDstore_0(v *Value) bool {
  3806		// match: (MOVDstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  3807		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3808		// result: (MOVDstore [off1+off2] {sym} ptr val mem)
  3809		for {
  3810			off1 := v.AuxInt
  3811			sym := v.Aux
  3812			mem := v.Args[2]
  3813			x := v.Args[0]
  3814			if x.Op != OpMIPSADDconst {
  3815				break
  3816			}
  3817			off2 := x.AuxInt
  3818			ptr := x.Args[0]
  3819			val := v.Args[1]
  3820			if !(is16Bit(off1+off2) || x.Uses == 1) {
  3821				break
  3822			}
  3823			v.reset(OpMIPSMOVDstore)
  3824			v.AuxInt = off1 + off2
  3825			v.Aux = sym
  3826			v.AddArg(ptr)
  3827			v.AddArg(val)
  3828			v.AddArg(mem)
  3829			return true
  3830		}
  3831		// match: (MOVDstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  3832		// cond: canMergeSym(sym1,sym2)
  3833		// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  3834		for {
  3835			off1 := v.AuxInt
  3836			sym1 := v.Aux
  3837			mem := v.Args[2]
  3838			v_0 := v.Args[0]
  3839			if v_0.Op != OpMIPSMOVWaddr {
  3840				break
  3841			}
  3842			off2 := v_0.AuxInt
  3843			sym2 := v_0.Aux
  3844			ptr := v_0.Args[0]
  3845			val := v.Args[1]
  3846			if !(canMergeSym(sym1, sym2)) {
  3847				break
  3848			}
  3849			v.reset(OpMIPSMOVDstore)
  3850			v.AuxInt = off1 + off2
  3851			v.Aux = mergeSym(sym1, sym2)
  3852			v.AddArg(ptr)
  3853			v.AddArg(val)
  3854			v.AddArg(mem)
  3855			return true
  3856		}
  3857		return false
  3858	}
  3859	func rewriteValueMIPS_OpMIPSMOVFload_0(v *Value) bool {
  3860		// match: (MOVFload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3861		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3862		// result: (MOVFload [off1+off2] {sym} ptr mem)
  3863		for {
  3864			off1 := v.AuxInt
  3865			sym := v.Aux
  3866			mem := v.Args[1]
  3867			x := v.Args[0]
  3868			if x.Op != OpMIPSADDconst {
  3869				break
  3870			}
  3871			off2 := x.AuxInt
  3872			ptr := x.Args[0]
  3873			if !(is16Bit(off1+off2) || x.Uses == 1) {
  3874				break
  3875			}
  3876			v.reset(OpMIPSMOVFload)
  3877			v.AuxInt = off1 + off2
  3878			v.Aux = sym
  3879			v.AddArg(ptr)
  3880			v.AddArg(mem)
  3881			return true
  3882		}
  3883		// match: (MOVFload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  3884		// cond: canMergeSym(sym1,sym2)
  3885		// result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  3886		for {
  3887			off1 := v.AuxInt
  3888			sym1 := v.Aux
  3889			mem := v.Args[1]
  3890			v_0 := v.Args[0]
  3891			if v_0.Op != OpMIPSMOVWaddr {
  3892				break
  3893			}
  3894			off2 := v_0.AuxInt
  3895			sym2 := v_0.Aux
  3896			ptr := v_0.Args[0]
  3897			if !(canMergeSym(sym1, sym2)) {
  3898				break
  3899			}
  3900			v.reset(OpMIPSMOVFload)
  3901			v.AuxInt = off1 + off2
  3902			v.Aux = mergeSym(sym1, sym2)
  3903			v.AddArg(ptr)
  3904			v.AddArg(mem)
  3905			return true
  3906		}
  3907		// match: (MOVFload [off] {sym} ptr (MOVFstore [off2] {sym2} ptr2 x _))
  3908		// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  3909		// result: x
  3910		for {
  3911			off := v.AuxInt
  3912			sym := v.Aux
  3913			_ = v.Args[1]
  3914			ptr := v.Args[0]
  3915			v_1 := v.Args[1]
  3916			if v_1.Op != OpMIPSMOVFstore {
  3917				break
  3918			}
  3919			off2 := v_1.AuxInt
  3920			sym2 := v_1.Aux
  3921			_ = v_1.Args[2]
  3922			ptr2 := v_1.Args[0]
  3923			x := v_1.Args[1]
  3924			if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  3925				break
  3926			}
  3927			v.reset(OpCopy)
  3928			v.Type = x.Type
  3929			v.AddArg(x)
  3930			return true
  3931		}
  3932		return false
  3933	}
  3934	func rewriteValueMIPS_OpMIPSMOVFstore_0(v *Value) bool {
  3935		// match: (MOVFstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  3936		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3937		// result: (MOVFstore [off1+off2] {sym} ptr val mem)
  3938		for {
  3939			off1 := v.AuxInt
  3940			sym := v.Aux
  3941			mem := v.Args[2]
  3942			x := v.Args[0]
  3943			if x.Op != OpMIPSADDconst {
  3944				break
  3945			}
  3946			off2 := x.AuxInt
  3947			ptr := x.Args[0]
  3948			val := v.Args[1]
  3949			if !(is16Bit(off1+off2) || x.Uses == 1) {
  3950				break
  3951			}
  3952			v.reset(OpMIPSMOVFstore)
  3953			v.AuxInt = off1 + off2
  3954			v.Aux = sym
  3955			v.AddArg(ptr)
  3956			v.AddArg(val)
  3957			v.AddArg(mem)
  3958			return true
  3959		}
  3960		// match: (MOVFstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  3961		// cond: canMergeSym(sym1,sym2)
  3962		// result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  3963		for {
  3964			off1 := v.AuxInt
  3965			sym1 := v.Aux
  3966			mem := v.Args[2]
  3967			v_0 := v.Args[0]
  3968			if v_0.Op != OpMIPSMOVWaddr {
  3969				break
  3970			}
  3971			off2 := v_0.AuxInt
  3972			sym2 := v_0.Aux
  3973			ptr := v_0.Args[0]
  3974			val := v.Args[1]
  3975			if !(canMergeSym(sym1, sym2)) {
  3976				break
  3977			}
  3978			v.reset(OpMIPSMOVFstore)
  3979			v.AuxInt = off1 + off2
  3980			v.Aux = mergeSym(sym1, sym2)
  3981			v.AddArg(ptr)
  3982			v.AddArg(val)
  3983			v.AddArg(mem)
  3984			return true
  3985		}
  3986		return false
  3987	}
  3988	func rewriteValueMIPS_OpMIPSMOVHUload_0(v *Value) bool {
  3989		// match: (MOVHUload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  3990		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  3991		// result: (MOVHUload [off1+off2] {sym} ptr mem)
  3992		for {
  3993			off1 := v.AuxInt
  3994			sym := v.Aux
  3995			mem := v.Args[1]
  3996			x := v.Args[0]
  3997			if x.Op != OpMIPSADDconst {
  3998				break
  3999			}
  4000			off2 := x.AuxInt
  4001			ptr := x.Args[0]
  4002			if !(is16Bit(off1+off2) || x.Uses == 1) {
  4003				break
  4004			}
  4005			v.reset(OpMIPSMOVHUload)
  4006			v.AuxInt = off1 + off2
  4007			v.Aux = sym
  4008			v.AddArg(ptr)
  4009			v.AddArg(mem)
  4010			return true
  4011		}
  4012		// match: (MOVHUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4013		// cond: canMergeSym(sym1,sym2)
  4014		// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4015		for {
  4016			off1 := v.AuxInt
  4017			sym1 := v.Aux
  4018			mem := v.Args[1]
  4019			v_0 := v.Args[0]
  4020			if v_0.Op != OpMIPSMOVWaddr {
  4021				break
  4022			}
  4023			off2 := v_0.AuxInt
  4024			sym2 := v_0.Aux
  4025			ptr := v_0.Args[0]
  4026			if !(canMergeSym(sym1, sym2)) {
  4027				break
  4028			}
  4029			v.reset(OpMIPSMOVHUload)
  4030			v.AuxInt = off1 + off2
  4031			v.Aux = mergeSym(sym1, sym2)
  4032			v.AddArg(ptr)
  4033			v.AddArg(mem)
  4034			return true
  4035		}
  4036		// match: (MOVHUload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
  4037		// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4038		// result: (MOVHUreg x)
  4039		for {
  4040			off := v.AuxInt
  4041			sym := v.Aux
  4042			_ = v.Args[1]
  4043			ptr := v.Args[0]
  4044			v_1 := v.Args[1]
  4045			if v_1.Op != OpMIPSMOVHstore {
  4046				break
  4047			}
  4048			off2 := v_1.AuxInt
  4049			sym2 := v_1.Aux
  4050			_ = v_1.Args[2]
  4051			ptr2 := v_1.Args[0]
  4052			x := v_1.Args[1]
  4053			if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4054				break
  4055			}
  4056			v.reset(OpMIPSMOVHUreg)
  4057			v.AddArg(x)
  4058			return true
  4059		}
  4060		return false
  4061	}
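// rewriteValueMIPS_OpMIPSMOVHUreg_0 drops redundant zero-extensions: when the
// operand already holds only its low 8 or 16 bits (an unsigned byte or half
// load, or a previous MOVBUreg/MOVHUreg), the extension carries no information
// and is reduced to a plain MOVWreg. ANDconst and constant operands are folded
// by masking with 0xffff.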
  4062	func rewriteValueMIPS_OpMIPSMOVHUreg_0(v *Value) bool {
  4063		b := v.Block
  4064		// match: (MOVHUreg x:(MOVBUload _ _))
  4065		// cond:
  4066		// result: (MOVWreg x)
  4067		for {
  4068			x := v.Args[0]
  4069			if x.Op != OpMIPSMOVBUload {
  4070				break
  4071			}
  4072			_ = x.Args[1]
  4073			v.reset(OpMIPSMOVWreg)
  4074			v.AddArg(x)
  4075			return true
  4076		}
  4077		// match: (MOVHUreg x:(MOVHUload _ _))
  4078		// cond:
  4079		// result: (MOVWreg x)
  4080		for {
  4081			x := v.Args[0]
  4082			if x.Op != OpMIPSMOVHUload {
  4083				break
  4084			}
  4085			_ = x.Args[1]
  4086			v.reset(OpMIPSMOVWreg)
  4087			v.AddArg(x)
  4088			return true
  4089		}
  4090		// match: (MOVHUreg x:(MOVBUreg _))
  4091		// cond:
  4092		// result: (MOVWreg x)
  4093		for {
  4094			x := v.Args[0]
  4095			if x.Op != OpMIPSMOVBUreg {
  4096				break
  4097			}
  4098			v.reset(OpMIPSMOVWreg)
  4099			v.AddArg(x)
  4100			return true
  4101		}
  4102		// match: (MOVHUreg x:(MOVHUreg _))
  4103		// cond:
  4104		// result: (MOVWreg x)
  4105		for {
  4106			x := v.Args[0]
  4107			if x.Op != OpMIPSMOVHUreg {
  4108				break
  4109			}
  4110			v.reset(OpMIPSMOVWreg)
  4111			v.AddArg(x)
  4112			return true
  4113		}
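	// The next rule rewrites across blocks: a signed half-word load whose only
	// use is this zero-extension is re-issued as a MOVHUload in the load's own
	// block (the "@x.Block" result form), and this value becomes a Copy of it.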
  4114		// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
  4115		// cond: x.Uses == 1 && clobber(x)
  4116		// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
  4117		for {
  4118			t := v.Type
  4119			x := v.Args[0]
  4120			if x.Op != OpMIPSMOVHload {
  4121				break
  4122			}
  4123			off := x.AuxInt
  4124			sym := x.Aux
  4125			mem := x.Args[1]
  4126			ptr := x.Args[0]
  4127			if !(x.Uses == 1 && clobber(x)) {
  4128				break
  4129			}
  4130			b = x.Block
  4131			v0 := b.NewValue0(x.Pos, OpMIPSMOVHUload, t)
  4132			v.reset(OpCopy)
  4133			v.AddArg(v0)
  4134			v0.AuxInt = off
  4135			v0.Aux = sym
  4136			v0.AddArg(ptr)
  4137			v0.AddArg(mem)
  4138			return true
  4139		}
  4140		// match: (MOVHUreg (ANDconst [c] x))
  4141		// cond:
  4142		// result: (ANDconst [c&0xffff] x)
  4143		for {
  4144			v_0 := v.Args[0]
  4145			if v_0.Op != OpMIPSANDconst {
  4146				break
  4147			}
  4148			c := v_0.AuxInt
  4149			x := v_0.Args[0]
  4150			v.reset(OpMIPSANDconst)
  4151			v.AuxInt = c & 0xffff
  4152			v.AddArg(x)
  4153			return true
  4154		}
  4155		// match: (MOVHUreg (MOVWconst [c]))
  4156		// cond:
  4157		// result: (MOVWconst [int64(uint16(c))])
  4158		for {
  4159			v_0 := v.Args[0]
  4160			if v_0.Op != OpMIPSMOVWconst {
  4161				break
  4162			}
  4163			c := v_0.AuxInt
  4164			v.reset(OpMIPSMOVWconst)
  4165			v.AuxInt = int64(uint16(c))
  4166			return true
  4167		}
  4168		return false
  4169	}
  4170	func rewriteValueMIPS_OpMIPSMOVHload_0(v *Value) bool {
  4171		// match: (MOVHload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4172		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4173		// result: (MOVHload [off1+off2] {sym} ptr mem)
  4174		for {
  4175			off1 := v.AuxInt
  4176			sym := v.Aux
  4177			mem := v.Args[1]
  4178			x := v.Args[0]
  4179			if x.Op != OpMIPSADDconst {
  4180				break
  4181			}
  4182			off2 := x.AuxInt
  4183			ptr := x.Args[0]
  4184			if !(is16Bit(off1+off2) || x.Uses == 1) {
  4185				break
  4186			}
  4187			v.reset(OpMIPSMOVHload)
  4188			v.AuxInt = off1 + off2
  4189			v.Aux = sym
  4190			v.AddArg(ptr)
  4191			v.AddArg(mem)
  4192			return true
  4193		}
  4194		// match: (MOVHload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4195		// cond: canMergeSym(sym1,sym2)
  4196		// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4197		for {
  4198			off1 := v.AuxInt
  4199			sym1 := v.Aux
  4200			mem := v.Args[1]
  4201			v_0 := v.Args[0]
  4202			if v_0.Op != OpMIPSMOVWaddr {
  4203				break
  4204			}
  4205			off2 := v_0.AuxInt
  4206			sym2 := v_0.Aux
  4207			ptr := v_0.Args[0]
  4208			if !(canMergeSym(sym1, sym2)) {
  4209				break
  4210			}
  4211			v.reset(OpMIPSMOVHload)
  4212			v.AuxInt = off1 + off2
  4213			v.Aux = mergeSym(sym1, sym2)
  4214			v.AddArg(ptr)
  4215			v.AddArg(mem)
  4216			return true
  4217		}
  4218		// match: (MOVHload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
  4219		// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4220		// result: (MOVHreg x)
  4221		for {
  4222			off := v.AuxInt
  4223			sym := v.Aux
  4224			_ = v.Args[1]
  4225			ptr := v.Args[0]
  4226			v_1 := v.Args[1]
  4227			if v_1.Op != OpMIPSMOVHstore {
  4228				break
  4229			}
  4230			off2 := v_1.AuxInt
  4231			sym2 := v_1.Aux
  4232			_ = v_1.Args[2]
  4233			ptr2 := v_1.Args[0]
  4234			x := v_1.Args[1]
  4235			if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4236				break
  4237			}
  4238			v.reset(OpMIPSMOVHreg)
  4239			v.AddArg(x)
  4240			return true
  4241		}
  4242		return false
  4243	}
  4244	func rewriteValueMIPS_OpMIPSMOVHreg_0(v *Value) bool {
  4245		b := v.Block
  4246		// match: (MOVHreg x:(MOVBload _ _))
  4247		// cond:
  4248		// result: (MOVWreg x)
  4249		for {
  4250			x := v.Args[0]
  4251			if x.Op != OpMIPSMOVBload {
  4252				break
  4253			}
  4254			_ = x.Args[1]
  4255			v.reset(OpMIPSMOVWreg)
  4256			v.AddArg(x)
  4257			return true
  4258		}
  4259		// match: (MOVHreg x:(MOVBUload _ _))
  4260		// cond:
  4261		// result: (MOVWreg x)
  4262		for {
  4263			x := v.Args[0]
  4264			if x.Op != OpMIPSMOVBUload {
  4265				break
  4266			}
  4267			_ = x.Args[1]
  4268			v.reset(OpMIPSMOVWreg)
  4269			v.AddArg(x)
  4270			return true
  4271		}
  4272		// match: (MOVHreg x:(MOVHload _ _))
  4273		// cond:
  4274		// result: (MOVWreg x)
  4275		for {
  4276			x := v.Args[0]
  4277			if x.Op != OpMIPSMOVHload {
  4278				break
  4279			}
  4280			_ = x.Args[1]
  4281			v.reset(OpMIPSMOVWreg)
  4282			v.AddArg(x)
  4283			return true
  4284		}
  4285		// match: (MOVHreg x:(MOVBreg _))
  4286		// cond:
  4287		// result: (MOVWreg x)
  4288		for {
  4289			x := v.Args[0]
  4290			if x.Op != OpMIPSMOVBreg {
  4291				break
  4292			}
  4293			v.reset(OpMIPSMOVWreg)
  4294			v.AddArg(x)
  4295			return true
  4296		}
  4297		// match: (MOVHreg x:(MOVBUreg _))
  4298		// cond:
  4299		// result: (MOVWreg x)
  4300		for {
  4301			x := v.Args[0]
  4302			if x.Op != OpMIPSMOVBUreg {
  4303				break
  4304			}
  4305			v.reset(OpMIPSMOVWreg)
  4306			v.AddArg(x)
  4307			return true
  4308		}
  4309		// match: (MOVHreg x:(MOVHreg _))
  4310		// cond:
  4311		// result: (MOVWreg x)
  4312		for {
  4313			x := v.Args[0]
  4314			if x.Op != OpMIPSMOVHreg {
  4315				break
  4316			}
  4317			v.reset(OpMIPSMOVWreg)
  4318			v.AddArg(x)
  4319			return true
  4320		}
  4321		// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
  4322		// cond: x.Uses == 1 && clobber(x)
  4323		// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
  4324		for {
  4325			t := v.Type
  4326			x := v.Args[0]
  4327			if x.Op != OpMIPSMOVHUload {
  4328				break
  4329			}
  4330			off := x.AuxInt
  4331			sym := x.Aux
  4332			mem := x.Args[1]
  4333			ptr := x.Args[0]
  4334			if !(x.Uses == 1 && clobber(x)) {
  4335				break
  4336			}
  4337			b = x.Block
  4338			v0 := b.NewValue0(x.Pos, OpMIPSMOVHload, t)
  4339			v.reset(OpCopy)
  4340			v.AddArg(v0)
  4341			v0.AuxInt = off
  4342			v0.Aux = sym
  4343			v0.AddArg(ptr)
  4344			v0.AddArg(mem)
  4345			return true
  4346		}
  4347		// match: (MOVHreg (ANDconst [c] x))
  4348		// cond: c & 0x8000 == 0
  4349		// result: (ANDconst [c&0x7fff] x)
  4350		for {
  4351			v_0 := v.Args[0]
  4352			if v_0.Op != OpMIPSANDconst {
  4353				break
  4354			}
  4355			c := v_0.AuxInt
  4356			x := v_0.Args[0]
  4357			if !(c&0x8000 == 0) {
  4358				break
  4359			}
  4360			v.reset(OpMIPSANDconst)
  4361			v.AuxInt = c & 0x7fff
  4362			v.AddArg(x)
  4363			return true
  4364		}
  4365		// match: (MOVHreg (MOVWconst [c]))
  4366		// cond:
  4367		// result: (MOVWconst [int64(int16(c))])
  4368		for {
  4369			v_0 := v.Args[0]
  4370			if v_0.Op != OpMIPSMOVWconst {
  4371				break
  4372			}
  4373			c := v_0.AuxInt
  4374			v.reset(OpMIPSMOVWconst)
  4375			v.AuxInt = int64(int16(c))
  4376			return true
  4377		}
  4378		return false
  4379	}
  4380	func rewriteValueMIPS_OpMIPSMOVHstore_0(v *Value) bool {
  4381		// match: (MOVHstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4382		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4383		// result: (MOVHstore [off1+off2] {sym} ptr val mem)
  4384		for {
  4385			off1 := v.AuxInt
  4386			sym := v.Aux
  4387			mem := v.Args[2]
  4388			x := v.Args[0]
  4389			if x.Op != OpMIPSADDconst {
  4390				break
  4391			}
  4392			off2 := x.AuxInt
  4393			ptr := x.Args[0]
  4394			val := v.Args[1]
  4395			if !(is16Bit(off1+off2) || x.Uses == 1) {
  4396				break
  4397			}
  4398			v.reset(OpMIPSMOVHstore)
  4399			v.AuxInt = off1 + off2
  4400			v.Aux = sym
  4401			v.AddArg(ptr)
  4402			v.AddArg(val)
  4403			v.AddArg(mem)
  4404			return true
  4405		}
  4406		// match: (MOVHstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4407		// cond: canMergeSym(sym1,sym2)
  4408		// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4409		for {
  4410			off1 := v.AuxInt
  4411			sym1 := v.Aux
  4412			mem := v.Args[2]
  4413			v_0 := v.Args[0]
  4414			if v_0.Op != OpMIPSMOVWaddr {
  4415				break
  4416			}
  4417			off2 := v_0.AuxInt
  4418			sym2 := v_0.Aux
  4419			ptr := v_0.Args[0]
  4420			val := v.Args[1]
  4421			if !(canMergeSym(sym1, sym2)) {
  4422				break
  4423			}
  4424			v.reset(OpMIPSMOVHstore)
  4425			v.AuxInt = off1 + off2
  4426			v.Aux = mergeSym(sym1, sym2)
  4427			v.AddArg(ptr)
  4428			v.AddArg(val)
  4429			v.AddArg(mem)
  4430			return true
  4431		}
  4432		// match: (MOVHstore [off] {sym} ptr (MOVWconst [0]) mem)
  4433		// cond:
  4434		// result: (MOVHstorezero [off] {sym} ptr mem)
  4435		for {
  4436			off := v.AuxInt
  4437			sym := v.Aux
  4438			mem := v.Args[2]
  4439			ptr := v.Args[0]
  4440			v_1 := v.Args[1]
  4441			if v_1.Op != OpMIPSMOVWconst {
  4442				break
  4443			}
  4444			if v_1.AuxInt != 0 {
  4445				break
  4446			}
  4447			v.reset(OpMIPSMOVHstorezero)
  4448			v.AuxInt = off
  4449			v.Aux = sym
  4450			v.AddArg(ptr)
  4451			v.AddArg(mem)
  4452			return true
  4453		}
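	// A half-word store writes only the low 16 bits of its value, so a MOVHreg,
	// MOVHUreg, or MOVWreg wrapped around the stored value is redundant; the
	// three rules below store the unextended value directly.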
  4454		// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
  4455		// cond:
  4456		// result: (MOVHstore [off] {sym} ptr x mem)
  4457		for {
  4458			off := v.AuxInt
  4459			sym := v.Aux
  4460			mem := v.Args[2]
  4461			ptr := v.Args[0]
  4462			v_1 := v.Args[1]
  4463			if v_1.Op != OpMIPSMOVHreg {
  4464				break
  4465			}
  4466			x := v_1.Args[0]
  4467			v.reset(OpMIPSMOVHstore)
  4468			v.AuxInt = off
  4469			v.Aux = sym
  4470			v.AddArg(ptr)
  4471			v.AddArg(x)
  4472			v.AddArg(mem)
  4473			return true
  4474		}
  4475		// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
  4476		// cond:
  4477		// result: (MOVHstore [off] {sym} ptr x mem)
  4478		for {
  4479			off := v.AuxInt
  4480			sym := v.Aux
  4481			mem := v.Args[2]
  4482			ptr := v.Args[0]
  4483			v_1 := v.Args[1]
  4484			if v_1.Op != OpMIPSMOVHUreg {
  4485				break
  4486			}
  4487			x := v_1.Args[0]
  4488			v.reset(OpMIPSMOVHstore)
  4489			v.AuxInt = off
  4490			v.Aux = sym
  4491			v.AddArg(ptr)
  4492			v.AddArg(x)
  4493			v.AddArg(mem)
  4494			return true
  4495		}
  4496		// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
  4497		// cond:
  4498		// result: (MOVHstore [off] {sym} ptr x mem)
  4499		for {
  4500			off := v.AuxInt
  4501			sym := v.Aux
  4502			mem := v.Args[2]
  4503			ptr := v.Args[0]
  4504			v_1 := v.Args[1]
  4505			if v_1.Op != OpMIPSMOVWreg {
  4506				break
  4507			}
  4508			x := v_1.Args[0]
  4509			v.reset(OpMIPSMOVHstore)
  4510			v.AuxInt = off
  4511			v.Aux = sym
  4512			v.AddArg(ptr)
  4513			v.AddArg(x)
  4514			v.AddArg(mem)
  4515			return true
  4516		}
  4517		return false
  4518	}
  4519	func rewriteValueMIPS_OpMIPSMOVHstorezero_0(v *Value) bool {
  4520		// match: (MOVHstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4521		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4522		// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
  4523		for {
  4524			off1 := v.AuxInt
  4525			sym := v.Aux
  4526			mem := v.Args[1]
  4527			x := v.Args[0]
  4528			if x.Op != OpMIPSADDconst {
  4529				break
  4530			}
  4531			off2 := x.AuxInt
  4532			ptr := x.Args[0]
  4533			if !(is16Bit(off1+off2) || x.Uses == 1) {
  4534				break
  4535			}
  4536			v.reset(OpMIPSMOVHstorezero)
  4537			v.AuxInt = off1 + off2
  4538			v.Aux = sym
  4539			v.AddArg(ptr)
  4540			v.AddArg(mem)
  4541			return true
  4542		}
  4543		// match: (MOVHstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4544		// cond: canMergeSym(sym1,sym2)
  4545		// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4546		for {
  4547			off1 := v.AuxInt
  4548			sym1 := v.Aux
  4549			mem := v.Args[1]
  4550			v_0 := v.Args[0]
  4551			if v_0.Op != OpMIPSMOVWaddr {
  4552				break
  4553			}
  4554			off2 := v_0.AuxInt
  4555			sym2 := v_0.Aux
  4556			ptr := v_0.Args[0]
  4557			if !(canMergeSym(sym1, sym2)) {
  4558				break
  4559			}
  4560			v.reset(OpMIPSMOVHstorezero)
  4561			v.AuxInt = off1 + off2
  4562			v.Aux = mergeSym(sym1, sym2)
  4563			v.AddArg(ptr)
  4564			v.AddArg(mem)
  4565			return true
  4566		}
  4567		return false
  4568	}
  4569	func rewriteValueMIPS_OpMIPSMOVWload_0(v *Value) bool {
  4570		// match: (MOVWload [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4571		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4572		// result: (MOVWload [off1+off2] {sym} ptr mem)
  4573		for {
  4574			off1 := v.AuxInt
  4575			sym := v.Aux
  4576			mem := v.Args[1]
  4577			x := v.Args[0]
  4578			if x.Op != OpMIPSADDconst {
  4579				break
  4580			}
  4581			off2 := x.AuxInt
  4582			ptr := x.Args[0]
  4583			if !(is16Bit(off1+off2) || x.Uses == 1) {
  4584				break
  4585			}
  4586			v.reset(OpMIPSMOVWload)
  4587			v.AuxInt = off1 + off2
  4588			v.Aux = sym
  4589			v.AddArg(ptr)
  4590			v.AddArg(mem)
  4591			return true
  4592		}
  4593		// match: (MOVWload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4594		// cond: canMergeSym(sym1,sym2)
  4595		// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4596		for {
  4597			off1 := v.AuxInt
  4598			sym1 := v.Aux
  4599			mem := v.Args[1]
  4600			v_0 := v.Args[0]
  4601			if v_0.Op != OpMIPSMOVWaddr {
  4602				break
  4603			}
  4604			off2 := v_0.AuxInt
  4605			sym2 := v_0.Aux
  4606			ptr := v_0.Args[0]
  4607			if !(canMergeSym(sym1, sym2)) {
  4608				break
  4609			}
  4610			v.reset(OpMIPSMOVWload)
  4611			v.AuxInt = off1 + off2
  4612			v.Aux = mergeSym(sym1, sym2)
  4613			v.AddArg(ptr)
  4614			v.AddArg(mem)
  4615			return true
  4616		}
  4617		// match: (MOVWload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _))
  4618		// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
  4619		// result: x
  4620		for {
  4621			off := v.AuxInt
  4622			sym := v.Aux
  4623			_ = v.Args[1]
  4624			ptr := v.Args[0]
  4625			v_1 := v.Args[1]
  4626			if v_1.Op != OpMIPSMOVWstore {
  4627				break
  4628			}
  4629			off2 := v_1.AuxInt
  4630			sym2 := v_1.Aux
  4631			_ = v_1.Args[2]
  4632			ptr2 := v_1.Args[0]
  4633			x := v_1.Args[1]
  4634			if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
  4635				break
  4636			}
  4637			v.reset(OpCopy)
  4638			v.Type = x.Type
  4639			v.AddArg(x)
  4640			return true
  4641		}
  4642		return false
  4643	}
  4644	func rewriteValueMIPS_OpMIPSMOVWreg_0(v *Value) bool {
  4645		// match: (MOVWreg x)
  4646		// cond: x.Uses == 1
  4647		// result: (MOVWnop x)
  4648		for {
  4649			x := v.Args[0]
  4650			if !(x.Uses == 1) {
  4651				break
  4652			}
  4653			v.reset(OpMIPSMOVWnop)
  4654			v.AddArg(x)
  4655			return true
  4656		}
  4657		// match: (MOVWreg (MOVWconst [c]))
  4658		// cond:
  4659		// result: (MOVWconst [c])
  4660		for {
  4661			v_0 := v.Args[0]
  4662			if v_0.Op != OpMIPSMOVWconst {
  4663				break
  4664			}
  4665			c := v_0.AuxInt
  4666			v.reset(OpMIPSMOVWconst)
  4667			v.AuxInt = c
  4668			return true
  4669		}
  4670		return false
  4671	}
  4672	func rewriteValueMIPS_OpMIPSMOVWstore_0(v *Value) bool {
  4673		// match: (MOVWstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
  4674		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4675		// result: (MOVWstore [off1+off2] {sym} ptr val mem)
  4676		for {
  4677			off1 := v.AuxInt
  4678			sym := v.Aux
  4679			mem := v.Args[2]
  4680			x := v.Args[0]
  4681			if x.Op != OpMIPSADDconst {
  4682				break
  4683			}
  4684			off2 := x.AuxInt
  4685			ptr := x.Args[0]
  4686			val := v.Args[1]
  4687			if !(is16Bit(off1+off2) || x.Uses == 1) {
  4688				break
  4689			}
  4690			v.reset(OpMIPSMOVWstore)
  4691			v.AuxInt = off1 + off2
  4692			v.Aux = sym
  4693			v.AddArg(ptr)
  4694			v.AddArg(val)
  4695			v.AddArg(mem)
  4696			return true
  4697		}
  4698		// match: (MOVWstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
  4699		// cond: canMergeSym(sym1,sym2)
  4700		// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  4701		for {
  4702			off1 := v.AuxInt
  4703			sym1 := v.Aux
  4704			mem := v.Args[2]
  4705			v_0 := v.Args[0]
  4706			if v_0.Op != OpMIPSMOVWaddr {
  4707				break
  4708			}
  4709			off2 := v_0.AuxInt
  4710			sym2 := v_0.Aux
  4711			ptr := v_0.Args[0]
  4712			val := v.Args[1]
  4713			if !(canMergeSym(sym1, sym2)) {
  4714				break
  4715			}
  4716			v.reset(OpMIPSMOVWstore)
  4717			v.AuxInt = off1 + off2
  4718			v.Aux = mergeSym(sym1, sym2)
  4719			v.AddArg(ptr)
  4720			v.AddArg(val)
  4721			v.AddArg(mem)
  4722			return true
  4723		}
  4724		// match: (MOVWstore [off] {sym} ptr (MOVWconst [0]) mem)
  4725		// cond:
  4726		// result: (MOVWstorezero [off] {sym} ptr mem)
  4727		for {
  4728			off := v.AuxInt
  4729			sym := v.Aux
  4730			mem := v.Args[2]
  4731			ptr := v.Args[0]
  4732			v_1 := v.Args[1]
  4733			if v_1.Op != OpMIPSMOVWconst {
  4734				break
  4735			}
  4736			if v_1.AuxInt != 0 {
  4737				break
  4738			}
  4739			v.reset(OpMIPSMOVWstorezero)
  4740			v.AuxInt = off
  4741			v.Aux = sym
  4742			v.AddArg(ptr)
  4743			v.AddArg(mem)
  4744			return true
  4745		}
  4746		// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
  4747		// cond:
  4748		// result: (MOVWstore [off] {sym} ptr x mem)
  4749		for {
  4750			off := v.AuxInt
  4751			sym := v.Aux
  4752			mem := v.Args[2]
  4753			ptr := v.Args[0]
  4754			v_1 := v.Args[1]
  4755			if v_1.Op != OpMIPSMOVWreg {
  4756				break
  4757			}
  4758			x := v_1.Args[0]
  4759			v.reset(OpMIPSMOVWstore)
  4760			v.AuxInt = off
  4761			v.Aux = sym
  4762			v.AddArg(ptr)
  4763			v.AddArg(x)
  4764			v.AddArg(mem)
  4765			return true
  4766		}
  4767		return false
  4768	}
  4769	func rewriteValueMIPS_OpMIPSMOVWstorezero_0(v *Value) bool {
  4770		// match: (MOVWstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem)
  4771		// cond: (is16Bit(off1+off2) || x.Uses == 1)
  4772		// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
  4773		for {
  4774			off1 := v.AuxInt
  4775			sym := v.Aux
  4776			mem := v.Args[1]
  4777			x := v.Args[0]
  4778			if x.Op != OpMIPSADDconst {
  4779				break
  4780			}
  4781			off2 := x.AuxInt
  4782			ptr := x.Args[0]
  4783			if !(is16Bit(off1+off2) || x.Uses == 1) {
  4784				break
  4785			}
  4786			v.reset(OpMIPSMOVWstorezero)
  4787			v.AuxInt = off1 + off2
  4788			v.Aux = sym
  4789			v.AddArg(ptr)
  4790			v.AddArg(mem)
  4791			return true
  4792		}
  4793		// match: (MOVWstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem)
  4794		// cond: canMergeSym(sym1,sym2)
  4795		// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4796		for {
  4797			off1 := v.AuxInt
  4798			sym1 := v.Aux
  4799			mem := v.Args[1]
  4800			v_0 := v.Args[0]
  4801			if v_0.Op != OpMIPSMOVWaddr {
  4802				break
  4803			}
  4804			off2 := v_0.AuxInt
  4805			sym2 := v_0.Aux
  4806			ptr := v_0.Args[0]
  4807			if !(canMergeSym(sym1, sym2)) {
  4808				break
  4809			}
  4810			v.reset(OpMIPSMOVWstorezero)
  4811			v.AuxInt = off1 + off2
  4812			v.Aux = mergeSym(sym1, sym2)
  4813			v.AddArg(ptr)
  4814			v.AddArg(mem)
  4815			return true
  4816		}
  4817		return false
  4818	}
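// rewriteValueMIPS_OpMIPSMUL_0 simplifies multiplications with constant
// operands: a factor of 0, 1, or -1 yields a constant zero, a copy, or a
// negation; a power-of-two factor is strength-reduced to a left shift; and a
// product of two constants is folded at compile time.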
  4819	func rewriteValueMIPS_OpMIPSMUL_0(v *Value) bool {
  4820		// match: (MUL (MOVWconst [0]) _)
  4821		// cond:
  4822		// result: (MOVWconst [0])
  4823		for {
  4824			_ = v.Args[1]
  4825			v_0 := v.Args[0]
  4826			if v_0.Op != OpMIPSMOVWconst {
  4827				break
  4828			}
  4829			if v_0.AuxInt != 0 {
  4830				break
  4831			}
  4832			v.reset(OpMIPSMOVWconst)
  4833			v.AuxInt = 0
  4834			return true
  4835		}
  4836		// match: (MUL _ (MOVWconst [0]))
  4837		// cond:
  4838		// result: (MOVWconst [0])
  4839		for {
  4840			_ = v.Args[1]
  4841			v_1 := v.Args[1]
  4842			if v_1.Op != OpMIPSMOVWconst {
  4843				break
  4844			}
  4845			if v_1.AuxInt != 0 {
  4846				break
  4847			}
  4848			v.reset(OpMIPSMOVWconst)
  4849			v.AuxInt = 0
  4850			return true
  4851		}
  4852		// match: (MUL (MOVWconst [1]) x)
  4853		// cond:
  4854		// result: x
  4855		for {
  4856			x := v.Args[1]
  4857			v_0 := v.Args[0]
  4858			if v_0.Op != OpMIPSMOVWconst {
  4859				break
  4860			}
  4861			if v_0.AuxInt != 1 {
  4862				break
  4863			}
  4864			v.reset(OpCopy)
  4865			v.Type = x.Type
  4866			v.AddArg(x)
  4867			return true
  4868		}
  4869		// match: (MUL x (MOVWconst [1]))
  4870		// cond:
  4871		// result: x
  4872		for {
  4873			_ = v.Args[1]
  4874			x := v.Args[0]
  4875			v_1 := v.Args[1]
  4876			if v_1.Op != OpMIPSMOVWconst {
  4877				break
  4878			}
  4879			if v_1.AuxInt != 1 {
  4880				break
  4881			}
  4882			v.reset(OpCopy)
  4883			v.Type = x.Type
  4884			v.AddArg(x)
  4885			return true
  4886		}
  4887		// match: (MUL (MOVWconst [-1]) x)
  4888		// cond:
  4889		// result: (NEG x)
  4890		for {
  4891			x := v.Args[1]
  4892			v_0 := v.Args[0]
  4893			if v_0.Op != OpMIPSMOVWconst {
  4894				break
  4895			}
  4896			if v_0.AuxInt != -1 {
  4897				break
  4898			}
  4899			v.reset(OpMIPSNEG)
  4900			v.AddArg(x)
  4901			return true
  4902		}
  4903		// match: (MUL x (MOVWconst [-1]))
  4904		// cond:
  4905		// result: (NEG x)
  4906		for {
  4907			_ = v.Args[1]
  4908			x := v.Args[0]
  4909			v_1 := v.Args[1]
  4910			if v_1.Op != OpMIPSMOVWconst {
  4911				break
  4912			}
  4913			if v_1.AuxInt != -1 {
  4914				break
  4915			}
  4916			v.reset(OpMIPSNEG)
  4917			v.AddArg(x)
  4918			return true
  4919		}
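	// Illustrative instance of the next two rules: (MUL (MOVWconst [8]) x)
	// satisfies isPowerOfTwo(8) and becomes (SLLconst [3] x), since
	// log2(8) == 3 and x<<3 == x*8.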
  4920		// match: (MUL (MOVWconst [c]) x)
  4921		// cond: isPowerOfTwo(int64(uint32(c)))
  4922		// result: (SLLconst [log2(int64(uint32(c)))] x)
  4923		for {
  4924			x := v.Args[1]
  4925			v_0 := v.Args[0]
  4926			if v_0.Op != OpMIPSMOVWconst {
  4927				break
  4928			}
  4929			c := v_0.AuxInt
  4930			if !(isPowerOfTwo(int64(uint32(c)))) {
  4931				break
  4932			}
  4933			v.reset(OpMIPSSLLconst)
  4934			v.AuxInt = log2(int64(uint32(c)))
  4935			v.AddArg(x)
  4936			return true
  4937		}
  4938		// match: (MUL x (MOVWconst [c]))
  4939		// cond: isPowerOfTwo(int64(uint32(c)))
  4940		// result: (SLLconst [log2(int64(uint32(c)))] x)
  4941		for {
  4942			_ = v.Args[1]
  4943			x := v.Args[0]
  4944			v_1 := v.Args[1]
  4945			if v_1.Op != OpMIPSMOVWconst {
  4946				break
  4947			}
  4948			c := v_1.AuxInt
  4949			if !(isPowerOfTwo(int64(uint32(c)))) {
  4950				break
  4951			}
  4952			v.reset(OpMIPSSLLconst)
  4953			v.AuxInt = log2(int64(uint32(c)))
  4954			v.AddArg(x)
  4955			return true
  4956		}
  4957		// match: (MUL (MOVWconst [c]) (MOVWconst [d]))
  4958		// cond:
  4959		// result: (MOVWconst [int64(int32(c)*int32(d))])
  4960		for {
  4961			_ = v.Args[1]
  4962			v_0 := v.Args[0]
  4963			if v_0.Op != OpMIPSMOVWconst {
  4964				break
  4965			}
  4966			c := v_0.AuxInt
  4967			v_1 := v.Args[1]
  4968			if v_1.Op != OpMIPSMOVWconst {
  4969				break
  4970			}
  4971			d := v_1.AuxInt
  4972			v.reset(OpMIPSMOVWconst)
  4973			v.AuxInt = int64(int32(c) * int32(d))
  4974			return true
  4975		}
  4976		// match: (MUL (MOVWconst [d]) (MOVWconst [c]))
  4977		// cond:
  4978		// result: (MOVWconst [int64(int32(c)*int32(d))])
  4979		for {
  4980			_ = v.Args[1]
  4981			v_0 := v.Args[0]
  4982			if v_0.Op != OpMIPSMOVWconst {
  4983				break
  4984			}
  4985			d := v_0.AuxInt
  4986			v_1 := v.Args[1]
  4987			if v_1.Op != OpMIPSMOVWconst {
  4988				break
  4989			}
  4990			c := v_1.AuxInt
  4991			v.reset(OpMIPSMOVWconst)
  4992			v.AuxInt = int64(int32(c) * int32(d))
  4993			return true
  4994		}
  4995		return false
  4996	}
  4997	func rewriteValueMIPS_OpMIPSNEG_0(v *Value) bool {
  4998		// match: (NEG (MOVWconst [c]))
  4999		// cond:
  5000		// result: (MOVWconst [int64(int32(-c))])
  5001		for {
  5002			v_0 := v.Args[0]
  5003			if v_0.Op != OpMIPSMOVWconst {
  5004				break
  5005			}
  5006			c := v_0.AuxInt
  5007			v.reset(OpMIPSMOVWconst)
  5008			v.AuxInt = int64(int32(-c))
  5009			return true
  5010		}
  5011		return false
  5012	}
  5013	func rewriteValueMIPS_OpMIPSNOR_0(v *Value) bool {
  5014		// match: (NOR x (MOVWconst [c]))
  5015		// cond:
  5016		// result: (NORconst [c] x)
  5017		for {
  5018			_ = v.Args[1]
  5019			x := v.Args[0]
  5020			v_1 := v.Args[1]
  5021			if v_1.Op != OpMIPSMOVWconst {
  5022				break
  5023			}
  5024			c := v_1.AuxInt
  5025			v.reset(OpMIPSNORconst)
  5026			v.AuxInt = c
  5027			v.AddArg(x)
  5028			return true
  5029		}
  5030		// match: (NOR (MOVWconst [c]) x)
  5031		// cond:
  5032		// result: (NORconst [c] x)
  5033		for {
  5034			x := v.Args[1]
  5035			v_0 := v.Args[0]
  5036			if v_0.Op != OpMIPSMOVWconst {
  5037				break
  5038			}
  5039			c := v_0.AuxInt
  5040			v.reset(OpMIPSNORconst)
  5041			v.AuxInt = c
  5042			v.AddArg(x)
  5043			return true
  5044		}
  5045		return false
  5046	}
  5047	func rewriteValueMIPS_OpMIPSNORconst_0(v *Value) bool {
  5048		// match: (NORconst [c] (MOVWconst [d]))
  5049		// cond:
  5050		// result: (MOVWconst [^(c|d)])
  5051		for {
  5052			c := v.AuxInt
  5053			v_0 := v.Args[0]
  5054			if v_0.Op != OpMIPSMOVWconst {
  5055				break
  5056			}
  5057			d := v_0.AuxInt
  5058			v.reset(OpMIPSMOVWconst)
  5059			v.AuxInt = ^(c | d)
  5060			return true
  5061		}
  5062		return false
  5063	}
  5064	func rewriteValueMIPS_OpMIPSOR_0(v *Value) bool {
  5065		b := v.Block
  5066		// match: (OR x (MOVWconst [c]))
  5067		// cond:
  5068		// result: (ORconst [c] x)
  5069		for {
  5070			_ = v.Args[1]
  5071			x := v.Args[0]
  5072			v_1 := v.Args[1]
  5073			if v_1.Op != OpMIPSMOVWconst {
  5074				break
  5075			}
  5076			c := v_1.AuxInt
  5077			v.reset(OpMIPSORconst)
  5078			v.AuxInt = c
  5079			v.AddArg(x)
  5080			return true
  5081		}
  5082		// match: (OR (MOVWconst [c]) x)
  5083		// cond:
  5084		// result: (ORconst [c] x)
  5085		for {
  5086			x := v.Args[1]
  5087			v_0 := v.Args[0]
  5088			if v_0.Op != OpMIPSMOVWconst {
  5089				break
  5090			}
  5091			c := v_0.AuxInt
  5092			v.reset(OpMIPSORconst)
  5093			v.AuxInt = c
  5094			v.AddArg(x)
  5095			return true
  5096		}
  5097		// match: (OR x x)
  5098		// cond:
  5099		// result: x
  5100		for {
  5101			x := v.Args[1]
  5102			if x != v.Args[0] {
  5103				break
  5104			}
  5105			v.reset(OpCopy)
  5106			v.Type = x.Type
  5107			v.AddArg(x)
  5108			return true
  5109		}
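	// SGTUzero x is 1 exactly when x != 0, so the OR of two such flags can be
	// computed with a single test of x|y, as the two commuted rules below do.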
  5110		// match: (OR (SGTUzero x) (SGTUzero y))
  5111		// cond:
  5112		// result: (SGTUzero (OR <x.Type> x y))
  5113		for {
  5114			_ = v.Args[1]
  5115			v_0 := v.Args[0]
  5116			if v_0.Op != OpMIPSSGTUzero {
  5117				break
  5118			}
  5119			x := v_0.Args[0]
  5120			v_1 := v.Args[1]
  5121			if v_1.Op != OpMIPSSGTUzero {
  5122				break
  5123			}
  5124			y := v_1.Args[0]
  5125			v.reset(OpMIPSSGTUzero)
  5126			v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  5127			v0.AddArg(x)
  5128			v0.AddArg(y)
  5129			v.AddArg(v0)
  5130			return true
  5131		}
  5132		// match: (OR (SGTUzero y) (SGTUzero x))
  5133		// cond:
  5134		// result: (SGTUzero (OR <x.Type> x y))
  5135		for {
  5136			_ = v.Args[1]
  5137			v_0 := v.Args[0]
  5138			if v_0.Op != OpMIPSSGTUzero {
  5139				break
  5140			}
  5141			y := v_0.Args[0]
  5142			v_1 := v.Args[1]
  5143			if v_1.Op != OpMIPSSGTUzero {
  5144				break
  5145			}
  5146			x := v_1.Args[0]
  5147			v.reset(OpMIPSSGTUzero)
  5148			v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type)
  5149			v0.AddArg(x)
  5150			v0.AddArg(y)
  5151			v.AddArg(v0)
  5152			return true
  5153		}
  5154		return false
  5155	}
  5156	func rewriteValueMIPS_OpMIPSORconst_0(v *Value) bool {
  5157		// match: (ORconst [0] x)
  5158		// cond:
  5159		// result: x
  5160		for {
  5161			if v.AuxInt != 0 {
  5162				break
  5163			}
  5164			x := v.Args[0]
  5165			v.reset(OpCopy)
  5166			v.Type = x.Type
  5167			v.AddArg(x)
  5168			return true
  5169		}
  5170		// match: (ORconst [-1] _)
  5171		// cond:
  5172		// result: (MOVWconst [-1])
  5173		for {
  5174			if v.AuxInt != -1 {
  5175				break
  5176			}
  5177			v.reset(OpMIPSMOVWconst)
  5178			v.AuxInt = -1
  5179			return true
  5180		}
  5181		// match: (ORconst [c] (MOVWconst [d]))
  5182		// cond:
  5183		// result: (MOVWconst [c|d])
  5184		for {
  5185			c := v.AuxInt
  5186			v_0 := v.Args[0]
  5187			if v_0.Op != OpMIPSMOVWconst {
  5188				break
  5189			}
  5190			d := v_0.AuxInt
  5191			v.reset(OpMIPSMOVWconst)
  5192			v.AuxInt = c | d
  5193			return true
  5194		}
  5195		// match: (ORconst [c] (ORconst [d] x))
  5196		// cond:
  5197		// result: (ORconst [c|d] x)
  5198		for {
  5199			c := v.AuxInt
  5200			v_0 := v.Args[0]
  5201			if v_0.Op != OpMIPSORconst {
  5202				break
  5203			}
  5204			d := v_0.AuxInt
  5205			x := v_0.Args[0]
  5206			v.reset(OpMIPSORconst)
  5207			v.AuxInt = c | d
  5208			v.AddArg(x)
  5209			return true
  5210		}
  5211		return false
  5212	}
  5213	func rewriteValueMIPS_OpMIPSSGT_0(v *Value) bool {
  5214		// match: (SGT (MOVWconst [c]) x)
  5215		// cond:
  5216		// result: (SGTconst [c] x)
  5217		for {
  5218			x := v.Args[1]
  5219			v_0 := v.Args[0]
  5220			if v_0.Op != OpMIPSMOVWconst {
  5221				break
  5222			}
  5223			c := v_0.AuxInt
  5224			v.reset(OpMIPSSGTconst)
  5225			v.AuxInt = c
  5226			v.AddArg(x)
  5227			return true
  5228		}
  5229		// match: (SGT x (MOVWconst [0]))
  5230		// cond:
  5231		// result: (SGTzero x)
  5232		for {
  5233			_ = v.Args[1]
  5234			x := v.Args[0]
  5235			v_1 := v.Args[1]
  5236			if v_1.Op != OpMIPSMOVWconst {
  5237				break
  5238			}
  5239			if v_1.AuxInt != 0 {
  5240				break
  5241			}
  5242			v.reset(OpMIPSSGTzero)
  5243			v.AddArg(x)
  5244			return true
  5245		}
  5246		return false
  5247	}
  5248	func rewriteValueMIPS_OpMIPSSGTU_0(v *Value) bool {
  5249		// match: (SGTU (MOVWconst [c]) x)
  5250		// cond:
  5251		// result: (SGTUconst [c] x)
  5252		for {
  5253			x := v.Args[1]
  5254			v_0 := v.Args[0]
  5255			if v_0.Op != OpMIPSMOVWconst {
  5256				break
  5257			}
  5258			c := v_0.AuxInt
  5259			v.reset(OpMIPSSGTUconst)
  5260			v.AuxInt = c
  5261			v.AddArg(x)
  5262			return true
  5263		}
  5264		// match: (SGTU x (MOVWconst [0]))
  5265		// cond:
  5266		// result: (SGTUzero x)
  5267		for {
  5268			_ = v.Args[1]
  5269			x := v.Args[0]
  5270			v_1 := v.Args[1]
  5271			if v_1.Op != OpMIPSMOVWconst {
  5272				break
  5273			}
  5274			if v_1.AuxInt != 0 {
  5275				break
  5276			}
  5277			v.reset(OpMIPSSGTUzero)
  5278			v.AddArg(x)
  5279			return true
  5280		}
  5281		return false
  5282	}
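// SGTUconst [c] x computes the flag uint32(c) > uint32(x). Beyond the fully
// constant case, the rules below settle the comparison whenever an upper bound
// on x is known from a zero-extension, an ANDconst mask, or an SRLconst shift.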
  5283	func rewriteValueMIPS_OpMIPSSGTUconst_0(v *Value) bool {
  5284		// match: (SGTUconst [c] (MOVWconst [d]))
  5285		// cond: uint32(c)>uint32(d)
  5286		// result: (MOVWconst [1])
  5287		for {
  5288			c := v.AuxInt
  5289			v_0 := v.Args[0]
  5290			if v_0.Op != OpMIPSMOVWconst {
  5291				break
  5292			}
  5293			d := v_0.AuxInt
  5294			if !(uint32(c) > uint32(d)) {
  5295				break
  5296			}
  5297			v.reset(OpMIPSMOVWconst)
  5298			v.AuxInt = 1
  5299			return true
  5300		}
  5301		// match: (SGTUconst [c] (MOVWconst [d]))
  5302		// cond: uint32(c)<=uint32(d)
  5303		// result: (MOVWconst [0])
  5304		for {
  5305			c := v.AuxInt
  5306			v_0 := v.Args[0]
  5307			if v_0.Op != OpMIPSMOVWconst {
  5308				break
  5309			}
  5310			d := v_0.AuxInt
  5311			if !(uint32(c) <= uint32(d)) {
  5312				break
  5313			}
  5314			v.reset(OpMIPSMOVWconst)
  5315			v.AuxInt = 0
  5316			return true
  5317		}
  5318		// match: (SGTUconst [c] (MOVBUreg _))
  5319		// cond: 0xff < uint32(c)
  5320		// result: (MOVWconst [1])
  5321		for {
  5322			c := v.AuxInt
  5323			v_0 := v.Args[0]
  5324			if v_0.Op != OpMIPSMOVBUreg {
  5325				break
  5326			}
  5327			if !(0xff < uint32(c)) {
  5328				break
  5329			}
  5330			v.reset(OpMIPSMOVWconst)
  5331			v.AuxInt = 1
  5332			return true
  5333		}
  5334		// match: (SGTUconst [c] (MOVHUreg _))
  5335		// cond: 0xffff < uint32(c)
  5336		// result: (MOVWconst [1])
  5337		for {
  5338			c := v.AuxInt
  5339			v_0 := v.Args[0]
  5340			if v_0.Op != OpMIPSMOVHUreg {
  5341				break
  5342			}
  5343			if !(0xffff < uint32(c)) {
  5344				break
  5345			}
  5346			v.reset(OpMIPSMOVWconst)
  5347			v.AuxInt = 1
  5348			return true
  5349		}
  5350		// match: (SGTUconst [c] (ANDconst [m] _))
  5351		// cond: uint32(m) < uint32(c)
  5352		// result: (MOVWconst [1])
  5353		for {
  5354			c := v.AuxInt
  5355			v_0 := v.Args[0]
  5356			if v_0.Op != OpMIPSANDconst {
  5357				break
  5358			}
  5359			m := v_0.AuxInt
  5360			if !(uint32(m) < uint32(c)) {
  5361				break
  5362			}
  5363			v.reset(OpMIPSMOVWconst)
  5364			v.AuxInt = 1
  5365			return true
  5366		}
  5367		// match: (SGTUconst [c] (SRLconst _ [d]))
  5368		// cond: uint32(d) <= 31 && 0xffffffff>>uint32(d) < uint32(c)
  5369		// result: (MOVWconst [1])
  5370		for {
  5371			c := v.AuxInt
  5372			v_0 := v.Args[0]
  5373			if v_0.Op != OpMIPSSRLconst {
  5374				break
  5375			}
  5376			d := v_0.AuxInt
  5377			if !(uint32(d) <= 31 && 0xffffffff>>uint32(d) < uint32(c)) {
  5378				break
  5379			}
  5380			v.reset(OpMIPSMOVWconst)
  5381			v.AuxInt = 1
  5382			return true
  5383		}
  5384		return false
  5385	}
  5386	func rewriteValueMIPS_OpMIPSSGTUzero_0(v *Value) bool {
  5387		// match: (SGTUzero (MOVWconst [d]))
  5388		// cond: uint32(d) != 0
  5389		// result: (MOVWconst [1])
  5390		for {
  5391			v_0 := v.Args[0]
  5392			if v_0.Op != OpMIPSMOVWconst {
  5393				break
  5394			}
  5395			d := v_0.AuxInt
  5396			if !(uint32(d) != 0) {
  5397				break
  5398			}
  5399			v.reset(OpMIPSMOVWconst)
  5400			v.AuxInt = 1
  5401			return true
  5402		}
  5403		// match: (SGTUzero (MOVWconst [d]))
  5404		// cond: uint32(d) == 0
  5405		// result: (MOVWconst [0])
  5406		for {
  5407			v_0 := v.Args[0]
  5408			if v_0.Op != OpMIPSMOVWconst {
  5409				break
  5410			}
  5411			d := v_0.AuxInt
  5412			if !(uint32(d) == 0) {
  5413				break
  5414			}
  5415			v.reset(OpMIPSMOVWconst)
  5416			v.AuxInt = 0
  5417			return true
  5418		}
  5419		return false
  5420	}
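// SGTconst [c] x computes the flag int32(c) > int32(x). The two constant rules
// fold it outright; the remaining rules (continued in the _10 helper) decide it
// statically when the operand's range is bounded by a sign- or zero-extension,
// an ANDconst mask, or an SRLconst shift.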
  5421	func rewriteValueMIPS_OpMIPSSGTconst_0(v *Value) bool {
  5422		// match: (SGTconst [c] (MOVWconst [d]))
  5423		// cond: int32(c) > int32(d)
  5424		// result: (MOVWconst [1])
  5425		for {
  5426			c := v.AuxInt
  5427			v_0 := v.Args[0]
  5428			if v_0.Op != OpMIPSMOVWconst {
  5429				break
  5430			}
  5431			d := v_0.AuxInt
  5432			if !(int32(c) > int32(d)) {
  5433				break
  5434			}
  5435			v.reset(OpMIPSMOVWconst)
  5436			v.AuxInt = 1
  5437			return true
  5438		}
  5439		// match: (SGTconst [c] (MOVWconst [d]))
  5440		// cond: int32(c) <= int32(d)
  5441		// result: (MOVWconst [0])
  5442		for {
  5443			c := v.AuxInt
  5444			v_0 := v.Args[0]
  5445			if v_0.Op != OpMIPSMOVWconst {
  5446				break
  5447			}
  5448			d := v_0.AuxInt
  5449			if !(int32(c) <= int32(d)) {
  5450				break
  5451			}
  5452			v.reset(OpMIPSMOVWconst)
  5453			v.AuxInt = 0
  5454			return true
  5455		}
  5456		// match: (SGTconst [c] (MOVBreg _))
  5457		// cond: 0x7f < int32(c)
  5458		// result: (MOVWconst [1])
  5459		for {
  5460			c := v.AuxInt
  5461			v_0 := v.Args[0]
  5462			if v_0.Op != OpMIPSMOVBreg {
  5463				break
  5464			}
  5465			if !(0x7f < int32(c)) {
  5466				break
  5467			}
  5468			v.reset(OpMIPSMOVWconst)
  5469			v.AuxInt = 1
  5470			return true
  5471		}
  5472		// match: (SGTconst [c] (MOVBreg _))
  5473		// cond: int32(c) <= -0x80
  5474		// result: (MOVWconst [0])
  5475		for {
  5476			c := v.AuxInt
  5477			v_0 := v.Args[0]
  5478			if v_0.Op != OpMIPSMOVBreg {
  5479				break
  5480			}
  5481			if !(int32(c) <= -0x80) {
  5482				break
  5483			}
  5484			v.reset(OpMIPSMOVWconst)
  5485			v.AuxInt = 0
  5486			return true
  5487		}
  5488		// match: (SGTconst [c] (MOVBUreg _))
  5489		// cond: 0xff < int32(c)
  5490		// result: (MOVWconst [1])
  5491		for {
  5492			c := v.AuxInt
  5493			v_0 := v.Args[0]
  5494			if v_0.Op != OpMIPSMOVBUreg {
  5495				break
  5496			}
  5497			if !(0xff < int32(c)) {
  5498				break
  5499			}
  5500			v.reset(OpMIPSMOVWconst)
  5501			v.AuxInt = 1
  5502			return true
  5503		}
  5504		// match: (SGTconst [c] (MOVBUreg _))
  5505		// cond: int32(c) < 0
  5506		// result: (MOVWconst [0])
  5507		for {
  5508			c := v.AuxInt
  5509			v_0 := v.Args[0]
  5510			if v_0.Op != OpMIPSMOVBUreg {
  5511				break
  5512			}
  5513			if !(int32(c) < 0) {
  5514				break
  5515			}
  5516			v.reset(OpMIPSMOVWconst)
  5517			v.AuxInt = 0
  5518			return true
  5519		}
  5520		// match: (SGTconst [c] (MOVHreg _))
  5521		// cond: 0x7fff < int32(c)
  5522		// result: (MOVWconst [1])
  5523		for {
  5524			c := v.AuxInt
  5525			v_0 := v.Args[0]
  5526			if v_0.Op != OpMIPSMOVHreg {
  5527				break
  5528			}
  5529			if !(0x7fff < int32(c)) {
  5530				break
  5531			}
  5532			v.reset(OpMIPSMOVWconst)
  5533			v.AuxInt = 1
  5534			return true
  5535		}
  5536		// match: (SGTconst [c] (MOVHreg _))
  5537		// cond: int32(c) <= -0x8000
  5538		// result: (MOVWconst [0])
  5539		for {
  5540			c := v.AuxInt
  5541			v_0 := v.Args[0]
  5542			if v_0.Op != OpMIPSMOVHreg {
  5543				break
  5544			}
  5545			if !(int32(c) <= -0x8000) {
  5546				break
  5547			}
  5548			v.reset(OpMIPSMOVWconst)
  5549			v.AuxInt = 0
  5550			return true
  5551		}
  5552		// match: (SGTconst [c] (MOVHUreg _))
  5553		// cond: 0xffff < int32(c)
  5554		// result: (MOVWconst [1])
  5555		for {
  5556			c := v.AuxInt
  5557			v_0 := v.Args[0]
  5558			if v_0.Op != OpMIPSMOVHUreg {
  5559				break
  5560			}
  5561			if !(0xffff < int32(c)) {
  5562				break
  5563			}
  5564			v.reset(OpMIPSMOVWconst)
  5565			v.AuxInt = 1
  5566			return true
  5567		}
  5568		// match: (SGTconst [c] (MOVHUreg _))
  5569		// cond: int32(c) < 0
  5570		// result: (MOVWconst [0])
  5571		for {
  5572			c := v.AuxInt
  5573			v_0 := v.Args[0]
  5574			if v_0.Op != OpMIPSMOVHUreg {
  5575				break
  5576			}
  5577			if !(int32(c) < 0) {
  5578				break
  5579			}
  5580			v.reset(OpMIPSMOVWconst)
  5581			v.AuxInt = 0
  5582			return true
  5583		}
  5584		return false
  5585	}
  5586	func rewriteValueMIPS_OpMIPSSGTconst_10(v *Value) bool {
  5587		// match: (SGTconst [c] (ANDconst [m] _))
  5588		// cond: 0 <= int32(m) && int32(m) < int32(c)
  5589		// result: (MOVWconst [1])
  5590		for {
  5591			c := v.AuxInt
  5592			v_0 := v.Args[0]
  5593			if v_0.Op != OpMIPSANDconst {
  5594				break
  5595			}
  5596			m := v_0.AuxInt
  5597			if !(0 <= int32(m) && int32(m) < int32(c)) {
  5598				break
  5599			}
  5600			v.reset(OpMIPSMOVWconst)
  5601			v.AuxInt = 1
  5602			return true
  5603		}
  5604		// match: (SGTconst [c] (SRLconst _ [d]))
  5605		// cond: 0 <= int32(c) && uint32(d) <= 31 && 0xffffffff>>uint32(d) < uint32(c)
  5606		// result: (MOVWconst [1])
  5607		for {
  5608			c := v.AuxInt
  5609			v_0 := v.Args[0]
  5610			if v_0.Op != OpMIPSSRLconst {
  5611				break
  5612			}
  5613			d := v_0.AuxInt
  5614			if !(0 <= int32(c) && uint32(d) <= 31 && 0xffffffff>>uint32(d) < uint32(c)) {
  5615				break
  5616			}
  5617			v.reset(OpMIPSMOVWconst)
  5618			v.AuxInt = 1
  5619			return true
  5620		}
  5621		return false
  5622	}
  5623	func rewriteValueMIPS_OpMIPSSGTzero_0(v *Value) bool {
  5624		// match: (SGTzero (MOVWconst [d]))
  5625		// cond: int32(d) > 0
  5626		// result: (MOVWconst [1])
  5627		for {
  5628			v_0 := v.Args[0]
  5629			if v_0.Op != OpMIPSMOVWconst {
  5630				break
  5631			}
  5632			d := v_0.AuxInt
  5633			if !(int32(d) > 0) {
  5634				break
  5635			}
  5636			v.reset(OpMIPSMOVWconst)
  5637			v.AuxInt = 1
  5638			return true
  5639		}
  5640		// match: (SGTzero (MOVWconst [d]))
  5641		// cond: int32(d) <= 0
  5642		// result: (MOVWconst [0])
  5643		for {
  5644			v_0 := v.Args[0]
  5645			if v_0.Op != OpMIPSMOVWconst {
  5646				break
  5647			}
  5648			d := v_0.AuxInt
  5649			if !(int32(d) <= 0) {
  5650				break
  5651			}
  5652			v.reset(OpMIPSMOVWconst)
  5653			v.AuxInt = 0
  5654			return true
  5655		}
  5656		return false
  5657	}
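// For the shifts below, a constant count of 32 or more is resolved statically:
// the logical shifts SLL and SRL produce 0, while the arithmetic right shift
// SRA is clamped to a count of 31, leaving only copies of the sign bit.
// Smaller constant counts are folded into the corresponding *const form.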
  5658	func rewriteValueMIPS_OpMIPSSLL_0(v *Value) bool {
  5659		// match: (SLL _ (MOVWconst [c]))
  5660		// cond: uint32(c)>=32
  5661		// result: (MOVWconst [0])
  5662		for {
  5663			_ = v.Args[1]
  5664			v_1 := v.Args[1]
  5665			if v_1.Op != OpMIPSMOVWconst {
  5666				break
  5667			}
  5668			c := v_1.AuxInt
  5669			if !(uint32(c) >= 32) {
  5670				break
  5671			}
  5672			v.reset(OpMIPSMOVWconst)
  5673			v.AuxInt = 0
  5674			return true
  5675		}
  5676		// match: (SLL x (MOVWconst [c]))
  5677		// cond:
  5678		// result: (SLLconst x [c])
  5679		for {
  5680			_ = v.Args[1]
  5681			x := v.Args[0]
  5682			v_1 := v.Args[1]
  5683			if v_1.Op != OpMIPSMOVWconst {
  5684				break
  5685			}
  5686			c := v_1.AuxInt
  5687			v.reset(OpMIPSSLLconst)
  5688			v.AuxInt = c
  5689			v.AddArg(x)
  5690			return true
  5691		}
  5692		return false
  5693	}
  5694	func rewriteValueMIPS_OpMIPSSLLconst_0(v *Value) bool {
  5695		// match: (SLLconst [c] (MOVWconst [d]))
  5696		// cond:
  5697		// result: (MOVWconst [int64(int32(uint32(d)<<uint32(c)))])
  5698		for {
  5699			c := v.AuxInt
  5700			v_0 := v.Args[0]
  5701			if v_0.Op != OpMIPSMOVWconst {
  5702				break
  5703			}
  5704			d := v_0.AuxInt
  5705			v.reset(OpMIPSMOVWconst)
  5706			v.AuxInt = int64(int32(uint32(d) << uint32(c)))
  5707			return true
  5708		}
  5709		return false
  5710	}
  5711	func rewriteValueMIPS_OpMIPSSRA_0(v *Value) bool {
  5712		// match: (SRA x (MOVWconst [c]))
  5713		// cond: uint32(c)>=32
  5714		// result: (SRAconst x [31])
  5715		for {
  5716			_ = v.Args[1]
  5717			x := v.Args[0]
  5718			v_1 := v.Args[1]
  5719			if v_1.Op != OpMIPSMOVWconst {
  5720				break
  5721			}
  5722			c := v_1.AuxInt
  5723			if !(uint32(c) >= 32) {
  5724				break
  5725			}
  5726			v.reset(OpMIPSSRAconst)
  5727			v.AuxInt = 31
  5728			v.AddArg(x)
  5729			return true
  5730		}
  5731		// match: (SRA x (MOVWconst [c]))
  5732		// cond:
  5733		// result: (SRAconst x [c])
  5734		for {
  5735			_ = v.Args[1]
  5736			x := v.Args[0]
  5737			v_1 := v.Args[1]
  5738			if v_1.Op != OpMIPSMOVWconst {
  5739				break
  5740			}
  5741			c := v_1.AuxInt
  5742			v.reset(OpMIPSSRAconst)
  5743			v.AuxInt = c
  5744			v.AddArg(x)
  5745			return true
  5746		}
  5747		return false
  5748	}
  5749	func rewriteValueMIPS_OpMIPSSRAconst_0(v *Value) bool {
  5750		// match: (SRAconst [c] (MOVWconst [d]))
  5751		// cond:
  5752		// result: (MOVWconst [int64(int32(d)>>uint32(c))])
  5753		for {
  5754			c := v.AuxInt
  5755			v_0 := v.Args[0]
  5756			if v_0.Op != OpMIPSMOVWconst {
  5757				break
  5758			}
  5759			d := v_0.AuxInt
  5760			v.reset(OpMIPSMOVWconst)
  5761			v.AuxInt = int64(int32(d) >> uint32(c))
  5762			return true
  5763		}
  5764		return false
  5765	}
  5766	func rewriteValueMIPS_OpMIPSSRL_0(v *Value) bool {
  5767		// match: (SRL _ (MOVWconst [c]))
  5768		// cond: uint32(c)>=32
  5769		// result: (MOVWconst [0])
  5770		for {
  5771			_ = v.Args[1]
  5772			v_1 := v.Args[1]
  5773			if v_1.Op != OpMIPSMOVWconst {
  5774				break
  5775			}
  5776			c := v_1.AuxInt
  5777			if !(uint32(c) >= 32) {
  5778				break
  5779			}
  5780			v.reset(OpMIPSMOVWconst)
  5781			v.AuxInt = 0
  5782			return true
  5783		}
  5784		// match: (SRL x (MOVWconst [c]))
  5785		// cond:
  5786		// result: (SRLconst x [c])
  5787		for {
  5788			_ = v.Args[1]
  5789			x := v.Args[0]
  5790			v_1 := v.Args[1]
  5791			if v_1.Op != OpMIPSMOVWconst {
  5792				break
  5793			}
  5794			c := v_1.AuxInt
  5795			v.reset(OpMIPSSRLconst)
  5796			v.AuxInt = c
  5797			v.AddArg(x)
  5798			return true
  5799		}
  5800		return false
  5801	}
  5802	func rewriteValueMIPS_OpMIPSSRLconst_0(v *Value) bool {
  5803		// match: (SRLconst [c] (MOVWconst [d]))
  5804		// cond:
  5805		// result: (MOVWconst [int64(uint32(d)>>uint32(c))])
  5806		for {
  5807			c := v.AuxInt
  5808			v_0 := v.Args[0]
  5809			if v_0.Op != OpMIPSMOVWconst {
  5810				break
  5811			}
  5812			d := v_0.AuxInt
  5813			v.reset(OpMIPSMOVWconst)
  5814			v.AuxInt = int64(uint32(d) >> uint32(c))
  5815			return true
  5816		}
  5817		return false
  5818	}
  5819	func rewriteValueMIPS_OpMIPSSUB_0(v *Value) bool {
  5820		// match: (SUB x (MOVWconst [c]))
  5821		// cond:
  5822		// result: (SUBconst [c] x)
  5823		for {
  5824			_ = v.Args[1]
  5825			x := v.Args[0]
  5826			v_1 := v.Args[1]
  5827			if v_1.Op != OpMIPSMOVWconst {
  5828				break
  5829			}
  5830			c := v_1.AuxInt
  5831			v.reset(OpMIPSSUBconst)
  5832			v.AuxInt = c
  5833			v.AddArg(x)
  5834			return true
  5835		}
  5836		// match: (SUB x x)
  5837		// cond:
  5838		// result: (MOVWconst [0])
  5839		for {
  5840			x := v.Args[1]
  5841			if x != v.Args[0] {
  5842				break
  5843			}
  5844			v.reset(OpMIPSMOVWconst)
  5845			v.AuxInt = 0
  5846			return true
  5847		}
  5848		// match: (SUB (MOVWconst [0]) x)
  5849		// cond:
  5850		// result: (NEG x)
  5851		for {
  5852			x := v.Args[1]
  5853			v_0 := v.Args[0]
  5854			if v_0.Op != OpMIPSMOVWconst {
  5855				break
  5856			}
  5857			if v_0.AuxInt != 0 {
  5858				break
  5859			}
  5860			v.reset(OpMIPSNEG)
  5861			v.AddArg(x)
  5862			return true
  5863		}
  5864		return false
  5865	}
  5866	func rewriteValueMIPS_OpMIPSSUBconst_0(v *Value) bool {
  5867		// match: (SUBconst [0] x)
  5868		// cond:
  5869		// result: x
  5870		for {
  5871			if v.AuxInt != 0 {
  5872				break
  5873			}
  5874			x := v.Args[0]
  5875			v.reset(OpCopy)
  5876			v.Type = x.Type
  5877			v.AddArg(x)
  5878			return true
  5879		}
  5880		// match: (SUBconst [c] (MOVWconst [d]))
  5881		// cond:
  5882		// result: (MOVWconst [int64(int32(d-c))])
  5883		for {
  5884			c := v.AuxInt
  5885			v_0 := v.Args[0]
  5886			if v_0.Op != OpMIPSMOVWconst {
  5887				break
  5888			}
  5889			d := v_0.AuxInt
  5890			v.reset(OpMIPSMOVWconst)
  5891			v.AuxInt = int64(int32(d - c))
  5892			return true
  5893		}
  5894		// match: (SUBconst [c] (SUBconst [d] x))
  5895		// cond:
  5896		// result: (ADDconst [int64(int32(-c-d))] x)
  5897		for {
  5898			c := v.AuxInt
  5899			v_0 := v.Args[0]
  5900			if v_0.Op != OpMIPSSUBconst {
  5901				break
  5902			}
  5903			d := v_0.AuxInt
  5904			x := v_0.Args[0]
  5905			v.reset(OpMIPSADDconst)
  5906			v.AuxInt = int64(int32(-c - d))
  5907			v.AddArg(x)
  5908			return true
  5909		}
  5910		// match: (SUBconst [c] (ADDconst [d] x))
  5911		// cond:
  5912		// result: (ADDconst [int64(int32(-c+d))] x)
  5913		for {
  5914			c := v.AuxInt
  5915			v_0 := v.Args[0]
  5916			if v_0.Op != OpMIPSADDconst {
  5917				break
  5918			}
  5919			d := v_0.AuxInt
  5920			x := v_0.Args[0]
  5921			v.reset(OpMIPSADDconst)
  5922			v.AuxInt = int64(int32(-c + d))
  5923			v.AddArg(x)
  5924			return true
  5925		}
  5926		return false
  5927	}
  5928	func rewriteValueMIPS_OpMIPSXOR_0(v *Value) bool {
  5929		// match: (XOR x (MOVWconst [c]))
  5930		// cond:
  5931		// result: (XORconst [c] x)
  5932		for {
  5933			_ = v.Args[1]
  5934			x := v.Args[0]
  5935			v_1 := v.Args[1]
  5936			if v_1.Op != OpMIPSMOVWconst {
  5937				break
  5938			}
  5939			c := v_1.AuxInt
  5940			v.reset(OpMIPSXORconst)
  5941			v.AuxInt = c
  5942			v.AddArg(x)
  5943			return true
  5944		}
  5945		// match: (XOR (MOVWconst [c]) x)
  5946		// cond:
  5947		// result: (XORconst [c] x)
  5948		for {
  5949			x := v.Args[1]
  5950			v_0 := v.Args[0]
  5951			if v_0.Op != OpMIPSMOVWconst {
  5952				break
  5953			}
  5954			c := v_0.AuxInt
  5955			v.reset(OpMIPSXORconst)
  5956			v.AuxInt = c
  5957			v.AddArg(x)
  5958			return true
  5959		}
  5960		// match: (XOR x x)
  5961		// cond:
  5962		// result: (MOVWconst [0])
  5963		for {
  5964			x := v.Args[1]
  5965			if x != v.Args[0] {
  5966				break
  5967			}
  5968			v.reset(OpMIPSMOVWconst)
  5969			v.AuxInt = 0
  5970			return true
  5971		}
  5972		return false
  5973	}
  5974	func rewriteValueMIPS_OpMIPSXORconst_0(v *Value) bool {
  5975		// match: (XORconst [0] x)
  5976		// cond:
  5977		// result: x
  5978		for {
  5979			if v.AuxInt != 0 {
  5980				break
  5981			}
  5982			x := v.Args[0]
  5983			v.reset(OpCopy)
  5984			v.Type = x.Type
  5985			v.AddArg(x)
  5986			return true
  5987		}
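	// XOR with -1 (all ones) is bitwise complement; MIPS has no separate NOT
	// instruction, so the complement is expressed below as NOR with zero.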
  5988		// match: (XORconst [-1] x)
  5989		// cond:
  5990		// result: (NORconst [0] x)
  5991		for {
  5992			if v.AuxInt != -1 {
  5993				break
  5994			}
  5995			x := v.Args[0]
  5996			v.reset(OpMIPSNORconst)
  5997			v.AuxInt = 0
  5998			v.AddArg(x)
  5999			return true
  6000		}
  6001		// match: (XORconst [c] (MOVWconst [d]))
  6002		// cond:
  6003		// result: (MOVWconst [c^d])
  6004		for {
  6005			c := v.AuxInt
  6006			v_0 := v.Args[0]
  6007			if v_0.Op != OpMIPSMOVWconst {
  6008				break
  6009			}
  6010			d := v_0.AuxInt
  6011			v.reset(OpMIPSMOVWconst)
  6012			v.AuxInt = c ^ d
  6013			return true
  6014		}
  6015		// match: (XORconst [c] (XORconst [d] x))
  6016		// cond:
  6017		// result: (XORconst [c^d] x)
  6018		for {
  6019			c := v.AuxInt
  6020			v_0 := v.Args[0]
  6021			if v_0.Op != OpMIPSXORconst {
  6022				break
  6023			}
  6024			d := v_0.AuxInt
  6025			x := v_0.Args[0]
  6026			v.reset(OpMIPSXORconst)
  6027			v.AuxInt = c ^ d
  6028			v.AddArg(x)
  6029			return true
  6030		}
  6031		return false
  6032	}
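// The Mod* lowerings below share one shape: 8- and 16-bit operands are first
// sign- or zero-extended to 32 bits, the machine DIV or DIVU op produces a
// tuple, and Select0 extracts the remainder half of that tuple.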
  6033	func rewriteValueMIPS_OpMod16_0(v *Value) bool {
  6034		b := v.Block
  6035		typ := &b.Func.Config.Types
  6036		// match: (Mod16 x y)
  6037		// cond:
  6038		// result: (Select0 (DIV (SignExt16to32 x) (SignExt16to32 y)))
  6039		for {
  6040			y := v.Args[1]
  6041			x := v.Args[0]
  6042			v.reset(OpSelect0)
  6043			v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  6044			v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  6045			v1.AddArg(x)
  6046			v0.AddArg(v1)
  6047			v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  6048			v2.AddArg(y)
  6049			v0.AddArg(v2)
  6050			v.AddArg(v0)
  6051			return true
  6052		}
  6053	}
  6054	func rewriteValueMIPS_OpMod16u_0(v *Value) bool {
  6055		b := v.Block
  6056		typ := &b.Func.Config.Types
  6057		// match: (Mod16u x y)
  6058		// cond:
  6059		// result: (Select0 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
  6060		for {
  6061			y := v.Args[1]
  6062			x := v.Args[0]
  6063			v.reset(OpSelect0)
  6064			v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  6065			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6066			v1.AddArg(x)
  6067			v0.AddArg(v1)
  6068			v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6069			v2.AddArg(y)
  6070			v0.AddArg(v2)
  6071			v.AddArg(v0)
  6072			return true
  6073		}
  6074	}
  6075	func rewriteValueMIPS_OpMod32_0(v *Value) bool {
  6076		b := v.Block
  6077		typ := &b.Func.Config.Types
  6078		// match: (Mod32 x y)
  6079		// cond:
  6080		// result: (Select0 (DIV x y))
  6081		for {
  6082			y := v.Args[1]
  6083			x := v.Args[0]
  6084			v.reset(OpSelect0)
  6085			v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  6086			v0.AddArg(x)
  6087			v0.AddArg(y)
  6088			v.AddArg(v0)
  6089			return true
  6090		}
  6091	}
  6092	func rewriteValueMIPS_OpMod32u_0(v *Value) bool {
  6093		b := v.Block
  6094		typ := &b.Func.Config.Types
  6095		// match: (Mod32u x y)
  6096		// cond:
  6097		// result: (Select0 (DIVU x y))
  6098		for {
  6099			y := v.Args[1]
  6100			x := v.Args[0]
  6101			v.reset(OpSelect0)
  6102			v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  6103			v0.AddArg(x)
  6104			v0.AddArg(y)
  6105			v.AddArg(v0)
  6106			return true
  6107		}
  6108	}
  6109	func rewriteValueMIPS_OpMod8_0(v *Value) bool {
  6110		b := v.Block
  6111		typ := &b.Func.Config.Types
  6112		// match: (Mod8 x y)
  6113		// cond:
  6114		// result: (Select0 (DIV (SignExt8to32 x) (SignExt8to32 y)))
  6115		for {
  6116			y := v.Args[1]
  6117			x := v.Args[0]
  6118			v.reset(OpSelect0)
  6119			v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
  6120			v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  6121			v1.AddArg(x)
  6122			v0.AddArg(v1)
  6123			v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  6124			v2.AddArg(y)
  6125			v0.AddArg(v2)
  6126			v.AddArg(v0)
  6127			return true
  6128		}
  6129	}
  6130	func rewriteValueMIPS_OpMod8u_0(v *Value) bool {
  6131		b := v.Block
  6132		typ := &b.Func.Config.Types
  6133		// match: (Mod8u x y)
  6134		// cond:
  6135		// result: (Select0 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
  6136		for {
  6137			y := v.Args[1]
  6138			x := v.Args[0]
  6139			v.reset(OpSelect0)
  6140			v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
  6141			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  6142			v1.AddArg(x)
  6143			v0.AddArg(v1)
  6144			v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  6145			v2.AddArg(y)
  6146			v0.AddArg(v2)
  6147			v.AddArg(v0)
  6148			return true
  6149		}
  6150	}
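// Move lowering: fixed small sizes (0 through 4, 8, and, in the _10 chunk below,
// 6/12/16) are expanded into straight-line load/store sequences whose width is
// chosen from the type's alignment (word stores when 4-aligned, halfword stores
// when 2-aligned, otherwise byte stores). Sizes or alignments outside those cases
// fall back to the generic LoweredMove rule at the end of the _10 chunk.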
  6151	func rewriteValueMIPS_OpMove_0(v *Value) bool {
  6152		b := v.Block
  6153		typ := &b.Func.Config.Types
  6154		// match: (Move [0] _ _ mem)
  6155		// cond:
  6156		// result: mem
  6157		for {
  6158			if v.AuxInt != 0 {
  6159				break
  6160			}
  6161			mem := v.Args[2]
  6162			v.reset(OpCopy)
  6163			v.Type = mem.Type
  6164			v.AddArg(mem)
  6165			return true
  6166		}
  6167		// match: (Move [1] dst src mem)
  6168		// cond:
  6169		// result: (MOVBstore dst (MOVBUload src mem) mem)
  6170		for {
  6171			if v.AuxInt != 1 {
  6172				break
  6173			}
  6174			mem := v.Args[2]
  6175			dst := v.Args[0]
  6176			src := v.Args[1]
  6177			v.reset(OpMIPSMOVBstore)
  6178			v.AddArg(dst)
  6179			v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6180			v0.AddArg(src)
  6181			v0.AddArg(mem)
  6182			v.AddArg(v0)
  6183			v.AddArg(mem)
  6184			return true
  6185		}
  6186		// match: (Move [2] {t} dst src mem)
  6187		// cond: t.(*types.Type).Alignment()%2 == 0
  6188		// result: (MOVHstore dst (MOVHUload src mem) mem)
  6189		for {
  6190			if v.AuxInt != 2 {
  6191				break
  6192			}
  6193			t := v.Aux
  6194			mem := v.Args[2]
  6195			dst := v.Args[0]
  6196			src := v.Args[1]
  6197			if !(t.(*types.Type).Alignment()%2 == 0) {
  6198				break
  6199			}
  6200			v.reset(OpMIPSMOVHstore)
  6201			v.AddArg(dst)
  6202			v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
  6203			v0.AddArg(src)
  6204			v0.AddArg(mem)
  6205			v.AddArg(v0)
  6206			v.AddArg(mem)
  6207			return true
  6208		}
  6209		// match: (Move [2] dst src mem)
  6210		// cond:
  6211		// result: (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))
  6212		for {
  6213			if v.AuxInt != 2 {
  6214				break
  6215			}
  6216			mem := v.Args[2]
  6217			dst := v.Args[0]
  6218			src := v.Args[1]
  6219			v.reset(OpMIPSMOVBstore)
  6220			v.AuxInt = 1
  6221			v.AddArg(dst)
  6222			v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6223			v0.AuxInt = 1
  6224			v0.AddArg(src)
  6225			v0.AddArg(mem)
  6226			v.AddArg(v0)
  6227			v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6228			v1.AddArg(dst)
  6229			v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6230			v2.AddArg(src)
  6231			v2.AddArg(mem)
  6232			v1.AddArg(v2)
  6233			v1.AddArg(mem)
  6234			v.AddArg(v1)
  6235			return true
  6236		}
  6237		// match: (Move [4] {t} dst src mem)
  6238		// cond: t.(*types.Type).Alignment()%4 == 0
  6239		// result: (MOVWstore dst (MOVWload src mem) mem)
  6240		for {
  6241			if v.AuxInt != 4 {
  6242				break
  6243			}
  6244			t := v.Aux
  6245			mem := v.Args[2]
  6246			dst := v.Args[0]
  6247			src := v.Args[1]
  6248			if !(t.(*types.Type).Alignment()%4 == 0) {
  6249				break
  6250			}
  6251			v.reset(OpMIPSMOVWstore)
  6252			v.AddArg(dst)
  6253			v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6254			v0.AddArg(src)
  6255			v0.AddArg(mem)
  6256			v.AddArg(v0)
  6257			v.AddArg(mem)
  6258			return true
  6259		}
  6260		// match: (Move [4] {t} dst src mem)
  6261		// cond: t.(*types.Type).Alignment()%2 == 0
  6262		// result: (MOVHstore [2] dst (MOVHUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
  6263		for {
  6264			if v.AuxInt != 4 {
  6265				break
  6266			}
  6267			t := v.Aux
  6268			mem := v.Args[2]
  6269			dst := v.Args[0]
  6270			src := v.Args[1]
  6271			if !(t.(*types.Type).Alignment()%2 == 0) {
  6272				break
  6273			}
  6274			v.reset(OpMIPSMOVHstore)
  6275			v.AuxInt = 2
  6276			v.AddArg(dst)
  6277			v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
  6278			v0.AuxInt = 2
  6279			v0.AddArg(src)
  6280			v0.AddArg(mem)
  6281			v.AddArg(v0)
  6282			v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6283			v1.AddArg(dst)
  6284			v2 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16)
  6285			v2.AddArg(src)
  6286			v2.AddArg(mem)
  6287			v1.AddArg(v2)
  6288			v1.AddArg(mem)
  6289			v.AddArg(v1)
  6290			return true
  6291		}
  6292		// match: (Move [4] dst src mem)
  6293		// cond:
  6294		// result: (MOVBstore [3] dst (MOVBUload [3] src mem) (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))))
  6295		for {
  6296			if v.AuxInt != 4 {
  6297				break
  6298			}
  6299			mem := v.Args[2]
  6300			dst := v.Args[0]
  6301			src := v.Args[1]
  6302			v.reset(OpMIPSMOVBstore)
  6303			v.AuxInt = 3
  6304			v.AddArg(dst)
  6305			v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6306			v0.AuxInt = 3
  6307			v0.AddArg(src)
  6308			v0.AddArg(mem)
  6309			v.AddArg(v0)
  6310			v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6311			v1.AuxInt = 2
  6312			v1.AddArg(dst)
  6313			v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6314			v2.AuxInt = 2
  6315			v2.AddArg(src)
  6316			v2.AddArg(mem)
  6317			v1.AddArg(v2)
  6318			v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6319			v3.AuxInt = 1
  6320			v3.AddArg(dst)
  6321			v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6322			v4.AuxInt = 1
  6323			v4.AddArg(src)
  6324			v4.AddArg(mem)
  6325			v3.AddArg(v4)
  6326			v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6327			v5.AddArg(dst)
  6328			v6 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6329			v6.AddArg(src)
  6330			v6.AddArg(mem)
  6331			v5.AddArg(v6)
  6332			v5.AddArg(mem)
  6333			v3.AddArg(v5)
  6334			v1.AddArg(v3)
  6335			v.AddArg(v1)
  6336			return true
  6337		}
  6338		// match: (Move [3] dst src mem)
  6339		// cond:
  6340		// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)))
  6341		for {
  6342			if v.AuxInt != 3 {
  6343				break
  6344			}
  6345			mem := v.Args[2]
  6346			dst := v.Args[0]
  6347			src := v.Args[1]
  6348			v.reset(OpMIPSMOVBstore)
  6349			v.AuxInt = 2
  6350			v.AddArg(dst)
  6351			v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6352			v0.AuxInt = 2
  6353			v0.AddArg(src)
  6354			v0.AddArg(mem)
  6355			v.AddArg(v0)
  6356			v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6357			v1.AuxInt = 1
  6358			v1.AddArg(dst)
  6359			v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6360			v2.AuxInt = 1
  6361			v2.AddArg(src)
  6362			v2.AddArg(mem)
  6363			v1.AddArg(v2)
  6364			v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  6365			v3.AddArg(dst)
  6366			v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8)
  6367			v4.AddArg(src)
  6368			v4.AddArg(mem)
  6369			v3.AddArg(v4)
  6370			v3.AddArg(mem)
  6371			v1.AddArg(v3)
  6372			v.AddArg(v1)
  6373			return true
  6374		}
  6375		// match: (Move [8] {t} dst src mem)
  6376		// cond: t.(*types.Type).Alignment()%4 == 0
  6377		// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
  6378		for {
  6379			if v.AuxInt != 8 {
  6380				break
  6381			}
  6382			t := v.Aux
  6383			mem := v.Args[2]
  6384			dst := v.Args[0]
  6385			src := v.Args[1]
  6386			if !(t.(*types.Type).Alignment()%4 == 0) {
  6387				break
  6388			}
  6389			v.reset(OpMIPSMOVWstore)
  6390			v.AuxInt = 4
  6391			v.AddArg(dst)
  6392			v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6393			v0.AuxInt = 4
  6394			v0.AddArg(src)
  6395			v0.AddArg(mem)
  6396			v.AddArg(v0)
  6397			v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6398			v1.AddArg(dst)
  6399			v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6400			v2.AddArg(src)
  6401			v2.AddArg(mem)
  6402			v1.AddArg(v2)
  6403			v1.AddArg(mem)
  6404			v.AddArg(v1)
  6405			return true
  6406		}
  6407		// match: (Move [8] {t} dst src mem)
  6408		// cond: t.(*types.Type).Alignment()%2 == 0
  6409		// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
  6410		for {
  6411			if v.AuxInt != 8 {
  6412				break
  6413			}
  6414			t := v.Aux
  6415			mem := v.Args[2]
  6416			dst := v.Args[0]
  6417			src := v.Args[1]
  6418			if !(t.(*types.Type).Alignment()%2 == 0) {
  6419				break
  6420			}
  6421			v.reset(OpMIPSMOVHstore)
  6422			v.AuxInt = 6
  6423			v.AddArg(dst)
  6424			v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6425			v0.AuxInt = 6
  6426			v0.AddArg(src)
  6427			v0.AddArg(mem)
  6428			v.AddArg(v0)
  6429			v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6430			v1.AuxInt = 4
  6431			v1.AddArg(dst)
  6432			v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6433			v2.AuxInt = 4
  6434			v2.AddArg(src)
  6435			v2.AddArg(mem)
  6436			v1.AddArg(v2)
  6437			v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6438			v3.AuxInt = 2
  6439			v3.AddArg(dst)
  6440			v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6441			v4.AuxInt = 2
  6442			v4.AddArg(src)
  6443			v4.AddArg(mem)
  6444			v3.AddArg(v4)
  6445			v5 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6446			v5.AddArg(dst)
  6447			v6 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6448			v6.AddArg(src)
  6449			v6.AddArg(mem)
  6450			v5.AddArg(v6)
  6451			v5.AddArg(mem)
  6452			v3.AddArg(v5)
  6453			v1.AddArg(v3)
  6454			v.AddArg(v1)
  6455			return true
  6456		}
  6457		return false
  6458	}
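// OpMove continues here: the rule generator appears to split long per-op rule
// lists into chunks, with the _0/_10 suffix naming the index of the first rule in
// the chunk, presumably to keep individual generated functions from growing too
// large.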
  6459	func rewriteValueMIPS_OpMove_10(v *Value) bool {
  6460		b := v.Block
  6461		config := b.Func.Config
  6462		typ := &b.Func.Config.Types
  6463		// match: (Move [6] {t} dst src mem)
  6464		// cond: t.(*types.Type).Alignment()%2 == 0
  6465		// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
  6466		for {
  6467			if v.AuxInt != 6 {
  6468				break
  6469			}
  6470			t := v.Aux
  6471			mem := v.Args[2]
  6472			dst := v.Args[0]
  6473			src := v.Args[1]
  6474			if !(t.(*types.Type).Alignment()%2 == 0) {
  6475				break
  6476			}
  6477			v.reset(OpMIPSMOVHstore)
  6478			v.AuxInt = 4
  6479			v.AddArg(dst)
  6480			v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6481			v0.AuxInt = 4
  6482			v0.AddArg(src)
  6483			v0.AddArg(mem)
  6484			v.AddArg(v0)
  6485			v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6486			v1.AuxInt = 2
  6487			v1.AddArg(dst)
  6488			v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6489			v2.AuxInt = 2
  6490			v2.AddArg(src)
  6491			v2.AddArg(mem)
  6492			v1.AddArg(v2)
  6493			v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  6494			v3.AddArg(dst)
  6495			v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16)
  6496			v4.AddArg(src)
  6497			v4.AddArg(mem)
  6498			v3.AddArg(v4)
  6499			v3.AddArg(mem)
  6500			v1.AddArg(v3)
  6501			v.AddArg(v1)
  6502			return true
  6503		}
  6504		// match: (Move [12] {t} dst src mem)
  6505		// cond: t.(*types.Type).Alignment()%4 == 0
  6506		// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
  6507		for {
  6508			if v.AuxInt != 12 {
  6509				break
  6510			}
  6511			t := v.Aux
  6512			mem := v.Args[2]
  6513			dst := v.Args[0]
  6514			src := v.Args[1]
  6515			if !(t.(*types.Type).Alignment()%4 == 0) {
  6516				break
  6517			}
  6518			v.reset(OpMIPSMOVWstore)
  6519			v.AuxInt = 8
  6520			v.AddArg(dst)
  6521			v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6522			v0.AuxInt = 8
  6523			v0.AddArg(src)
  6524			v0.AddArg(mem)
  6525			v.AddArg(v0)
  6526			v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6527			v1.AuxInt = 4
  6528			v1.AddArg(dst)
  6529			v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6530			v2.AuxInt = 4
  6531			v2.AddArg(src)
  6532			v2.AddArg(mem)
  6533			v1.AddArg(v2)
  6534			v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6535			v3.AddArg(dst)
  6536			v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6537			v4.AddArg(src)
  6538			v4.AddArg(mem)
  6539			v3.AddArg(v4)
  6540			v3.AddArg(mem)
  6541			v1.AddArg(v3)
  6542			v.AddArg(v1)
  6543			return true
  6544		}
  6545		// match: (Move [16] {t} dst src mem)
  6546		// cond: t.(*types.Type).Alignment()%4 == 0
  6547		// result: (MOVWstore [12] dst (MOVWload [12] src mem) (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))))
  6548		for {
  6549			if v.AuxInt != 16 {
  6550				break
  6551			}
  6552			t := v.Aux
  6553			mem := v.Args[2]
  6554			dst := v.Args[0]
  6555			src := v.Args[1]
  6556			if !(t.(*types.Type).Alignment()%4 == 0) {
  6557				break
  6558			}
  6559			v.reset(OpMIPSMOVWstore)
  6560			v.AuxInt = 12
  6561			v.AddArg(dst)
  6562			v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6563			v0.AuxInt = 12
  6564			v0.AddArg(src)
  6565			v0.AddArg(mem)
  6566			v.AddArg(v0)
  6567			v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6568			v1.AuxInt = 8
  6569			v1.AddArg(dst)
  6570			v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6571			v2.AuxInt = 8
  6572			v2.AddArg(src)
  6573			v2.AddArg(mem)
  6574			v1.AddArg(v2)
  6575			v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6576			v3.AuxInt = 4
  6577			v3.AddArg(dst)
  6578			v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6579			v4.AuxInt = 4
  6580			v4.AddArg(src)
  6581			v4.AddArg(mem)
  6582			v3.AddArg(v4)
  6583			v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  6584			v5.AddArg(dst)
  6585			v6 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32)
  6586			v6.AddArg(src)
  6587			v6.AddArg(mem)
  6588			v5.AddArg(v6)
  6589			v5.AddArg(mem)
  6590			v3.AddArg(v5)
  6591			v1.AddArg(v3)
  6592			v.AddArg(v1)
  6593			return true
  6594		}
  6595		// match: (Move [s] {t} dst src mem)
  6596		// cond: (s > 16 || t.(*types.Type).Alignment()%4 != 0)
  6597		// result: (LoweredMove [t.(*types.Type).Alignment()] dst src (ADDconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
  6598		for {
  6599			s := v.AuxInt
  6600			t := v.Aux
  6601			mem := v.Args[2]
  6602			dst := v.Args[0]
  6603			src := v.Args[1]
  6604			if !(s > 16 || t.(*types.Type).Alignment()%4 != 0) {
  6605				break
  6606			}
  6607			v.reset(OpMIPSLoweredMove)
  6608			v.AuxInt = t.(*types.Type).Alignment()
  6609			v.AddArg(dst)
  6610			v.AddArg(src)
  6611			v0 := b.NewValue0(v.Pos, OpMIPSADDconst, src.Type)
  6612			v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
  6613			v0.AddArg(src)
  6614			v.AddArg(v0)
  6615			v.AddArg(mem)
  6616			return true
  6617		}
  6618		return false
  6619	}
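// Mul8/Mul16/Mul32 all lower to the 32-bit MUL, since truncated multiplication
// only needs the low word of the product. Mul32uhilo lowers to MULTU, whose
// two-result HI/LO pair is read back via Select0/Select1 (see OpSelect0 further
// below); the floating-point multiplies map directly to MULF/MULD.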
  6620	func rewriteValueMIPS_OpMul16_0(v *Value) bool {
  6621		// match: (Mul16 x y)
  6622		// cond:
  6623		// result: (MUL x y)
  6624		for {
  6625			y := v.Args[1]
  6626			x := v.Args[0]
  6627			v.reset(OpMIPSMUL)
  6628			v.AddArg(x)
  6629			v.AddArg(y)
  6630			return true
  6631		}
  6632	}
  6633	func rewriteValueMIPS_OpMul32_0(v *Value) bool {
  6634		// match: (Mul32 x y)
  6635		// cond:
  6636		// result: (MUL x y)
  6637		for {
  6638			y := v.Args[1]
  6639			x := v.Args[0]
  6640			v.reset(OpMIPSMUL)
  6641			v.AddArg(x)
  6642			v.AddArg(y)
  6643			return true
  6644		}
  6645	}
  6646	func rewriteValueMIPS_OpMul32F_0(v *Value) bool {
  6647		// match: (Mul32F x y)
  6648		// cond:
  6649		// result: (MULF x y)
  6650		for {
  6651			y := v.Args[1]
  6652			x := v.Args[0]
  6653			v.reset(OpMIPSMULF)
  6654			v.AddArg(x)
  6655			v.AddArg(y)
  6656			return true
  6657		}
  6658	}
  6659	func rewriteValueMIPS_OpMul32uhilo_0(v *Value) bool {
  6660		// match: (Mul32uhilo x y)
  6661		// cond:
  6662		// result: (MULTU x y)
  6663		for {
  6664			y := v.Args[1]
  6665			x := v.Args[0]
  6666			v.reset(OpMIPSMULTU)
  6667			v.AddArg(x)
  6668			v.AddArg(y)
  6669			return true
  6670		}
  6671	}
  6672	func rewriteValueMIPS_OpMul64F_0(v *Value) bool {
  6673		// match: (Mul64F x y)
  6674		// cond:
  6675		// result: (MULD x y)
  6676		for {
  6677			y := v.Args[1]
  6678			x := v.Args[0]
  6679			v.reset(OpMIPSMULD)
  6680			v.AddArg(x)
  6681			v.AddArg(y)
  6682			return true
  6683		}
  6684	}
  6685	func rewriteValueMIPS_OpMul8_0(v *Value) bool {
  6686		// match: (Mul8 x y)
  6687		// cond:
  6688		// result: (MUL x y)
  6689		for {
  6690			y := v.Args[1]
  6691			x := v.Args[0]
  6692			v.reset(OpMIPSMUL)
  6693			v.AddArg(x)
  6694			v.AddArg(y)
  6695			return true
  6696		}
  6697	}
  6698	func rewriteValueMIPS_OpNeg16_0(v *Value) bool {
  6699		// match: (Neg16 x)
  6700		// cond:
  6701		// result: (NEG x)
  6702		for {
  6703			x := v.Args[0]
  6704			v.reset(OpMIPSNEG)
  6705			v.AddArg(x)
  6706			return true
  6707		}
  6708	}
  6709	func rewriteValueMIPS_OpNeg32_0(v *Value) bool {
  6710		// match: (Neg32 x)
  6711		// cond:
  6712		// result: (NEG x)
  6713		for {
  6714			x := v.Args[0]
  6715			v.reset(OpMIPSNEG)
  6716			v.AddArg(x)
  6717			return true
  6718		}
  6719	}
  6720	func rewriteValueMIPS_OpNeg32F_0(v *Value) bool {
  6721		// match: (Neg32F x)
  6722		// cond:
  6723		// result: (NEGF x)
  6724		for {
  6725			x := v.Args[0]
  6726			v.reset(OpMIPSNEGF)
  6727			v.AddArg(x)
  6728			return true
  6729		}
  6730	}
  6731	func rewriteValueMIPS_OpNeg64F_0(v *Value) bool {
  6732		// match: (Neg64F x)
  6733		// cond:
  6734		// result: (NEGD x)
  6735		for {
  6736			x := v.Args[0]
  6737			v.reset(OpMIPSNEGD)
  6738			v.AddArg(x)
  6739			return true
  6740		}
  6741	}
  6742	func rewriteValueMIPS_OpNeg8_0(v *Value) bool {
  6743		// match: (Neg8 x)
  6744		// cond:
  6745		// result: (NEG x)
  6746		for {
  6747			x := v.Args[0]
  6748			v.reset(OpMIPSNEG)
  6749			v.AddArg(x)
  6750			return true
  6751		}
  6752	}
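// Integer Neq* is lowered as "XOR the operands, then test the result against zero
// with an unsigned compare": (SGTU v 0) is simply v != 0. Sub-word operands are
// zero-extended first. The floating-point variants instead negate the result of
// CMPEQF/CMPEQD via FPFlagFalse, and NeqB on booleans is just XOR.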
  6753	func rewriteValueMIPS_OpNeq16_0(v *Value) bool {
  6754		b := v.Block
  6755		typ := &b.Func.Config.Types
  6756		// match: (Neq16 x y)
  6757		// cond:
  6758		// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)) (MOVWconst [0]))
  6759		for {
  6760			y := v.Args[1]
  6761			x := v.Args[0]
  6762			v.reset(OpMIPSSGTU)
  6763			v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  6764			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6765			v1.AddArg(x)
  6766			v0.AddArg(v1)
  6767			v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  6768			v2.AddArg(y)
  6769			v0.AddArg(v2)
  6770			v.AddArg(v0)
  6771			v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  6772			v3.AuxInt = 0
  6773			v.AddArg(v3)
  6774			return true
  6775		}
  6776	}
  6777	func rewriteValueMIPS_OpNeq32_0(v *Value) bool {
  6778		b := v.Block
  6779		typ := &b.Func.Config.Types
  6780		// match: (Neq32 x y)
  6781		// cond:
  6782		// result: (SGTU (XOR x y) (MOVWconst [0]))
  6783		for {
  6784			y := v.Args[1]
  6785			x := v.Args[0]
  6786			v.reset(OpMIPSSGTU)
  6787			v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  6788			v0.AddArg(x)
  6789			v0.AddArg(y)
  6790			v.AddArg(v0)
  6791			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  6792			v1.AuxInt = 0
  6793			v.AddArg(v1)
  6794			return true
  6795		}
  6796	}
  6797	func rewriteValueMIPS_OpNeq32F_0(v *Value) bool {
  6798		b := v.Block
  6799		// match: (Neq32F x y)
  6800		// cond:
  6801		// result: (FPFlagFalse (CMPEQF x y))
  6802		for {
  6803			y := v.Args[1]
  6804			x := v.Args[0]
  6805			v.reset(OpMIPSFPFlagFalse)
  6806			v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, types.TypeFlags)
  6807			v0.AddArg(x)
  6808			v0.AddArg(y)
  6809			v.AddArg(v0)
  6810			return true
  6811		}
  6812	}
  6813	func rewriteValueMIPS_OpNeq64F_0(v *Value) bool {
  6814		b := v.Block
  6815		// match: (Neq64F x y)
  6816		// cond:
  6817		// result: (FPFlagFalse (CMPEQD x y))
  6818		for {
  6819			y := v.Args[1]
  6820			x := v.Args[0]
  6821			v.reset(OpMIPSFPFlagFalse)
  6822			v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, types.TypeFlags)
  6823			v0.AddArg(x)
  6824			v0.AddArg(y)
  6825			v.AddArg(v0)
  6826			return true
  6827		}
  6828	}
  6829	func rewriteValueMIPS_OpNeq8_0(v *Value) bool {
  6830		b := v.Block
  6831		typ := &b.Func.Config.Types
  6832		// match: (Neq8 x y)
  6833		// cond:
  6834		// result: (SGTU (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)) (MOVWconst [0]))
  6835		for {
  6836			y := v.Args[1]
  6837			x := v.Args[0]
  6838			v.reset(OpMIPSSGTU)
  6839			v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  6840			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  6841			v1.AddArg(x)
  6842			v0.AddArg(v1)
  6843			v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  6844			v2.AddArg(y)
  6845			v0.AddArg(v2)
  6846			v.AddArg(v0)
  6847			v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  6848			v3.AuxInt = 0
  6849			v.AddArg(v3)
  6850			return true
  6851		}
  6852	}
  6853	func rewriteValueMIPS_OpNeqB_0(v *Value) bool {
  6854		// match: (NeqB x y)
  6855		// cond:
  6856		// result: (XOR x y)
  6857		for {
  6858			y := v.Args[1]
  6859			x := v.Args[0]
  6860			v.reset(OpMIPSXOR)
  6861			v.AddArg(x)
  6862			v.AddArg(y)
  6863			return true
  6864		}
  6865	}
  6866	func rewriteValueMIPS_OpNeqPtr_0(v *Value) bool {
  6867		b := v.Block
  6868		typ := &b.Func.Config.Types
  6869		// match: (NeqPtr x y)
  6870		// cond:
  6871		// result: (SGTU (XOR x y) (MOVWconst [0]))
  6872		for {
  6873			y := v.Args[1]
  6874			x := v.Args[0]
  6875			v.reset(OpMIPSSGTU)
  6876			v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32)
  6877			v0.AddArg(x)
  6878			v0.AddArg(y)
  6879			v.AddArg(v0)
  6880			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  6881			v1.AuxInt = 0
  6882			v.AddArg(v1)
  6883			return true
  6884		}
  6885	}
  6886	func rewriteValueMIPS_OpNilCheck_0(v *Value) bool {
  6887		// match: (NilCheck ptr mem)
  6888		// cond:
  6889		// result: (LoweredNilCheck ptr mem)
  6890		for {
  6891			mem := v.Args[1]
  6892			ptr := v.Args[0]
  6893			v.reset(OpMIPSLoweredNilCheck)
  6894			v.AddArg(ptr)
  6895			v.AddArg(mem)
  6896			return true
  6897		}
  6898	}
  6899	func rewriteValueMIPS_OpNot_0(v *Value) bool {
  6900		// match: (Not x)
  6901		// cond:
  6902		// result: (XORconst [1] x)
  6903		for {
  6904			x := v.Args[0]
  6905			v.reset(OpMIPSXORconst)
  6906			v.AuxInt = 1
  6907			v.AddArg(x)
  6908			return true
  6909		}
  6910	}
  6911	func rewriteValueMIPS_OpOffPtr_0(v *Value) bool {
  6912		// match: (OffPtr [off] ptr:(SP))
  6913		// cond:
  6914		// result: (MOVWaddr [off] ptr)
  6915		for {
  6916			off := v.AuxInt
  6917			ptr := v.Args[0]
  6918			if ptr.Op != OpSP {
  6919				break
  6920			}
  6921			v.reset(OpMIPSMOVWaddr)
  6922			v.AuxInt = off
  6923			v.AddArg(ptr)
  6924			return true
  6925		}
  6926		// match: (OffPtr [off] ptr)
  6927		// cond:
  6928		// result: (ADDconst [off] ptr)
  6929		for {
  6930			off := v.AuxInt
  6931			ptr := v.Args[0]
  6932			v.reset(OpMIPSADDconst)
  6933			v.AuxInt = off
  6934			v.AddArg(ptr)
  6935			return true
  6936		}
  6937	}
  6938	func rewriteValueMIPS_OpOr16_0(v *Value) bool {
  6939		// match: (Or16 x y)
  6940		// cond:
  6941		// result: (OR x y)
  6942		for {
  6943			y := v.Args[1]
  6944			x := v.Args[0]
  6945			v.reset(OpMIPSOR)
  6946			v.AddArg(x)
  6947			v.AddArg(y)
  6948			return true
  6949		}
  6950	}
  6951	func rewriteValueMIPS_OpOr32_0(v *Value) bool {
  6952		// match: (Or32 x y)
  6953		// cond:
  6954		// result: (OR x y)
  6955		for {
  6956			y := v.Args[1]
  6957			x := v.Args[0]
  6958			v.reset(OpMIPSOR)
  6959			v.AddArg(x)
  6960			v.AddArg(y)
  6961			return true
  6962		}
  6963	}
  6964	func rewriteValueMIPS_OpOr8_0(v *Value) bool {
  6965		// match: (Or8 x y)
  6966		// cond:
  6967		// result: (OR x y)
  6968		for {
  6969			y := v.Args[1]
  6970			x := v.Args[0]
  6971			v.reset(OpMIPSOR)
  6972			v.AddArg(x)
  6973			v.AddArg(y)
  6974			return true
  6975		}
  6976	}
  6977	func rewriteValueMIPS_OpOrB_0(v *Value) bool {
  6978		// match: (OrB x y)
  6979		// cond:
  6980		// result: (OR x y)
  6981		for {
  6982			y := v.Args[1]
  6983			x := v.Args[0]
  6984			v.reset(OpMIPSOR)
  6985			v.AddArg(x)
  6986			v.AddArg(y)
  6987			return true
  6988		}
  6989	}
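// PanicBounds and PanicExtend select one of the LoweredPanicBounds{A,B,C} /
// LoweredPanicExtend{A,B,C} variants based on boundsABI(kind); the variants
// presumably differ only in which registers carry the failing index and length,
// so the values are already where the runtime's panic helpers expect them.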
  6990	func rewriteValueMIPS_OpPanicBounds_0(v *Value) bool {
  6991		// match: (PanicBounds [kind] x y mem)
  6992		// cond: boundsABI(kind) == 0
  6993		// result: (LoweredPanicBoundsA [kind] x y mem)
  6994		for {
  6995			kind := v.AuxInt
  6996			mem := v.Args[2]
  6997			x := v.Args[0]
  6998			y := v.Args[1]
  6999			if !(boundsABI(kind) == 0) {
  7000				break
  7001			}
  7002			v.reset(OpMIPSLoweredPanicBoundsA)
  7003			v.AuxInt = kind
  7004			v.AddArg(x)
  7005			v.AddArg(y)
  7006			v.AddArg(mem)
  7007			return true
  7008		}
  7009		// match: (PanicBounds [kind] x y mem)
  7010		// cond: boundsABI(kind) == 1
  7011		// result: (LoweredPanicBoundsB [kind] x y mem)
  7012		for {
  7013			kind := v.AuxInt
  7014			mem := v.Args[2]
  7015			x := v.Args[0]
  7016			y := v.Args[1]
  7017			if !(boundsABI(kind) == 1) {
  7018				break
  7019			}
  7020			v.reset(OpMIPSLoweredPanicBoundsB)
  7021			v.AuxInt = kind
  7022			v.AddArg(x)
  7023			v.AddArg(y)
  7024			v.AddArg(mem)
  7025			return true
  7026		}
  7027		// match: (PanicBounds [kind] x y mem)
  7028		// cond: boundsABI(kind) == 2
  7029		// result: (LoweredPanicBoundsC [kind] x y mem)
  7030		for {
  7031			kind := v.AuxInt
  7032			mem := v.Args[2]
  7033			x := v.Args[0]
  7034			y := v.Args[1]
  7035			if !(boundsABI(kind) == 2) {
  7036				break
  7037			}
  7038			v.reset(OpMIPSLoweredPanicBoundsC)
  7039			v.AuxInt = kind
  7040			v.AddArg(x)
  7041			v.AddArg(y)
  7042			v.AddArg(mem)
  7043			return true
  7044		}
  7045		return false
  7046	}
  7047	func rewriteValueMIPS_OpPanicExtend_0(v *Value) bool {
  7048		// match: (PanicExtend [kind] hi lo y mem)
  7049		// cond: boundsABI(kind) == 0
  7050		// result: (LoweredPanicExtendA [kind] hi lo y mem)
  7051		for {
  7052			kind := v.AuxInt
  7053			mem := v.Args[3]
  7054			hi := v.Args[0]
  7055			lo := v.Args[1]
  7056			y := v.Args[2]
  7057			if !(boundsABI(kind) == 0) {
  7058				break
  7059			}
  7060			v.reset(OpMIPSLoweredPanicExtendA)
  7061			v.AuxInt = kind
  7062			v.AddArg(hi)
  7063			v.AddArg(lo)
  7064			v.AddArg(y)
  7065			v.AddArg(mem)
  7066			return true
  7067		}
  7068		// match: (PanicExtend [kind] hi lo y mem)
  7069		// cond: boundsABI(kind) == 1
  7070		// result: (LoweredPanicExtendB [kind] hi lo y mem)
  7071		for {
  7072			kind := v.AuxInt
  7073			mem := v.Args[3]
  7074			hi := v.Args[0]
  7075			lo := v.Args[1]
  7076			y := v.Args[2]
  7077			if !(boundsABI(kind) == 1) {
  7078				break
  7079			}
  7080			v.reset(OpMIPSLoweredPanicExtendB)
  7081			v.AuxInt = kind
  7082			v.AddArg(hi)
  7083			v.AddArg(lo)
  7084			v.AddArg(y)
  7085			v.AddArg(mem)
  7086			return true
  7087		}
  7088		// match: (PanicExtend [kind] hi lo y mem)
  7089		// cond: boundsABI(kind) == 2
  7090		// result: (LoweredPanicExtendC [kind] hi lo y mem)
  7091		for {
  7092			kind := v.AuxInt
  7093			mem := v.Args[3]
  7094			hi := v.Args[0]
  7095			lo := v.Args[1]
  7096			y := v.Args[2]
  7097			if !(boundsABI(kind) == 2) {
  7098				break
  7099			}
  7100			v.reset(OpMIPSLoweredPanicExtendC)
  7101			v.AuxInt = kind
  7102			v.AddArg(hi)
  7103			v.AddArg(lo)
  7104			v.AddArg(y)
  7105			v.AddArg(mem)
  7106			return true
  7107		}
  7108		return false
  7109	}
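// RotateLeft of an n-bit value by a constant c is expanded into
// (x << (c & (n-1))) | (x >> (-c & (n-1))) using the generic Or/Lsh/Rsh ops, so
// the shift lowerings below finish the job; rotates by a non-constant count are
// not matched here.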
  7110	func rewriteValueMIPS_OpRotateLeft16_0(v *Value) bool {
  7111		b := v.Block
  7112		typ := &b.Func.Config.Types
  7113		// match: (RotateLeft16 <t> x (MOVWconst [c]))
  7114		// cond:
  7115		// result: (Or16 (Lsh16x32 <t> x (MOVWconst [c&15])) (Rsh16Ux32 <t> x (MOVWconst [-c&15])))
  7116		for {
  7117			t := v.Type
  7118			_ = v.Args[1]
  7119			x := v.Args[0]
  7120			v_1 := v.Args[1]
  7121			if v_1.Op != OpMIPSMOVWconst {
  7122				break
  7123			}
  7124			c := v_1.AuxInt
  7125			v.reset(OpOr16)
  7126			v0 := b.NewValue0(v.Pos, OpLsh16x32, t)
  7127			v0.AddArg(x)
  7128			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7129			v1.AuxInt = c & 15
  7130			v0.AddArg(v1)
  7131			v.AddArg(v0)
  7132			v2 := b.NewValue0(v.Pos, OpRsh16Ux32, t)
  7133			v2.AddArg(x)
  7134			v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7135			v3.AuxInt = -c & 15
  7136			v2.AddArg(v3)
  7137			v.AddArg(v2)
  7138			return true
  7139		}
  7140		return false
  7141	}
  7142	func rewriteValueMIPS_OpRotateLeft32_0(v *Value) bool {
  7143		b := v.Block
  7144		typ := &b.Func.Config.Types
  7145		// match: (RotateLeft32 <t> x (MOVWconst [c]))
  7146		// cond:
  7147		// result: (Or32 (Lsh32x32 <t> x (MOVWconst [c&31])) (Rsh32Ux32 <t> x (MOVWconst [-c&31])))
  7148		for {
  7149			t := v.Type
  7150			_ = v.Args[1]
  7151			x := v.Args[0]
  7152			v_1 := v.Args[1]
  7153			if v_1.Op != OpMIPSMOVWconst {
  7154				break
  7155			}
  7156			c := v_1.AuxInt
  7157			v.reset(OpOr32)
  7158			v0 := b.NewValue0(v.Pos, OpLsh32x32, t)
  7159			v0.AddArg(x)
  7160			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7161			v1.AuxInt = c & 31
  7162			v0.AddArg(v1)
  7163			v.AddArg(v0)
  7164			v2 := b.NewValue0(v.Pos, OpRsh32Ux32, t)
  7165			v2.AddArg(x)
  7166			v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7167			v3.AuxInt = -c & 31
  7168			v2.AddArg(v3)
  7169			v.AddArg(v2)
  7170			return true
  7171		}
  7172		return false
  7173	}
  7174	func rewriteValueMIPS_OpRotateLeft64_0(v *Value) bool {
  7175		b := v.Block
  7176		typ := &b.Func.Config.Types
  7177		// match: (RotateLeft64 <t> x (MOVWconst [c]))
  7178		// cond:
  7179		// result: (Or64 (Lsh64x32 <t> x (MOVWconst [c&63])) (Rsh64Ux32 <t> x (MOVWconst [-c&63])))
  7180		for {
  7181			t := v.Type
  7182			_ = v.Args[1]
  7183			x := v.Args[0]
  7184			v_1 := v.Args[1]
  7185			if v_1.Op != OpMIPSMOVWconst {
  7186				break
  7187			}
  7188			c := v_1.AuxInt
  7189			v.reset(OpOr64)
  7190			v0 := b.NewValue0(v.Pos, OpLsh64x32, t)
  7191			v0.AddArg(x)
  7192			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7193			v1.AuxInt = c & 63
  7194			v0.AddArg(v1)
  7195			v.AddArg(v0)
  7196			v2 := b.NewValue0(v.Pos, OpRsh64Ux32, t)
  7197			v2.AddArg(x)
  7198			v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7199			v3.AuxInt = -c & 63
  7200			v2.AddArg(v3)
  7201			v.AddArg(v2)
  7202			return true
  7203		}
  7204		return false
  7205	}
  7206	func rewriteValueMIPS_OpRotateLeft8_0(v *Value) bool {
  7207		b := v.Block
  7208		typ := &b.Func.Config.Types
  7209		// match: (RotateLeft8 <t> x (MOVWconst [c]))
  7210		// cond:
  7211		// result: (Or8 (Lsh8x32 <t> x (MOVWconst [c&7])) (Rsh8Ux32 <t> x (MOVWconst [-c&7])))
  7212		for {
  7213			t := v.Type
  7214			_ = v.Args[1]
  7215			x := v.Args[0]
  7216			v_1 := v.Args[1]
  7217			if v_1.Op != OpMIPSMOVWconst {
  7218				break
  7219			}
  7220			c := v_1.AuxInt
  7221			v.reset(OpOr8)
  7222			v0 := b.NewValue0(v.Pos, OpLsh8x32, t)
  7223			v0.AddArg(x)
  7224			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7225			v1.AuxInt = c & 7
  7226			v0.AddArg(v1)
  7227			v.AddArg(v0)
  7228			v2 := b.NewValue0(v.Pos, OpRsh8Ux32, t)
  7229			v2.AddArg(x)
  7230			v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7231			v3.AuxInt = -c & 7
  7232			v2.AddArg(v3)
  7233			v.AddArg(v2)
  7234			return true
  7235		}
  7236		return false
  7237	}
  7238	func rewriteValueMIPS_OpRound32F_0(v *Value) bool {
  7239		// match: (Round32F x)
  7240		// cond:
  7241		// result: x
  7242		for {
  7243			x := v.Args[0]
  7244			v.reset(OpCopy)
  7245			v.Type = x.Type
  7246			v.AddArg(x)
  7247			return true
  7248		}
  7249	}
  7250	func rewriteValueMIPS_OpRound64F_0(v *Value) bool {
  7251		// match: (Round64F x)
  7252		// cond:
  7253		// result: x
  7254		for {
  7255			x := v.Args[0]
  7256			v.reset(OpCopy)
  7257			v.Type = x.Type
  7258			v.AddArg(x)
  7259			return true
  7260		}
  7261	}
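// The variable-count right shifts below guard against counts >= 32, since the MIPS
// SRL/SRA instructions use only the low five bits of the shift amount. Unsigned
// shifts wrap the SRL in a CMOVZ that selects 0 when (SGTUconst [32] y) is zero
// (i.e. y >= 32); signed shifts instead clamp the count to -1, which the hardware
// masks to 31, so the result is the sign fill. Sub-word inputs are zero- or
// sign-extended to 32 bits first.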
  7262	func rewriteValueMIPS_OpRsh16Ux16_0(v *Value) bool {
  7263		b := v.Block
  7264		typ := &b.Func.Config.Types
  7265		// match: (Rsh16Ux16 <t> x y)
  7266		// cond:
  7267		// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7268		for {
  7269			t := v.Type
  7270			y := v.Args[1]
  7271			x := v.Args[0]
  7272			v.reset(OpMIPSCMOVZ)
  7273			v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7274			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7275			v1.AddArg(x)
  7276			v0.AddArg(v1)
  7277			v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7278			v2.AddArg(y)
  7279			v0.AddArg(v2)
  7280			v.AddArg(v0)
  7281			v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7282			v3.AuxInt = 0
  7283			v.AddArg(v3)
  7284			v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7285			v4.AuxInt = 32
  7286			v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7287			v5.AddArg(y)
  7288			v4.AddArg(v5)
  7289			v.AddArg(v4)
  7290			return true
  7291		}
  7292	}
  7293	func rewriteValueMIPS_OpRsh16Ux32_0(v *Value) bool {
  7294		b := v.Block
  7295		typ := &b.Func.Config.Types
  7296		// match: (Rsh16Ux32 <t> x y)
  7297		// cond:
  7298		// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
  7299		for {
  7300			t := v.Type
  7301			y := v.Args[1]
  7302			x := v.Args[0]
  7303			v.reset(OpMIPSCMOVZ)
  7304			v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7305			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7306			v1.AddArg(x)
  7307			v0.AddArg(v1)
  7308			v0.AddArg(y)
  7309			v.AddArg(v0)
  7310			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7311			v2.AuxInt = 0
  7312			v.AddArg(v2)
  7313			v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7314			v3.AuxInt = 32
  7315			v3.AddArg(y)
  7316			v.AddArg(v3)
  7317			return true
  7318		}
  7319	}
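// Right shifts of sub-word values by a 64-bit constant are handled separately: for
// in-range counts the value is first shifted left so its top bit lands in bit 31
// (SLLconst [16] for 16-bit, [24] for 8-bit) and then shifted right by c+16 or
// c+24, discarding the junk high bits and shifting in one instruction pair. Counts
// at or beyond the width collapse to 0 for unsigned shifts, or to an arithmetic
// shift by 31 for signed shifts.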
  7320	func rewriteValueMIPS_OpRsh16Ux64_0(v *Value) bool {
  7321		b := v.Block
  7322		typ := &b.Func.Config.Types
  7323		// match: (Rsh16Ux64 x (Const64 [c]))
  7324		// cond: uint32(c) < 16
  7325		// result: (SRLconst (SLLconst <typ.UInt32> x [16]) [c+16])
  7326		for {
  7327			_ = v.Args[1]
  7328			x := v.Args[0]
  7329			v_1 := v.Args[1]
  7330			if v_1.Op != OpConst64 {
  7331				break
  7332			}
  7333			c := v_1.AuxInt
  7334			if !(uint32(c) < 16) {
  7335				break
  7336			}
  7337			v.reset(OpMIPSSRLconst)
  7338			v.AuxInt = c + 16
  7339			v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7340			v0.AuxInt = 16
  7341			v0.AddArg(x)
  7342			v.AddArg(v0)
  7343			return true
  7344		}
  7345		// match: (Rsh16Ux64 _ (Const64 [c]))
  7346		// cond: uint32(c) >= 16
  7347		// result: (MOVWconst [0])
  7348		for {
  7349			_ = v.Args[1]
  7350			v_1 := v.Args[1]
  7351			if v_1.Op != OpConst64 {
  7352				break
  7353			}
  7354			c := v_1.AuxInt
  7355			if !(uint32(c) >= 16) {
  7356				break
  7357			}
  7358			v.reset(OpMIPSMOVWconst)
  7359			v.AuxInt = 0
  7360			return true
  7361		}
  7362		return false
  7363	}
  7364	func rewriteValueMIPS_OpRsh16Ux8_0(v *Value) bool {
  7365		b := v.Block
  7366		typ := &b.Func.Config.Types
  7367		// match: (Rsh16Ux8 <t> x y)
  7368		// cond:
  7369		// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7370		for {
  7371			t := v.Type
  7372			y := v.Args[1]
  7373			x := v.Args[0]
  7374			v.reset(OpMIPSCMOVZ)
  7375			v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7376			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7377			v1.AddArg(x)
  7378			v0.AddArg(v1)
  7379			v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7380			v2.AddArg(y)
  7381			v0.AddArg(v2)
  7382			v.AddArg(v0)
  7383			v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7384			v3.AuxInt = 0
  7385			v.AddArg(v3)
  7386			v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7387			v4.AuxInt = 32
  7388			v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7389			v5.AddArg(y)
  7390			v4.AddArg(v5)
  7391			v.AddArg(v4)
  7392			return true
  7393		}
  7394	}
  7395	func rewriteValueMIPS_OpRsh16x16_0(v *Value) bool {
  7396		b := v.Block
  7397		typ := &b.Func.Config.Types
  7398		// match: (Rsh16x16 x y)
  7399		// cond:
  7400		// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7401		for {
  7402			y := v.Args[1]
  7403			x := v.Args[0]
  7404			v.reset(OpMIPSSRA)
  7405			v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7406			v0.AddArg(x)
  7407			v.AddArg(v0)
  7408			v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7409			v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7410			v2.AddArg(y)
  7411			v1.AddArg(v2)
  7412			v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7413			v3.AuxInt = -1
  7414			v1.AddArg(v3)
  7415			v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7416			v4.AuxInt = 32
  7417			v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7418			v5.AddArg(y)
  7419			v4.AddArg(v5)
  7420			v1.AddArg(v4)
  7421			v.AddArg(v1)
  7422			return true
  7423		}
  7424	}
  7425	func rewriteValueMIPS_OpRsh16x32_0(v *Value) bool {
  7426		b := v.Block
  7427		typ := &b.Func.Config.Types
  7428		// match: (Rsh16x32 x y)
  7429		// cond:
  7430		// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7431		for {
  7432			y := v.Args[1]
  7433			x := v.Args[0]
  7434			v.reset(OpMIPSSRA)
  7435			v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7436			v0.AddArg(x)
  7437			v.AddArg(v0)
  7438			v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7439			v1.AddArg(y)
  7440			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7441			v2.AuxInt = -1
  7442			v1.AddArg(v2)
  7443			v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7444			v3.AuxInt = 32
  7445			v3.AddArg(y)
  7446			v1.AddArg(v3)
  7447			v.AddArg(v1)
  7448			return true
  7449		}
  7450	}
  7451	func rewriteValueMIPS_OpRsh16x64_0(v *Value) bool {
  7452		b := v.Block
  7453		typ := &b.Func.Config.Types
  7454		// match: (Rsh16x64 x (Const64 [c]))
  7455		// cond: uint32(c) < 16
  7456		// result: (SRAconst (SLLconst <typ.UInt32> x [16]) [c+16])
  7457		for {
  7458			_ = v.Args[1]
  7459			x := v.Args[0]
  7460			v_1 := v.Args[1]
  7461			if v_1.Op != OpConst64 {
  7462				break
  7463			}
  7464			c := v_1.AuxInt
  7465			if !(uint32(c) < 16) {
  7466				break
  7467			}
  7468			v.reset(OpMIPSSRAconst)
  7469			v.AuxInt = c + 16
  7470			v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7471			v0.AuxInt = 16
  7472			v0.AddArg(x)
  7473			v.AddArg(v0)
  7474			return true
  7475		}
  7476		// match: (Rsh16x64 x (Const64 [c]))
  7477		// cond: uint32(c) >= 16
  7478		// result: (SRAconst (SLLconst <typ.UInt32> x [16]) [31])
  7479		for {
  7480			_ = v.Args[1]
  7481			x := v.Args[0]
  7482			v_1 := v.Args[1]
  7483			if v_1.Op != OpConst64 {
  7484				break
  7485			}
  7486			c := v_1.AuxInt
  7487			if !(uint32(c) >= 16) {
  7488				break
  7489			}
  7490			v.reset(OpMIPSSRAconst)
  7491			v.AuxInt = 31
  7492			v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7493			v0.AuxInt = 16
  7494			v0.AddArg(x)
  7495			v.AddArg(v0)
  7496			return true
  7497		}
  7498		return false
  7499	}
  7500	func rewriteValueMIPS_OpRsh16x8_0(v *Value) bool {
  7501		b := v.Block
  7502		typ := &b.Func.Config.Types
  7503		// match: (Rsh16x8 x y)
  7504		// cond:
  7505		// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7506		for {
  7507			y := v.Args[1]
  7508			x := v.Args[0]
  7509			v.reset(OpMIPSSRA)
  7510			v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7511			v0.AddArg(x)
  7512			v.AddArg(v0)
  7513			v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7514			v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7515			v2.AddArg(y)
  7516			v1.AddArg(v2)
  7517			v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7518			v3.AuxInt = -1
  7519			v1.AddArg(v3)
  7520			v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7521			v4.AuxInt = 32
  7522			v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7523			v5.AddArg(y)
  7524			v4.AddArg(v5)
  7525			v1.AddArg(v4)
  7526			v.AddArg(v1)
  7527			return true
  7528		}
  7529	}
  7530	func rewriteValueMIPS_OpRsh32Ux16_0(v *Value) bool {
  7531		b := v.Block
  7532		typ := &b.Func.Config.Types
  7533		// match: (Rsh32Ux16 <t> x y)
  7534		// cond:
  7535		// result: (CMOVZ (SRL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7536		for {
  7537			t := v.Type
  7538			y := v.Args[1]
  7539			x := v.Args[0]
  7540			v.reset(OpMIPSCMOVZ)
  7541			v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7542			v0.AddArg(x)
  7543			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7544			v1.AddArg(y)
  7545			v0.AddArg(v1)
  7546			v.AddArg(v0)
  7547			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7548			v2.AuxInt = 0
  7549			v.AddArg(v2)
  7550			v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7551			v3.AuxInt = 32
  7552			v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7553			v4.AddArg(y)
  7554			v3.AddArg(v4)
  7555			v.AddArg(v3)
  7556			return true
  7557		}
  7558	}
  7559	func rewriteValueMIPS_OpRsh32Ux32_0(v *Value) bool {
  7560		b := v.Block
  7561		typ := &b.Func.Config.Types
  7562		// match: (Rsh32Ux32 <t> x y)
  7563		// cond:
  7564		// result: (CMOVZ (SRL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
  7565		for {
  7566			t := v.Type
  7567			y := v.Args[1]
  7568			x := v.Args[0]
  7569			v.reset(OpMIPSCMOVZ)
  7570			v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7571			v0.AddArg(x)
  7572			v0.AddArg(y)
  7573			v.AddArg(v0)
  7574			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7575			v1.AuxInt = 0
  7576			v.AddArg(v1)
  7577			v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7578			v2.AuxInt = 32
  7579			v2.AddArg(y)
  7580			v.AddArg(v2)
  7581			return true
  7582		}
  7583	}
  7584	func rewriteValueMIPS_OpRsh32Ux64_0(v *Value) bool {
  7585		// match: (Rsh32Ux64 x (Const64 [c]))
  7586		// cond: uint32(c) < 32
  7587		// result: (SRLconst x [c])
  7588		for {
  7589			_ = v.Args[1]
  7590			x := v.Args[0]
  7591			v_1 := v.Args[1]
  7592			if v_1.Op != OpConst64 {
  7593				break
  7594			}
  7595			c := v_1.AuxInt
  7596			if !(uint32(c) < 32) {
  7597				break
  7598			}
  7599			v.reset(OpMIPSSRLconst)
  7600			v.AuxInt = c
  7601			v.AddArg(x)
  7602			return true
  7603		}
  7604		// match: (Rsh32Ux64 _ (Const64 [c]))
  7605		// cond: uint32(c) >= 32
  7606		// result: (MOVWconst [0])
  7607		for {
  7608			_ = v.Args[1]
  7609			v_1 := v.Args[1]
  7610			if v_1.Op != OpConst64 {
  7611				break
  7612			}
  7613			c := v_1.AuxInt
  7614			if !(uint32(c) >= 32) {
  7615				break
  7616			}
  7617			v.reset(OpMIPSMOVWconst)
  7618			v.AuxInt = 0
  7619			return true
  7620		}
  7621		return false
  7622	}
  7623	func rewriteValueMIPS_OpRsh32Ux8_0(v *Value) bool {
  7624		b := v.Block
  7625		typ := &b.Func.Config.Types
  7626		// match: (Rsh32Ux8 <t> x y)
  7627		// cond:
  7628		// result: (CMOVZ (SRL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7629		for {
  7630			t := v.Type
  7631			y := v.Args[1]
  7632			x := v.Args[0]
  7633			v.reset(OpMIPSCMOVZ)
  7634			v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7635			v0.AddArg(x)
  7636			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7637			v1.AddArg(y)
  7638			v0.AddArg(v1)
  7639			v.AddArg(v0)
  7640			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7641			v2.AuxInt = 0
  7642			v.AddArg(v2)
  7643			v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7644			v3.AuxInt = 32
  7645			v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7646			v4.AddArg(y)
  7647			v3.AddArg(v4)
  7648			v.AddArg(v3)
  7649			return true
  7650		}
  7651	}
  7652	func rewriteValueMIPS_OpRsh32x16_0(v *Value) bool {
  7653		b := v.Block
  7654		typ := &b.Func.Config.Types
  7655		// match: (Rsh32x16 x y)
  7656		// cond:
  7657		// result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7658		for {
  7659			y := v.Args[1]
  7660			x := v.Args[0]
  7661			v.reset(OpMIPSSRA)
  7662			v.AddArg(x)
  7663			v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7664			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7665			v1.AddArg(y)
  7666			v0.AddArg(v1)
  7667			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7668			v2.AuxInt = -1
  7669			v0.AddArg(v2)
  7670			v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7671			v3.AuxInt = 32
  7672			v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7673			v4.AddArg(y)
  7674			v3.AddArg(v4)
  7675			v0.AddArg(v3)
  7676			v.AddArg(v0)
  7677			return true
  7678		}
  7679	}
  7680	func rewriteValueMIPS_OpRsh32x32_0(v *Value) bool {
  7681		b := v.Block
  7682		typ := &b.Func.Config.Types
  7683		// match: (Rsh32x32 x y)
  7684		// cond:
  7685		// result: (SRA x ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7686		for {
  7687			y := v.Args[1]
  7688			x := v.Args[0]
  7689			v.reset(OpMIPSSRA)
  7690			v.AddArg(x)
  7691			v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7692			v0.AddArg(y)
  7693			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7694			v1.AuxInt = -1
  7695			v0.AddArg(v1)
  7696			v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7697			v2.AuxInt = 32
  7698			v2.AddArg(y)
  7699			v0.AddArg(v2)
  7700			v.AddArg(v0)
  7701			return true
  7702		}
  7703	}
  7704	func rewriteValueMIPS_OpRsh32x64_0(v *Value) bool {
  7705		// match: (Rsh32x64 x (Const64 [c]))
  7706		// cond: uint32(c) < 32
  7707		// result: (SRAconst x [c])
  7708		for {
  7709			_ = v.Args[1]
  7710			x := v.Args[0]
  7711			v_1 := v.Args[1]
  7712			if v_1.Op != OpConst64 {
  7713				break
  7714			}
  7715			c := v_1.AuxInt
  7716			if !(uint32(c) < 32) {
  7717				break
  7718			}
  7719			v.reset(OpMIPSSRAconst)
  7720			v.AuxInt = c
  7721			v.AddArg(x)
  7722			return true
  7723		}
  7724		// match: (Rsh32x64 x (Const64 [c]))
  7725		// cond: uint32(c) >= 32
  7726		// result: (SRAconst x [31])
  7727		for {
  7728			_ = v.Args[1]
  7729			x := v.Args[0]
  7730			v_1 := v.Args[1]
  7731			if v_1.Op != OpConst64 {
  7732				break
  7733			}
  7734			c := v_1.AuxInt
  7735			if !(uint32(c) >= 32) {
  7736				break
  7737			}
  7738			v.reset(OpMIPSSRAconst)
  7739			v.AuxInt = 31
  7740			v.AddArg(x)
  7741			return true
  7742		}
  7743		return false
  7744	}
  7745	func rewriteValueMIPS_OpRsh32x8_0(v *Value) bool {
  7746		b := v.Block
  7747		typ := &b.Func.Config.Types
  7748		// match: (Rsh32x8 x y)
  7749		// cond:
  7750		// result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  7751		for {
  7752			y := v.Args[1]
  7753			x := v.Args[0]
  7754			v.reset(OpMIPSSRA)
  7755			v.AddArg(x)
  7756			v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7757			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7758			v1.AddArg(y)
  7759			v0.AddArg(v1)
  7760			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7761			v2.AuxInt = -1
  7762			v0.AddArg(v2)
  7763			v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7764			v3.AuxInt = 32
  7765			v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7766			v4.AddArg(y)
  7767			v3.AddArg(v4)
  7768			v0.AddArg(v3)
  7769			v.AddArg(v0)
  7770			return true
  7771		}
  7772	}
  7773	func rewriteValueMIPS_OpRsh8Ux16_0(v *Value) bool {
  7774		b := v.Block
  7775		typ := &b.Func.Config.Types
  7776		// match: (Rsh8Ux16 <t> x y)
  7777		// cond:
  7778		// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
  7779		for {
  7780			t := v.Type
  7781			y := v.Args[1]
  7782			x := v.Args[0]
  7783			v.reset(OpMIPSCMOVZ)
  7784			v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7785			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7786			v1.AddArg(x)
  7787			v0.AddArg(v1)
  7788			v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7789			v2.AddArg(y)
  7790			v0.AddArg(v2)
  7791			v.AddArg(v0)
  7792			v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7793			v3.AuxInt = 0
  7794			v.AddArg(v3)
  7795			v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7796			v4.AuxInt = 32
  7797			v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7798			v5.AddArg(y)
  7799			v4.AddArg(v5)
  7800			v.AddArg(v4)
  7801			return true
  7802		}
  7803	}
  7804	func rewriteValueMIPS_OpRsh8Ux32_0(v *Value) bool {
  7805		b := v.Block
  7806		typ := &b.Func.Config.Types
  7807		// match: (Rsh8Ux32 <t> x y)
  7808		// cond:
  7809		// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
  7810		for {
  7811			t := v.Type
  7812			y := v.Args[1]
  7813			x := v.Args[0]
  7814			v.reset(OpMIPSCMOVZ)
  7815			v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7816			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7817			v1.AddArg(x)
  7818			v0.AddArg(v1)
  7819			v0.AddArg(y)
  7820			v.AddArg(v0)
  7821			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7822			v2.AuxInt = 0
  7823			v.AddArg(v2)
  7824			v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7825			v3.AuxInt = 32
  7826			v3.AddArg(y)
  7827			v.AddArg(v3)
  7828			return true
  7829		}
  7830	}
  7831	func rewriteValueMIPS_OpRsh8Ux64_0(v *Value) bool {
  7832		b := v.Block
  7833		typ := &b.Func.Config.Types
  7834		// match: (Rsh8Ux64 x (Const64 [c]))
  7835		// cond: uint32(c) < 8
  7836		// result: (SRLconst (SLLconst <typ.UInt32> x [24]) [c+24])
  7837		for {
  7838			_ = v.Args[1]
  7839			x := v.Args[0]
  7840			v_1 := v.Args[1]
  7841			if v_1.Op != OpConst64 {
  7842				break
  7843			}
  7844			c := v_1.AuxInt
  7845			if !(uint32(c) < 8) {
  7846				break
  7847			}
  7848			v.reset(OpMIPSSRLconst)
  7849			v.AuxInt = c + 24
  7850			v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7851			v0.AuxInt = 24
  7852			v0.AddArg(x)
  7853			v.AddArg(v0)
  7854			return true
  7855		}
  7856		// match: (Rsh8Ux64 _ (Const64 [c]))
  7857		// cond: uint32(c) >= 8
  7858		// result: (MOVWconst [0])
  7859		for {
  7860			_ = v.Args[1]
  7861			v_1 := v.Args[1]
  7862			if v_1.Op != OpConst64 {
  7863				break
  7864			}
  7865			c := v_1.AuxInt
  7866			if !(uint32(c) >= 8) {
  7867				break
  7868			}
  7869			v.reset(OpMIPSMOVWconst)
  7870			v.AuxInt = 0
  7871			return true
  7872		}
  7873		return false
  7874	}
  7875	func rewriteValueMIPS_OpRsh8Ux8_0(v *Value) bool {
  7876		b := v.Block
  7877		typ := &b.Func.Config.Types
  7878		// match: (Rsh8Ux8 <t> x y)
  7879		// cond:
  7880		// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
  7881		for {
  7882			t := v.Type
  7883			y := v.Args[1]
  7884			x := v.Args[0]
  7885			v.reset(OpMIPSCMOVZ)
  7886			v0 := b.NewValue0(v.Pos, OpMIPSSRL, t)
  7887			v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7888			v1.AddArg(x)
  7889			v0.AddArg(v1)
  7890			v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7891			v2.AddArg(y)
  7892			v0.AddArg(v2)
  7893			v.AddArg(v0)
  7894			v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7895			v3.AuxInt = 0
  7896			v.AddArg(v3)
  7897			v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7898			v4.AuxInt = 32
  7899			v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  7900			v5.AddArg(y)
  7901			v4.AddArg(v5)
  7902			v.AddArg(v4)
  7903			return true
  7904		}
  7905	}
  7906	func rewriteValueMIPS_OpRsh8x16_0(v *Value) bool {
  7907		b := v.Block
  7908		typ := &b.Func.Config.Types
  7909		// match: (Rsh8x16 x y)
  7910		// cond:
  7911		// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
  7912		for {
  7913			y := v.Args[1]
  7914			x := v.Args[0]
  7915			v.reset(OpMIPSSRA)
  7916			v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7917			v0.AddArg(x)
  7918			v.AddArg(v0)
  7919			v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7920			v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7921			v2.AddArg(y)
  7922			v1.AddArg(v2)
  7923			v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7924			v3.AuxInt = -1
  7925			v1.AddArg(v3)
  7926			v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7927			v4.AuxInt = 32
  7928			v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7929			v5.AddArg(y)
  7930			v4.AddArg(v5)
  7931			v1.AddArg(v4)
  7932			v.AddArg(v1)
  7933			return true
  7934		}
  7935	}
  7936	func rewriteValueMIPS_OpRsh8x32_0(v *Value) bool {
  7937		b := v.Block
  7938		typ := &b.Func.Config.Types
  7939		// match: (Rsh8x32 x y)
  7940		// cond:
  7941		// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
  7942		for {
  7943			y := v.Args[1]
  7944			x := v.Args[0]
  7945			v.reset(OpMIPSSRA)
  7946			v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  7947			v0.AddArg(x)
  7948			v.AddArg(v0)
  7949			v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  7950			v1.AddArg(y)
  7951			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  7952			v2.AuxInt = -1
  7953			v1.AddArg(v2)
  7954			v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  7955			v3.AuxInt = 32
  7956			v3.AddArg(y)
  7957			v1.AddArg(v3)
  7958			v.AddArg(v1)
  7959			return true
  7960		}
  7961	}
  7962	func rewriteValueMIPS_OpRsh8x64_0(v *Value) bool {
  7963		b := v.Block
  7964		typ := &b.Func.Config.Types
  7965		// match: (Rsh8x64 x (Const64 [c]))
  7966		// cond: uint32(c) < 8
  7967		// result: (SRAconst (SLLconst <typ.UInt32> x [24]) [c+24])
  7968		for {
  7969			_ = v.Args[1]
  7970			x := v.Args[0]
  7971			v_1 := v.Args[1]
  7972			if v_1.Op != OpConst64 {
  7973				break
  7974			}
  7975			c := v_1.AuxInt
  7976			if !(uint32(c) < 8) {
  7977				break
  7978			}
  7979			v.reset(OpMIPSSRAconst)
  7980			v.AuxInt = c + 24
  7981			v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  7982			v0.AuxInt = 24
  7983			v0.AddArg(x)
  7984			v.AddArg(v0)
  7985			return true
  7986		}
  7987		// match: (Rsh8x64 x (Const64 [c]))
  7988		// cond: uint32(c) >= 8
  7989		// result: (SRAconst (SLLconst <typ.UInt32> x [24]) [31])
  7990		for {
  7991			_ = v.Args[1]
  7992			x := v.Args[0]
  7993			v_1 := v.Args[1]
  7994			if v_1.Op != OpConst64 {
  7995				break
  7996			}
  7997			c := v_1.AuxInt
  7998			if !(uint32(c) >= 8) {
  7999				break
  8000			}
  8001			v.reset(OpMIPSSRAconst)
  8002			v.AuxInt = 31
  8003			v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
  8004			v0.AuxInt = 24
  8005			v0.AddArg(x)
  8006			v.AddArg(v0)
  8007			return true
  8008		}
  8009		return false
  8010	}
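// In the two constant-count rules above, the 8-bit operand is first shifted
// left by 24 so that its sign bit lands in bit 31; the arithmetic right shift
// by c+24 then yields the correct signed result, and any count >= 8 saturates
// to a shift by 31. A worked instance of the first rule (assuming c = 3):
//	(Rsh8x64 x (Const64 [3])) -> (SRAconst (SLLconst <typ.UInt32> x [24]) [27])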
  8011	func rewriteValueMIPS_OpRsh8x8_0(v *Value) bool {
  8012		b := v.Block
  8013		typ := &b.Func.Config.Types
  8014		// match: (Rsh8x8 x y)
  8015		// cond:
  8016		// result: (SRA (SignExt8to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
  8017		for {
  8018			y := v.Args[1]
  8019			x := v.Args[0]
  8020			v.reset(OpMIPSSRA)
  8021			v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
  8022			v0.AddArg(x)
  8023			v.AddArg(v0)
  8024			v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32)
  8025			v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  8026			v2.AddArg(y)
  8027			v1.AddArg(v2)
  8028			v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8029			v3.AuxInt = -1
  8030			v1.AddArg(v3)
  8031			v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool)
  8032			v4.AuxInt = 32
  8033			v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  8034			v5.AddArg(y)
  8035			v4.AddArg(v5)
  8036			v1.AddArg(v4)
  8037			v.AddArg(v1)
  8038			return true
  8039		}
  8040	}
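// The Select0/Select1 rules that follow rely on MULTU, DIV and DIVU producing a
// two-register (HI, LO) tuple: Select0 is HI (the upper 32 bits of the product,
// or the remainder for the divisions) and Select1 is LO (the lower 32 bits, or
// the quotient), as the constant-folding cases below spell out. This is why a
// multiply by a power of two 2^k lets Select0 reduce to (SRLconst [32-k] x) and
// Select1 to (SLLconst [k] x); for example, with c = 8 the Select0 rule emits
// (SRLconst [29] x).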
  8041	func rewriteValueMIPS_OpSelect0_0(v *Value) bool {
  8042		b := v.Block
  8043		typ := &b.Func.Config.Types
  8044		// match: (Select0 (Add32carry <t> x y))
  8045		// cond:
  8046		// result: (ADD <t.FieldType(0)> x y)
  8047		for {
  8048			v_0 := v.Args[0]
  8049			if v_0.Op != OpAdd32carry {
  8050				break
  8051			}
  8052			t := v_0.Type
  8053			y := v_0.Args[1]
  8054			x := v_0.Args[0]
  8055			v.reset(OpMIPSADD)
  8056			v.Type = t.FieldType(0)
  8057			v.AddArg(x)
  8058			v.AddArg(y)
  8059			return true
  8060		}
  8061		// match: (Select0 (Sub32carry <t> x y))
  8062		// cond:
  8063		// result: (SUB <t.FieldType(0)> x y)
  8064		for {
  8065			v_0 := v.Args[0]
  8066			if v_0.Op != OpSub32carry {
  8067				break
  8068			}
  8069			t := v_0.Type
  8070			y := v_0.Args[1]
  8071			x := v_0.Args[0]
  8072			v.reset(OpMIPSSUB)
  8073			v.Type = t.FieldType(0)
  8074			v.AddArg(x)
  8075			v.AddArg(y)
  8076			return true
  8077		}
  8078		// match: (Select0 (MULTU (MOVWconst [0]) _))
  8079		// cond:
  8080		// result: (MOVWconst [0])
  8081		for {
  8082			v_0 := v.Args[0]
  8083			if v_0.Op != OpMIPSMULTU {
  8084				break
  8085			}
  8086			_ = v_0.Args[1]
  8087			v_0_0 := v_0.Args[0]
  8088			if v_0_0.Op != OpMIPSMOVWconst {
  8089				break
  8090			}
  8091			if v_0_0.AuxInt != 0 {
  8092				break
  8093			}
  8094			v.reset(OpMIPSMOVWconst)
  8095			v.AuxInt = 0
  8096			return true
  8097		}
  8098		// match: (Select0 (MULTU _ (MOVWconst [0])))
  8099		// cond:
  8100		// result: (MOVWconst [0])
  8101		for {
  8102			v_0 := v.Args[0]
  8103			if v_0.Op != OpMIPSMULTU {
  8104				break
  8105			}
  8106			_ = v_0.Args[1]
  8107			v_0_1 := v_0.Args[1]
  8108			if v_0_1.Op != OpMIPSMOVWconst {
  8109				break
  8110			}
  8111			if v_0_1.AuxInt != 0 {
  8112				break
  8113			}
  8114			v.reset(OpMIPSMOVWconst)
  8115			v.AuxInt = 0
  8116			return true
  8117		}
  8118		// match: (Select0 (MULTU (MOVWconst [1]) _))
  8119		// cond:
  8120		// result: (MOVWconst [0])
  8121		for {
  8122			v_0 := v.Args[0]
  8123			if v_0.Op != OpMIPSMULTU {
  8124				break
  8125			}
  8126			_ = v_0.Args[1]
  8127			v_0_0 := v_0.Args[0]
  8128			if v_0_0.Op != OpMIPSMOVWconst {
  8129				break
  8130			}
  8131			if v_0_0.AuxInt != 1 {
  8132				break
  8133			}
  8134			v.reset(OpMIPSMOVWconst)
  8135			v.AuxInt = 0
  8136			return true
  8137		}
  8138		// match: (Select0 (MULTU _ (MOVWconst [1])))
  8139		// cond:
  8140		// result: (MOVWconst [0])
  8141		for {
  8142			v_0 := v.Args[0]
  8143			if v_0.Op != OpMIPSMULTU {
  8144				break
  8145			}
  8146			_ = v_0.Args[1]
  8147			v_0_1 := v_0.Args[1]
  8148			if v_0_1.Op != OpMIPSMOVWconst {
  8149				break
  8150			}
  8151			if v_0_1.AuxInt != 1 {
  8152				break
  8153			}
  8154			v.reset(OpMIPSMOVWconst)
  8155			v.AuxInt = 0
  8156			return true
  8157		}
  8158		// match: (Select0 (MULTU (MOVWconst [-1]) x))
  8159		// cond:
  8160		// result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x)
  8161		for {
  8162			v_0 := v.Args[0]
  8163			if v_0.Op != OpMIPSMULTU {
  8164				break
  8165			}
  8166			x := v_0.Args[1]
  8167			v_0_0 := v_0.Args[0]
  8168			if v_0_0.Op != OpMIPSMOVWconst {
  8169				break
  8170			}
  8171			if v_0_0.AuxInt != -1 {
  8172				break
  8173			}
  8174			v.reset(OpMIPSCMOVZ)
  8175			v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type)
  8176			v0.AuxInt = -1
  8177			v0.AddArg(x)
  8178			v.AddArg(v0)
  8179			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8180			v1.AuxInt = 0
  8181			v.AddArg(v1)
  8182			v.AddArg(x)
  8183			return true
  8184		}
  8185		// match: (Select0 (MULTU x (MOVWconst [-1])))
  8186		// cond:
  8187		// result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x)
  8188		for {
  8189			v_0 := v.Args[0]
  8190			if v_0.Op != OpMIPSMULTU {
  8191				break
  8192			}
  8193			_ = v_0.Args[1]
  8194			x := v_0.Args[0]
  8195			v_0_1 := v_0.Args[1]
  8196			if v_0_1.Op != OpMIPSMOVWconst {
  8197				break
  8198			}
  8199			if v_0_1.AuxInt != -1 {
  8200				break
  8201			}
  8202			v.reset(OpMIPSCMOVZ)
  8203			v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type)
  8204			v0.AuxInt = -1
  8205			v0.AddArg(x)
  8206			v.AddArg(v0)
  8207			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  8208			v1.AuxInt = 0
  8209			v.AddArg(v1)
  8210			v.AddArg(x)
  8211			return true
  8212		}
  8213		// match: (Select0 (MULTU (MOVWconst [c]) x))
  8214		// cond: isPowerOfTwo(int64(uint32(c)))
  8215		// result: (SRLconst [32-log2(int64(uint32(c)))] x)
  8216		for {
  8217			v_0 := v.Args[0]
  8218			if v_0.Op != OpMIPSMULTU {
  8219				break
  8220			}
  8221			x := v_0.Args[1]
  8222			v_0_0 := v_0.Args[0]
  8223			if v_0_0.Op != OpMIPSMOVWconst {
  8224				break
  8225			}
  8226			c := v_0_0.AuxInt
  8227			if !(isPowerOfTwo(int64(uint32(c)))) {
  8228				break
  8229			}
  8230			v.reset(OpMIPSSRLconst)
  8231			v.AuxInt = 32 - log2(int64(uint32(c)))
  8232			v.AddArg(x)
  8233			return true
  8234		}
  8235		// match: (Select0 (MULTU x (MOVWconst [c])))
  8236		// cond: isPowerOfTwo(int64(uint32(c)))
  8237		// result: (SRLconst [32-log2(int64(uint32(c)))] x)
  8238		for {
  8239			v_0 := v.Args[0]
  8240			if v_0.Op != OpMIPSMULTU {
  8241				break
  8242			}
  8243			_ = v_0.Args[1]
  8244			x := v_0.Args[0]
  8245			v_0_1 := v_0.Args[1]
  8246			if v_0_1.Op != OpMIPSMOVWconst {
  8247				break
  8248			}
  8249			c := v_0_1.AuxInt
  8250			if !(isPowerOfTwo(int64(uint32(c)))) {
  8251				break
  8252			}
  8253			v.reset(OpMIPSSRLconst)
  8254			v.AuxInt = 32 - log2(int64(uint32(c)))
  8255			v.AddArg(x)
  8256			return true
  8257		}
  8258		return false
  8259	}
  8260	func rewriteValueMIPS_OpSelect0_10(v *Value) bool {
  8261		// match: (Select0 (MULTU (MOVWconst [c]) (MOVWconst [d])))
  8262		// cond:
  8263		// result: (MOVWconst [(c*d)>>32])
  8264		for {
  8265			v_0 := v.Args[0]
  8266			if v_0.Op != OpMIPSMULTU {
  8267				break
  8268			}
  8269			_ = v_0.Args[1]
  8270			v_0_0 := v_0.Args[0]
  8271			if v_0_0.Op != OpMIPSMOVWconst {
  8272				break
  8273			}
  8274			c := v_0_0.AuxInt
  8275			v_0_1 := v_0.Args[1]
  8276			if v_0_1.Op != OpMIPSMOVWconst {
  8277				break
  8278			}
  8279			d := v_0_1.AuxInt
  8280			v.reset(OpMIPSMOVWconst)
  8281			v.AuxInt = (c * d) >> 32
  8282			return true
  8283		}
  8284		// match: (Select0 (MULTU (MOVWconst [d]) (MOVWconst [c])))
  8285		// cond:
  8286		// result: (MOVWconst [(c*d)>>32])
  8287		for {
  8288			v_0 := v.Args[0]
  8289			if v_0.Op != OpMIPSMULTU {
  8290				break
  8291			}
  8292			_ = v_0.Args[1]
  8293			v_0_0 := v_0.Args[0]
  8294			if v_0_0.Op != OpMIPSMOVWconst {
  8295				break
  8296			}
  8297			d := v_0_0.AuxInt
  8298			v_0_1 := v_0.Args[1]
  8299			if v_0_1.Op != OpMIPSMOVWconst {
  8300				break
  8301			}
  8302			c := v_0_1.AuxInt
  8303			v.reset(OpMIPSMOVWconst)
  8304			v.AuxInt = (c * d) >> 32
  8305			return true
  8306		}
  8307		// match: (Select0 (DIV (MOVWconst [c]) (MOVWconst [d])))
  8308		// cond:
  8309		// result: (MOVWconst [int64(int32(c)%int32(d))])
  8310		for {
  8311			v_0 := v.Args[0]
  8312			if v_0.Op != OpMIPSDIV {
  8313				break
  8314			}
  8315			_ = v_0.Args[1]
  8316			v_0_0 := v_0.Args[0]
  8317			if v_0_0.Op != OpMIPSMOVWconst {
  8318				break
  8319			}
  8320			c := v_0_0.AuxInt
  8321			v_0_1 := v_0.Args[1]
  8322			if v_0_1.Op != OpMIPSMOVWconst {
  8323				break
  8324			}
  8325			d := v_0_1.AuxInt
  8326			v.reset(OpMIPSMOVWconst)
  8327			v.AuxInt = int64(int32(c) % int32(d))
  8328			return true
  8329		}
  8330		// match: (Select0 (DIVU (MOVWconst [c]) (MOVWconst [d])))
  8331		// cond:
  8332		// result: (MOVWconst [int64(int32(uint32(c)%uint32(d)))])
  8333		for {
  8334			v_0 := v.Args[0]
  8335			if v_0.Op != OpMIPSDIVU {
  8336				break
  8337			}
  8338			_ = v_0.Args[1]
  8339			v_0_0 := v_0.Args[0]
  8340			if v_0_0.Op != OpMIPSMOVWconst {
  8341				break
  8342			}
  8343			c := v_0_0.AuxInt
  8344			v_0_1 := v_0.Args[1]
  8345			if v_0_1.Op != OpMIPSMOVWconst {
  8346				break
  8347			}
  8348			d := v_0_1.AuxInt
  8349			v.reset(OpMIPSMOVWconst)
  8350			v.AuxInt = int64(int32(uint32(c) % uint32(d)))
  8351			return true
  8352		}
  8353		return false
  8354	}
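// A small worked check of the constant folds above (assuming c = d = 0x10000):
// the full product is 1<<32, so Select0 folds to (MOVWconst [1]) via (c*d)>>32,
// while the matching Select1 rule below folds the low word to (MOVWconst [0]).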
  8355	func rewriteValueMIPS_OpSelect1_0(v *Value) bool {
  8356		b := v.Block
  8357		typ := &b.Func.Config.Types
  8358		// match: (Select1 (Add32carry <t> x y))
  8359		// cond:
  8360		// result: (SGTU <typ.Bool> x (ADD <t.FieldType(0)> x y))
  8361		for {
  8362			v_0 := v.Args[0]
  8363			if v_0.Op != OpAdd32carry {
  8364				break
  8365			}
  8366			t := v_0.Type
  8367			y := v_0.Args[1]
  8368			x := v_0.Args[0]
  8369			v.reset(OpMIPSSGTU)
  8370			v.Type = typ.Bool
  8371			v.AddArg(x)
  8372			v0 := b.NewValue0(v.Pos, OpMIPSADD, t.FieldType(0))
  8373			v0.AddArg(x)
  8374			v0.AddArg(y)
  8375			v.AddArg(v0)
  8376			return true
  8377		}
  8378		// match: (Select1 (Sub32carry <t> x y))
  8379		// cond:
  8380		// result: (SGTU <typ.Bool> (SUB <t.FieldType(0)> x y) x)
  8381		for {
  8382			v_0 := v.Args[0]
  8383			if v_0.Op != OpSub32carry {
  8384				break
  8385			}
  8386			t := v_0.Type
  8387			y := v_0.Args[1]
  8388			x := v_0.Args[0]
  8389			v.reset(OpMIPSSGTU)
  8390			v.Type = typ.Bool
  8391			v0 := b.NewValue0(v.Pos, OpMIPSSUB, t.FieldType(0))
  8392			v0.AddArg(x)
  8393			v0.AddArg(y)
  8394			v.AddArg(v0)
  8395			v.AddArg(x)
  8396			return true
  8397		}
  8398		// match: (Select1 (MULTU (MOVWconst [0]) _))
  8399		// cond:
  8400		// result: (MOVWconst [0])
  8401		for {
  8402			v_0 := v.Args[0]
  8403			if v_0.Op != OpMIPSMULTU {
  8404				break
  8405			}
  8406			_ = v_0.Args[1]
  8407			v_0_0 := v_0.Args[0]
  8408			if v_0_0.Op != OpMIPSMOVWconst {
  8409				break
  8410			}
  8411			if v_0_0.AuxInt != 0 {
  8412				break
  8413			}
  8414			v.reset(OpMIPSMOVWconst)
  8415			v.AuxInt = 0
  8416			return true
  8417		}
  8418		// match: (Select1 (MULTU _ (MOVWconst [0])))
  8419		// cond:
  8420		// result: (MOVWconst [0])
  8421		for {
  8422			v_0 := v.Args[0]
  8423			if v_0.Op != OpMIPSMULTU {
  8424				break
  8425			}
  8426			_ = v_0.Args[1]
  8427			v_0_1 := v_0.Args[1]
  8428			if v_0_1.Op != OpMIPSMOVWconst {
  8429				break
  8430			}
  8431			if v_0_1.AuxInt != 0 {
  8432				break
  8433			}
  8434			v.reset(OpMIPSMOVWconst)
  8435			v.AuxInt = 0
  8436			return true
  8437		}
  8438		// match: (Select1 (MULTU (MOVWconst [1]) x))
  8439		// cond:
  8440		// result: x
  8441		for {
  8442			v_0 := v.Args[0]
  8443			if v_0.Op != OpMIPSMULTU {
  8444				break
  8445			}
  8446			x := v_0.Args[1]
  8447			v_0_0 := v_0.Args[0]
  8448			if v_0_0.Op != OpMIPSMOVWconst {
  8449				break
  8450			}
  8451			if v_0_0.AuxInt != 1 {
  8452				break
  8453			}
  8454			v.reset(OpCopy)
  8455			v.Type = x.Type
  8456			v.AddArg(x)
  8457			return true
  8458		}
  8459		// match: (Select1 (MULTU x (MOVWconst [1])))
  8460		// cond:
  8461		// result: x
  8462		for {
  8463			v_0 := v.Args[0]
  8464			if v_0.Op != OpMIPSMULTU {
  8465				break
  8466			}
  8467			_ = v_0.Args[1]
  8468			x := v_0.Args[0]
  8469			v_0_1 := v_0.Args[1]
  8470			if v_0_1.Op != OpMIPSMOVWconst {
  8471				break
  8472			}
  8473			if v_0_1.AuxInt != 1 {
  8474				break
  8475			}
  8476			v.reset(OpCopy)
  8477			v.Type = x.Type
  8478			v.AddArg(x)
  8479			return true
  8480		}
  8481		// match: (Select1 (MULTU (MOVWconst [-1]) x))
  8482		// cond:
  8483		// result: (NEG <x.Type> x)
  8484		for {
  8485			v_0 := v.Args[0]
  8486			if v_0.Op != OpMIPSMULTU {
  8487				break
  8488			}
  8489			x := v_0.Args[1]
  8490			v_0_0 := v_0.Args[0]
  8491			if v_0_0.Op != OpMIPSMOVWconst {
  8492				break
  8493			}
  8494			if v_0_0.AuxInt != -1 {
  8495				break
  8496			}
  8497			v.reset(OpMIPSNEG)
  8498			v.Type = x.Type
  8499			v.AddArg(x)
  8500			return true
  8501		}
  8502		// match: (Select1 (MULTU x (MOVWconst [-1])))
  8503		// cond:
  8504		// result: (NEG <x.Type> x)
  8505		for {
  8506			v_0 := v.Args[0]
  8507			if v_0.Op != OpMIPSMULTU {
  8508				break
  8509			}
  8510			_ = v_0.Args[1]
  8511			x := v_0.Args[0]
  8512			v_0_1 := v_0.Args[1]
  8513			if v_0_1.Op != OpMIPSMOVWconst {
  8514				break
  8515			}
  8516			if v_0_1.AuxInt != -1 {
  8517				break
  8518			}
  8519			v.reset(OpMIPSNEG)
  8520			v.Type = x.Type
  8521			v.AddArg(x)
  8522			return true
  8523		}
  8524		// match: (Select1 (MULTU (MOVWconst [c]) x))
  8525		// cond: isPowerOfTwo(int64(uint32(c)))
  8526		// result: (SLLconst [log2(int64(uint32(c)))] x)
  8527		for {
  8528			v_0 := v.Args[0]
  8529			if v_0.Op != OpMIPSMULTU {
  8530				break
  8531			}
  8532			x := v_0.Args[1]
  8533			v_0_0 := v_0.Args[0]
  8534			if v_0_0.Op != OpMIPSMOVWconst {
  8535				break
  8536			}
  8537			c := v_0_0.AuxInt
  8538			if !(isPowerOfTwo(int64(uint32(c)))) {
  8539				break
  8540			}
  8541			v.reset(OpMIPSSLLconst)
  8542			v.AuxInt = log2(int64(uint32(c)))
  8543			v.AddArg(x)
  8544			return true
  8545		}
  8546		// match: (Select1 (MULTU x (MOVWconst [c])))
  8547		// cond: isPowerOfTwo(int64(uint32(c)))
  8548		// result: (SLLconst [log2(int64(uint32(c)))] x)
  8549		for {
  8550			v_0 := v.Args[0]
  8551			if v_0.Op != OpMIPSMULTU {
  8552				break
  8553			}
  8554			_ = v_0.Args[1]
  8555			x := v_0.Args[0]
  8556			v_0_1 := v_0.Args[1]
  8557			if v_0_1.Op != OpMIPSMOVWconst {
  8558				break
  8559			}
  8560			c := v_0_1.AuxInt
  8561			if !(isPowerOfTwo(int64(uint32(c)))) {
  8562				break
  8563			}
  8564			v.reset(OpMIPSSLLconst)
  8565			v.AuxInt = log2(int64(uint32(c)))
  8566			v.AddArg(x)
  8567			return true
  8568		}
  8569		return false
  8570	}
  8571	func rewriteValueMIPS_OpSelect1_10(v *Value) bool {
  8572		// match: (Select1 (MULTU (MOVWconst [c]) (MOVWconst [d])))
  8573		// cond:
  8574		// result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))])
  8575		for {
  8576			v_0 := v.Args[0]
  8577			if v_0.Op != OpMIPSMULTU {
  8578				break
  8579			}
  8580			_ = v_0.Args[1]
  8581			v_0_0 := v_0.Args[0]
  8582			if v_0_0.Op != OpMIPSMOVWconst {
  8583				break
  8584			}
  8585			c := v_0_0.AuxInt
  8586			v_0_1 := v_0.Args[1]
  8587			if v_0_1.Op != OpMIPSMOVWconst {
  8588				break
  8589			}
  8590			d := v_0_1.AuxInt
  8591			v.reset(OpMIPSMOVWconst)
  8592			v.AuxInt = int64(int32(uint32(c) * uint32(d)))
  8593			return true
  8594		}
  8595		// match: (Select1 (MULTU (MOVWconst [d]) (MOVWconst [c])))
  8596		// cond:
  8597		// result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))])
  8598		for {
  8599			v_0 := v.Args[0]
  8600			if v_0.Op != OpMIPSMULTU {
  8601				break
  8602			}
  8603			_ = v_0.Args[1]
  8604			v_0_0 := v_0.Args[0]
  8605			if v_0_0.Op != OpMIPSMOVWconst {
  8606				break
  8607			}
  8608			d := v_0_0.AuxInt
  8609			v_0_1 := v_0.Args[1]
  8610			if v_0_1.Op != OpMIPSMOVWconst {
  8611				break
  8612			}
  8613			c := v_0_1.AuxInt
  8614			v.reset(OpMIPSMOVWconst)
  8615			v.AuxInt = int64(int32(uint32(c) * uint32(d)))
  8616			return true
  8617		}
  8618		// match: (Select1 (DIV (MOVWconst [c]) (MOVWconst [d])))
  8619		// cond:
  8620		// result: (MOVWconst [int64(int32(c)/int32(d))])
  8621		for {
  8622			v_0 := v.Args[0]
  8623			if v_0.Op != OpMIPSDIV {
  8624				break
  8625			}
  8626			_ = v_0.Args[1]
  8627			v_0_0 := v_0.Args[0]
  8628			if v_0_0.Op != OpMIPSMOVWconst {
  8629				break
  8630			}
  8631			c := v_0_0.AuxInt
  8632			v_0_1 := v_0.Args[1]
  8633			if v_0_1.Op != OpMIPSMOVWconst {
  8634				break
  8635			}
  8636			d := v_0_1.AuxInt
  8637			v.reset(OpMIPSMOVWconst)
  8638			v.AuxInt = int64(int32(c) / int32(d))
  8639			return true
  8640		}
  8641		// match: (Select1 (DIVU (MOVWconst [c]) (MOVWconst [d])))
  8642		// cond:
  8643		// result: (MOVWconst [int64(int32(uint32(c)/uint32(d)))])
  8644		for {
  8645			v_0 := v.Args[0]
  8646			if v_0.Op != OpMIPSDIVU {
  8647				break
  8648			}
  8649			_ = v_0.Args[1]
  8650			v_0_0 := v_0.Args[0]
  8651			if v_0_0.Op != OpMIPSMOVWconst {
  8652				break
  8653			}
  8654			c := v_0_0.AuxInt
  8655			v_0_1 := v_0.Args[1]
  8656			if v_0_1.Op != OpMIPSMOVWconst {
  8657				break
  8658			}
  8659			d := v_0_1.AuxInt
  8660			v.reset(OpMIPSMOVWconst)
  8661			v.AuxInt = int64(int32(uint32(c) / uint32(d)))
  8662			return true
  8663		}
  8664		return false
  8665	}
  8666	func rewriteValueMIPS_OpSignExt16to32_0(v *Value) bool {
  8667		// match: (SignExt16to32 x)
  8668		// cond:
  8669		// result: (MOVHreg x)
  8670		for {
  8671			x := v.Args[0]
  8672			v.reset(OpMIPSMOVHreg)
  8673			v.AddArg(x)
  8674			return true
  8675		}
  8676	}
  8677	func rewriteValueMIPS_OpSignExt8to16_0(v *Value) bool {
  8678		// match: (SignExt8to16 x)
  8679		// cond:
  8680		// result: (MOVBreg x)
  8681		for {
  8682			x := v.Args[0]
  8683			v.reset(OpMIPSMOVBreg)
  8684			v.AddArg(x)
  8685			return true
  8686		}
  8687	}
  8688	func rewriteValueMIPS_OpSignExt8to32_0(v *Value) bool {
  8689		// match: (SignExt8to32 x)
  8690		// cond:
  8691		// result: (MOVBreg x)
  8692		for {
  8693			x := v.Args[0]
  8694			v.reset(OpMIPSMOVBreg)
  8695			v.AddArg(x)
  8696			return true
  8697		}
  8698	}
  8699	func rewriteValueMIPS_OpSignmask_0(v *Value) bool {
  8700		// match: (Signmask x)
  8701		// cond:
  8702		// result: (SRAconst x [31])
  8703		for {
  8704			x := v.Args[0]
  8705			v.reset(OpMIPSSRAconst)
  8706			v.AuxInt = 31
  8707			v.AddArg(x)
  8708			return true
  8709		}
  8710	}
  8711	func rewriteValueMIPS_OpSlicemask_0(v *Value) bool {
  8712		b := v.Block
  8713		// match: (Slicemask <t> x)
  8714		// cond:
  8715		// result: (SRAconst (NEG <t> x) [31])
  8716		for {
  8717			t := v.Type
  8718			x := v.Args[0]
  8719			v.reset(OpMIPSSRAconst)
  8720			v.AuxInt = 31
  8721			v0 := b.NewValue0(v.Pos, OpMIPSNEG, t)
  8722			v0.AddArg(x)
  8723			v.AddArg(v0)
  8724			return true
  8725		}
  8726	}
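// The two mask helpers above are both built from a 31-bit arithmetic shift:
// Signmask broadcasts the sign bit of x, yielding 0 for non-negative x and -1
// otherwise, while Slicemask negates first so that any non-zero (positive)
// length becomes negative and the shift then produces all ones, with a zero
// length staying zero.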
  8727	func rewriteValueMIPS_OpSqrt_0(v *Value) bool {
  8728		// match: (Sqrt x)
  8729		// cond:
  8730		// result: (SQRTD x)
  8731		for {
  8732			x := v.Args[0]
  8733			v.reset(OpMIPSSQRTD)
  8734			v.AddArg(x)
  8735			return true
  8736		}
  8737	}
  8738	func rewriteValueMIPS_OpStaticCall_0(v *Value) bool {
  8739		// match: (StaticCall [argwid] {target} mem)
  8740		// cond:
  8741		// result: (CALLstatic [argwid] {target} mem)
  8742		for {
  8743			argwid := v.AuxInt
  8744			target := v.Aux
  8745			mem := v.Args[0]
  8746			v.reset(OpMIPSCALLstatic)
  8747			v.AuxInt = argwid
  8748			v.Aux = target
  8749			v.AddArg(mem)
  8750			return true
  8751		}
  8752	}
  8753	func rewriteValueMIPS_OpStore_0(v *Value) bool {
  8754		// match: (Store {t} ptr val mem)
  8755		// cond: t.(*types.Type).Size() == 1
  8756		// result: (MOVBstore ptr val mem)
  8757		for {
  8758			t := v.Aux
  8759			mem := v.Args[2]
  8760			ptr := v.Args[0]
  8761			val := v.Args[1]
  8762			if !(t.(*types.Type).Size() == 1) {
  8763				break
  8764			}
  8765			v.reset(OpMIPSMOVBstore)
  8766			v.AddArg(ptr)
  8767			v.AddArg(val)
  8768			v.AddArg(mem)
  8769			return true
  8770		}
  8771		// match: (Store {t} ptr val mem)
  8772		// cond: t.(*types.Type).Size() == 2
  8773		// result: (MOVHstore ptr val mem)
  8774		for {
  8775			t := v.Aux
  8776			mem := v.Args[2]
  8777			ptr := v.Args[0]
  8778			val := v.Args[1]
  8779			if !(t.(*types.Type).Size() == 2) {
  8780				break
  8781			}
  8782			v.reset(OpMIPSMOVHstore)
  8783			v.AddArg(ptr)
  8784			v.AddArg(val)
  8785			v.AddArg(mem)
  8786			return true
  8787		}
  8788		// match: (Store {t} ptr val mem)
  8789		// cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)
  8790		// result: (MOVWstore ptr val mem)
  8791		for {
  8792			t := v.Aux
  8793			mem := v.Args[2]
  8794			ptr := v.Args[0]
  8795			val := v.Args[1]
  8796			if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) {
  8797				break
  8798			}
  8799			v.reset(OpMIPSMOVWstore)
  8800			v.AddArg(ptr)
  8801			v.AddArg(val)
  8802			v.AddArg(mem)
  8803			return true
  8804		}
  8805		// match: (Store {t} ptr val mem)
  8806		// cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)
  8807		// result: (MOVFstore ptr val mem)
  8808		for {
  8809			t := v.Aux
  8810			mem := v.Args[2]
  8811			ptr := v.Args[0]
  8812			val := v.Args[1]
  8813			if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) {
  8814				break
  8815			}
  8816			v.reset(OpMIPSMOVFstore)
  8817			v.AddArg(ptr)
  8818			v.AddArg(val)
  8819			v.AddArg(mem)
  8820			return true
  8821		}
  8822		// match: (Store {t} ptr val mem)
  8823		// cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)
  8824		// result: (MOVDstore ptr val mem)
  8825		for {
  8826			t := v.Aux
  8827			mem := v.Args[2]
  8828			ptr := v.Args[0]
  8829			val := v.Args[1]
  8830			if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) {
  8831				break
  8832			}
  8833			v.reset(OpMIPSMOVDstore)
  8834			v.AddArg(ptr)
  8835			v.AddArg(val)
  8836			v.AddArg(mem)
  8837			return true
  8838		}
  8839		return false
  8840	}
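// Store above dispatches purely on the size recorded in the aux type and on
// whether the value is a float: 1-, 2- and 4-byte integers become MOVBstore,
// MOVHstore and MOVWstore, while 4- and 8-byte floats become MOVFstore and
// MOVDstore. There is no 8-byte integer case here; on this 32-bit target such
// stores are expected to have been split into two 32-bit stores by the earlier
// 64-bit decomposition rules.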
  8841	func rewriteValueMIPS_OpSub16_0(v *Value) bool {
  8842		// match: (Sub16 x y)
  8843		// cond:
  8844		// result: (SUB x y)
  8845		for {
  8846			y := v.Args[1]
  8847			x := v.Args[0]
  8848			v.reset(OpMIPSSUB)
  8849			v.AddArg(x)
  8850			v.AddArg(y)
  8851			return true
  8852		}
  8853	}
  8854	func rewriteValueMIPS_OpSub32_0(v *Value) bool {
  8855		// match: (Sub32 x y)
  8856		// cond:
  8857		// result: (SUB x y)
  8858		for {
  8859			y := v.Args[1]
  8860			x := v.Args[0]
  8861			v.reset(OpMIPSSUB)
  8862			v.AddArg(x)
  8863			v.AddArg(y)
  8864			return true
  8865		}
  8866	}
  8867	func rewriteValueMIPS_OpSub32F_0(v *Value) bool {
  8868		// match: (Sub32F x y)
  8869		// cond:
  8870		// result: (SUBF x y)
  8871		for {
  8872			y := v.Args[1]
  8873			x := v.Args[0]
  8874			v.reset(OpMIPSSUBF)
  8875			v.AddArg(x)
  8876			v.AddArg(y)
  8877			return true
  8878		}
  8879	}
  8880	func rewriteValueMIPS_OpSub32withcarry_0(v *Value) bool {
  8881		b := v.Block
  8882		// match: (Sub32withcarry <t> x y c)
  8883		// cond:
  8884		// result: (SUB (SUB <t> x y) c)
  8885		for {
  8886			t := v.Type
  8887			c := v.Args[2]
  8888			x := v.Args[0]
  8889			y := v.Args[1]
  8890			v.reset(OpMIPSSUB)
  8891			v0 := b.NewValue0(v.Pos, OpMIPSSUB, t)
  8892			v0.AddArg(x)
  8893			v0.AddArg(y)
  8894			v.AddArg(v0)
  8895			v.AddArg(c)
  8896			return true
  8897		}
  8898	}
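// Sub32withcarry above threads the borrow through as a third operand: the
// result is (x - y) - c. The matching borrow-out is produced separately by the
// (Select1 (Sub32carry ...)) rule earlier in this file, which tests whether
// (SUB x y) is unsigned-greater than x.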
  8899	func rewriteValueMIPS_OpSub64F_0(v *Value) bool {
  8900		// match: (Sub64F x y)
  8901		// cond:
  8902		// result: (SUBD x y)
  8903		for {
  8904			y := v.Args[1]
  8905			x := v.Args[0]
  8906			v.reset(OpMIPSSUBD)
  8907			v.AddArg(x)
  8908			v.AddArg(y)
  8909			return true
  8910		}
  8911	}
  8912	func rewriteValueMIPS_OpSub8_0(v *Value) bool {
  8913		// match: (Sub8 x y)
  8914		// cond:
  8915		// result: (SUB x y)
  8916		for {
  8917			y := v.Args[1]
  8918			x := v.Args[0]
  8919			v.reset(OpMIPSSUB)
  8920			v.AddArg(x)
  8921			v.AddArg(y)
  8922			return true
  8923		}
  8924	}
  8925	func rewriteValueMIPS_OpSubPtr_0(v *Value) bool {
  8926		// match: (SubPtr x y)
  8927		// cond:
  8928		// result: (SUB x y)
  8929		for {
  8930			y := v.Args[1]
  8931			x := v.Args[0]
  8932			v.reset(OpMIPSSUB)
  8933			v.AddArg(x)
  8934			v.AddArg(y)
  8935			return true
  8936		}
  8937	}
  8938	func rewriteValueMIPS_OpTrunc16to8_0(v *Value) bool {
  8939		// match: (Trunc16to8 x)
  8940		// cond:
  8941		// result: x
  8942		for {
  8943			x := v.Args[0]
  8944			v.reset(OpCopy)
  8945			v.Type = x.Type
  8946			v.AddArg(x)
  8947			return true
  8948		}
  8949	}
  8950	func rewriteValueMIPS_OpTrunc32to16_0(v *Value) bool {
  8951		// match: (Trunc32to16 x)
  8952		// cond:
  8953		// result: x
  8954		for {
  8955			x := v.Args[0]
  8956			v.reset(OpCopy)
  8957			v.Type = x.Type
  8958			v.AddArg(x)
  8959			return true
  8960		}
  8961	}
  8962	func rewriteValueMIPS_OpTrunc32to8_0(v *Value) bool {
  8963		// match: (Trunc32to8 x)
  8964		// cond:
  8965		// result: x
  8966		for {
  8967			x := v.Args[0]
  8968			v.reset(OpCopy)
  8969			v.Type = x.Type
  8970			v.AddArg(x)
  8971			return true
  8972		}
  8973	}
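// The Trunc* rules above lower to plain copies: a narrower integer simply
// reuses the low bits of its 32-bit register, with the upper bits left
// unspecified. That is why rules that do care about the upper bits (the shift
// lowerings earlier in this file, for instance) re-extend their sub-word
// operands with explicit SignExt*/ZeroExt* ops.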
  8974	func rewriteValueMIPS_OpWB_0(v *Value) bool {
  8975		// match: (WB {fn} destptr srcptr mem)
  8976		// cond:
  8977		// result: (LoweredWB {fn} destptr srcptr mem)
  8978		for {
  8979			fn := v.Aux
  8980			mem := v.Args[2]
  8981			destptr := v.Args[0]
  8982			srcptr := v.Args[1]
  8983			v.reset(OpMIPSLoweredWB)
  8984			v.Aux = fn
  8985			v.AddArg(destptr)
  8986			v.AddArg(srcptr)
  8987			v.AddArg(mem)
  8988			return true
  8989		}
  8990	}
  8991	func rewriteValueMIPS_OpXor16_0(v *Value) bool {
  8992		// match: (Xor16 x y)
  8993		// cond:
  8994		// result: (XOR x y)
  8995		for {
  8996			y := v.Args[1]
  8997			x := v.Args[0]
  8998			v.reset(OpMIPSXOR)
  8999			v.AddArg(x)
  9000			v.AddArg(y)
  9001			return true
  9002		}
  9003	}
  9004	func rewriteValueMIPS_OpXor32_0(v *Value) bool {
  9005		// match: (Xor32 x y)
  9006		// cond:
  9007		// result: (XOR x y)
  9008		for {
  9009			y := v.Args[1]
  9010			x := v.Args[0]
  9011			v.reset(OpMIPSXOR)
  9012			v.AddArg(x)
  9013			v.AddArg(y)
  9014			return true
  9015		}
  9016	}
  9017	func rewriteValueMIPS_OpXor8_0(v *Value) bool {
  9018		// match: (Xor8 x y)
  9019		// cond:
  9020		// result: (XOR x y)
  9021		for {
  9022			y := v.Args[1]
  9023			x := v.Args[0]
  9024			v.reset(OpMIPSXOR)
  9025			v.AddArg(x)
  9026			v.AddArg(y)
  9027			return true
  9028		}
  9029	}
  9030	func rewriteValueMIPS_OpZero_0(v *Value) bool {
  9031		b := v.Block
  9032		typ := &b.Func.Config.Types
  9033		// match: (Zero [0] _ mem)
  9034		// cond:
  9035		// result: mem
  9036		for {
  9037			if v.AuxInt != 0 {
  9038				break
  9039			}
  9040			mem := v.Args[1]
  9041			v.reset(OpCopy)
  9042			v.Type = mem.Type
  9043			v.AddArg(mem)
  9044			return true
  9045		}
  9046		// match: (Zero [1] ptr mem)
  9047		// cond:
  9048		// result: (MOVBstore ptr (MOVWconst [0]) mem)
  9049		for {
  9050			if v.AuxInt != 1 {
  9051				break
  9052			}
  9053			mem := v.Args[1]
  9054			ptr := v.Args[0]
  9055			v.reset(OpMIPSMOVBstore)
  9056			v.AddArg(ptr)
  9057			v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9058			v0.AuxInt = 0
  9059			v.AddArg(v0)
  9060			v.AddArg(mem)
  9061			return true
  9062		}
  9063		// match: (Zero [2] {t} ptr mem)
  9064		// cond: t.(*types.Type).Alignment()%2 == 0
  9065		// result: (MOVHstore ptr (MOVWconst [0]) mem)
  9066		for {
  9067			if v.AuxInt != 2 {
  9068				break
  9069			}
  9070			t := v.Aux
  9071			mem := v.Args[1]
  9072			ptr := v.Args[0]
  9073			if !(t.(*types.Type).Alignment()%2 == 0) {
  9074				break
  9075			}
  9076			v.reset(OpMIPSMOVHstore)
  9077			v.AddArg(ptr)
  9078			v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9079			v0.AuxInt = 0
  9080			v.AddArg(v0)
  9081			v.AddArg(mem)
  9082			return true
  9083		}
  9084		// match: (Zero [2] ptr mem)
  9085		// cond:
  9086		// result: (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))
  9087		for {
  9088			if v.AuxInt != 2 {
  9089				break
  9090			}
  9091			mem := v.Args[1]
  9092			ptr := v.Args[0]
  9093			v.reset(OpMIPSMOVBstore)
  9094			v.AuxInt = 1
  9095			v.AddArg(ptr)
  9096			v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9097			v0.AuxInt = 0
  9098			v.AddArg(v0)
  9099			v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9100			v1.AuxInt = 0
  9101			v1.AddArg(ptr)
  9102			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9103			v2.AuxInt = 0
  9104			v1.AddArg(v2)
  9105			v1.AddArg(mem)
  9106			v.AddArg(v1)
  9107			return true
  9108		}
  9109		// match: (Zero [4] {t} ptr mem)
  9110		// cond: t.(*types.Type).Alignment()%4 == 0
  9111		// result: (MOVWstore ptr (MOVWconst [0]) mem)
  9112		for {
  9113			if v.AuxInt != 4 {
  9114				break
  9115			}
  9116			t := v.Aux
  9117			mem := v.Args[1]
  9118			ptr := v.Args[0]
  9119			if !(t.(*types.Type).Alignment()%4 == 0) {
  9120				break
  9121			}
  9122			v.reset(OpMIPSMOVWstore)
  9123			v.AddArg(ptr)
  9124			v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9125			v0.AuxInt = 0
  9126			v.AddArg(v0)
  9127			v.AddArg(mem)
  9128			return true
  9129		}
  9130		// match: (Zero [4] {t} ptr mem)
  9131		// cond: t.(*types.Type).Alignment()%2 == 0
  9132		// result: (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem))
  9133		for {
  9134			if v.AuxInt != 4 {
  9135				break
  9136			}
  9137			t := v.Aux
  9138			mem := v.Args[1]
  9139			ptr := v.Args[0]
  9140			if !(t.(*types.Type).Alignment()%2 == 0) {
  9141				break
  9142			}
  9143			v.reset(OpMIPSMOVHstore)
  9144			v.AuxInt = 2
  9145			v.AddArg(ptr)
  9146			v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9147			v0.AuxInt = 0
  9148			v.AddArg(v0)
  9149			v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  9150			v1.AuxInt = 0
  9151			v1.AddArg(ptr)
  9152			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9153			v2.AuxInt = 0
  9154			v1.AddArg(v2)
  9155			v1.AddArg(mem)
  9156			v.AddArg(v1)
  9157			return true
  9158		}
  9159		// match: (Zero [4] ptr mem)
  9160		// cond:
  9161		// result: (MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))))
  9162		for {
  9163			if v.AuxInt != 4 {
  9164				break
  9165			}
  9166			mem := v.Args[1]
  9167			ptr := v.Args[0]
  9168			v.reset(OpMIPSMOVBstore)
  9169			v.AuxInt = 3
  9170			v.AddArg(ptr)
  9171			v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9172			v0.AuxInt = 0
  9173			v.AddArg(v0)
  9174			v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9175			v1.AuxInt = 2
  9176			v1.AddArg(ptr)
  9177			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9178			v2.AuxInt = 0
  9179			v1.AddArg(v2)
  9180			v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9181			v3.AuxInt = 1
  9182			v3.AddArg(ptr)
  9183			v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9184			v4.AuxInt = 0
  9185			v3.AddArg(v4)
  9186			v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9187			v5.AuxInt = 0
  9188			v5.AddArg(ptr)
  9189			v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9190			v6.AuxInt = 0
  9191			v5.AddArg(v6)
  9192			v5.AddArg(mem)
  9193			v3.AddArg(v5)
  9194			v1.AddArg(v3)
  9195			v.AddArg(v1)
  9196			return true
  9197		}
  9198		// match: (Zero [3] ptr mem)
  9199		// cond:
  9200		// result: (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)))
  9201		for {
  9202			if v.AuxInt != 3 {
  9203				break
  9204			}
  9205			mem := v.Args[1]
  9206			ptr := v.Args[0]
  9207			v.reset(OpMIPSMOVBstore)
  9208			v.AuxInt = 2
  9209			v.AddArg(ptr)
  9210			v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9211			v0.AuxInt = 0
  9212			v.AddArg(v0)
  9213			v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9214			v1.AuxInt = 1
  9215			v1.AddArg(ptr)
  9216			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9217			v2.AuxInt = 0
  9218			v1.AddArg(v2)
  9219			v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
  9220			v3.AuxInt = 0
  9221			v3.AddArg(ptr)
  9222			v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9223			v4.AuxInt = 0
  9224			v3.AddArg(v4)
  9225			v3.AddArg(mem)
  9226			v1.AddArg(v3)
  9227			v.AddArg(v1)
  9228			return true
  9229		}
  9230		// match: (Zero [6] {t} ptr mem)
  9231		// cond: t.(*types.Type).Alignment()%2 == 0
  9232		// result: (MOVHstore [4] ptr (MOVWconst [0]) (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem)))
  9233		for {
  9234			if v.AuxInt != 6 {
  9235				break
  9236			}
  9237			t := v.Aux
  9238			mem := v.Args[1]
  9239			ptr := v.Args[0]
  9240			if !(t.(*types.Type).Alignment()%2 == 0) {
  9241				break
  9242			}
  9243			v.reset(OpMIPSMOVHstore)
  9244			v.AuxInt = 4
  9245			v.AddArg(ptr)
  9246			v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9247			v0.AuxInt = 0
  9248			v.AddArg(v0)
  9249			v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  9250			v1.AuxInt = 2
  9251			v1.AddArg(ptr)
  9252			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9253			v2.AuxInt = 0
  9254			v1.AddArg(v2)
  9255			v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
  9256			v3.AuxInt = 0
  9257			v3.AddArg(ptr)
  9258			v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9259			v4.AuxInt = 0
  9260			v3.AddArg(v4)
  9261			v3.AddArg(mem)
  9262			v1.AddArg(v3)
  9263			v.AddArg(v1)
  9264			return true
  9265		}
  9266		// match: (Zero [8] {t} ptr mem)
  9267		// cond: t.(*types.Type).Alignment()%4 == 0
  9268		// result: (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))
  9269		for {
  9270			if v.AuxInt != 8 {
  9271				break
  9272			}
  9273			t := v.Aux
  9274			mem := v.Args[1]
  9275			ptr := v.Args[0]
  9276			if !(t.(*types.Type).Alignment()%4 == 0) {
  9277				break
  9278			}
  9279			v.reset(OpMIPSMOVWstore)
  9280			v.AuxInt = 4
  9281			v.AddArg(ptr)
  9282			v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9283			v0.AuxInt = 0
  9284			v.AddArg(v0)
  9285			v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9286			v1.AuxInt = 0
  9287			v1.AddArg(ptr)
  9288			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9289			v2.AuxInt = 0
  9290			v1.AddArg(v2)
  9291			v1.AddArg(mem)
  9292			v.AddArg(v1)
  9293			return true
  9294		}
  9295		return false
  9296	}
  9297	func rewriteValueMIPS_OpZero_10(v *Value) bool {
  9298		b := v.Block
  9299		config := b.Func.Config
  9300		typ := &b.Func.Config.Types
  9301		// match: (Zero [12] {t} ptr mem)
  9302		// cond: t.(*types.Type).Alignment()%4 == 0
  9303		// result: (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem)))
  9304		for {
  9305			if v.AuxInt != 12 {
  9306				break
  9307			}
  9308			t := v.Aux
  9309			mem := v.Args[1]
  9310			ptr := v.Args[0]
  9311			if !(t.(*types.Type).Alignment()%4 == 0) {
  9312				break
  9313			}
  9314			v.reset(OpMIPSMOVWstore)
  9315			v.AuxInt = 8
  9316			v.AddArg(ptr)
  9317			v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9318			v0.AuxInt = 0
  9319			v.AddArg(v0)
  9320			v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9321			v1.AuxInt = 4
  9322			v1.AddArg(ptr)
  9323			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9324			v2.AuxInt = 0
  9325			v1.AddArg(v2)
  9326			v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9327			v3.AuxInt = 0
  9328			v3.AddArg(ptr)
  9329			v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9330			v4.AuxInt = 0
  9331			v3.AddArg(v4)
  9332			v3.AddArg(mem)
  9333			v1.AddArg(v3)
  9334			v.AddArg(v1)
  9335			return true
  9336		}
  9337		// match: (Zero [16] {t} ptr mem)
  9338		// cond: t.(*types.Type).Alignment()%4 == 0
  9339		// result: (MOVWstore [12] ptr (MOVWconst [0]) (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))))
  9340		for {
  9341			if v.AuxInt != 16 {
  9342				break
  9343			}
  9344			t := v.Aux
  9345			mem := v.Args[1]
  9346			ptr := v.Args[0]
  9347			if !(t.(*types.Type).Alignment()%4 == 0) {
  9348				break
  9349			}
  9350			v.reset(OpMIPSMOVWstore)
  9351			v.AuxInt = 12
  9352			v.AddArg(ptr)
  9353			v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9354			v0.AuxInt = 0
  9355			v.AddArg(v0)
  9356			v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9357			v1.AuxInt = 8
  9358			v1.AddArg(ptr)
  9359			v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9360			v2.AuxInt = 0
  9361			v1.AddArg(v2)
  9362			v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9363			v3.AuxInt = 4
  9364			v3.AddArg(ptr)
  9365			v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9366			v4.AuxInt = 0
  9367			v3.AddArg(v4)
  9368			v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
  9369			v5.AuxInt = 0
  9370			v5.AddArg(ptr)
  9371			v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9372			v6.AuxInt = 0
  9373			v5.AddArg(v6)
  9374			v5.AddArg(mem)
  9375			v3.AddArg(v5)
  9376			v1.AddArg(v3)
  9377			v.AddArg(v1)
  9378			return true
  9379		}
  9380		// match: (Zero [s] {t} ptr mem)
  9381		// cond: (s > 16 || t.(*types.Type).Alignment()%4 != 0)
  9382		// result: (LoweredZero [t.(*types.Type).Alignment()] ptr (ADDconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
  9383		for {
  9384			s := v.AuxInt
  9385			t := v.Aux
  9386			mem := v.Args[1]
  9387			ptr := v.Args[0]
  9388			if !(s > 16 || t.(*types.Type).Alignment()%4 != 0) {
  9389				break
  9390			}
  9391			v.reset(OpMIPSLoweredZero)
  9392			v.AuxInt = t.(*types.Type).Alignment()
  9393			v.AddArg(ptr)
  9394			v0 := b.NewValue0(v.Pos, OpMIPSADDconst, ptr.Type)
  9395			v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
  9396			v0.AddArg(ptr)
  9397			v.AddArg(v0)
  9398			v.AddArg(mem)
  9399			return true
  9400		}
  9401		return false
  9402	}
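// The Zero rules above unroll small clears into explicit stores of
// (MOVWconst [0]), choosing the widest store the recorded alignment permits;
// anything larger than 16 bytes, or not 4-byte aligned, falls back to the
// LoweredZero loop, passing an end pointer computed as
// ptr + s - moveSize(alignment, config). A worked instance (assuming size 32
// with 4-byte alignment, where moveSize yields 4):
//	(Zero [32] {t} ptr mem) -> (LoweredZero [4] ptr (ADDconst <ptr.Type> ptr [28]) mem)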
  9403	func rewriteValueMIPS_OpZeroExt16to32_0(v *Value) bool {
  9404		// match: (ZeroExt16to32 x)
  9405		// cond:
  9406		// result: (MOVHUreg x)
  9407		for {
  9408			x := v.Args[0]
  9409			v.reset(OpMIPSMOVHUreg)
  9410			v.AddArg(x)
  9411			return true
  9412		}
  9413	}
  9414	func rewriteValueMIPS_OpZeroExt8to16_0(v *Value) bool {
  9415		// match: (ZeroExt8to16 x)
  9416		// cond:
  9417		// result: (MOVBUreg x)
  9418		for {
  9419			x := v.Args[0]
  9420			v.reset(OpMIPSMOVBUreg)
  9421			v.AddArg(x)
  9422			return true
  9423		}
  9424	}
  9425	func rewriteValueMIPS_OpZeroExt8to32_0(v *Value) bool {
  9426		// match: (ZeroExt8to32 x)
  9427		// cond:
  9428		// result: (MOVBUreg x)
  9429		for {
  9430			x := v.Args[0]
  9431			v.reset(OpMIPSMOVBUreg)
  9432			v.AddArg(x)
  9433			return true
  9434		}
  9435	}
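// Zeromask below produces an all-ones word for non-zero inputs and zero
// otherwise: the unsigned compare (SGTU x (MOVWconst [0])) is 1 exactly when
// x != 0, and negating that 1 gives -1 (0xFFFFFFFF).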
  9436	func rewriteValueMIPS_OpZeromask_0(v *Value) bool {
  9437		b := v.Block
  9438		typ := &b.Func.Config.Types
  9439		// match: (Zeromask x)
  9440		// cond:
  9441		// result: (NEG (SGTU x (MOVWconst [0])))
  9442		for {
  9443			x := v.Args[0]
  9444			v.reset(OpMIPSNEG)
  9445			v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
  9446			v0.AddArg(x)
  9447			v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
  9448			v1.AuxInt = 0
  9449			v0.AddArg(v1)
  9450			v.AddArg(v0)
  9451			return true
  9452		}
  9453	}
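// rewriteBlockMIPS canonicalizes branch conditions rather than values: a
// condition wrapped in (XORconst [1] ...) simply flips the block kind
// (EQ <-> NE), compares of a value against zero collapse into the dedicated
// GEZ/GTZ/LEZ/LTZ block kinds (or into EQ/NE for the unsigned ones), a generic
// If becomes NE, and a constant control turns the block into BlockFirst, with
// swapSuccessors used when the branch is statically not taken.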
  9454	func rewriteBlockMIPS(b *Block) bool {
  9455		config := b.Func.Config
  9456		typ := &config.Types
  9457		_ = typ
  9458		v := b.Control
  9459		_ = v
  9460		switch b.Kind {
  9461		case BlockMIPSEQ:
  9462			// match: (EQ (FPFlagTrue cmp) yes no)
  9463			// cond:
  9464			// result: (FPF cmp yes no)
  9465			for v.Op == OpMIPSFPFlagTrue {
  9466				cmp := v.Args[0]
  9467				b.Kind = BlockMIPSFPF
  9468				b.SetControl(cmp)
  9469				b.Aux = nil
  9470				return true
  9471			}
  9472			// match: (EQ (FPFlagFalse cmp) yes no)
  9473			// cond:
  9474			// result: (FPT cmp yes no)
  9475			for v.Op == OpMIPSFPFlagFalse {
  9476				cmp := v.Args[0]
  9477				b.Kind = BlockMIPSFPT
  9478				b.SetControl(cmp)
  9479				b.Aux = nil
  9480				return true
  9481			}
  9482			// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
  9483			// cond:
  9484			// result: (NE cmp yes no)
  9485			for v.Op == OpMIPSXORconst {
  9486				if v.AuxInt != 1 {
  9487					break
  9488				}
  9489				cmp := v.Args[0]
  9490				if cmp.Op != OpMIPSSGT {
  9491					break
  9492				}
  9493				_ = cmp.Args[1]
  9494				b.Kind = BlockMIPSNE
  9495				b.SetControl(cmp)
  9496				b.Aux = nil
  9497				return true
  9498			}
  9499			// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
  9500			// cond:
  9501			// result: (NE cmp yes no)
  9502			for v.Op == OpMIPSXORconst {
  9503				if v.AuxInt != 1 {
  9504					break
  9505				}
  9506				cmp := v.Args[0]
  9507				if cmp.Op != OpMIPSSGTU {
  9508					break
  9509				}
  9510				_ = cmp.Args[1]
  9511				b.Kind = BlockMIPSNE
  9512				b.SetControl(cmp)
  9513				b.Aux = nil
  9514				return true
  9515			}
  9516			// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
  9517			// cond:
  9518			// result: (NE cmp yes no)
  9519			for v.Op == OpMIPSXORconst {
  9520				if v.AuxInt != 1 {
  9521					break
  9522				}
  9523				cmp := v.Args[0]
  9524				if cmp.Op != OpMIPSSGTconst {
  9525					break
  9526				}
  9527				b.Kind = BlockMIPSNE
  9528				b.SetControl(cmp)
  9529				b.Aux = nil
  9530				return true
  9531			}
  9532			// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
  9533			// cond:
  9534			// result: (NE cmp yes no)
  9535			for v.Op == OpMIPSXORconst {
  9536				if v.AuxInt != 1 {
  9537					break
  9538				}
  9539				cmp := v.Args[0]
  9540				if cmp.Op != OpMIPSSGTUconst {
  9541					break
  9542				}
  9543				b.Kind = BlockMIPSNE
  9544				b.SetControl(cmp)
  9545				b.Aux = nil
  9546				return true
  9547			}
  9548			// match: (EQ (XORconst [1] cmp:(SGTzero _)) yes no)
  9549			// cond:
  9550			// result: (NE cmp yes no)
  9551			for v.Op == OpMIPSXORconst {
  9552				if v.AuxInt != 1 {
  9553					break
  9554				}
  9555				cmp := v.Args[0]
  9556				if cmp.Op != OpMIPSSGTzero {
  9557					break
  9558				}
  9559				b.Kind = BlockMIPSNE
  9560				b.SetControl(cmp)
  9561				b.Aux = nil
  9562				return true
  9563			}
  9564			// match: (EQ (XORconst [1] cmp:(SGTUzero _)) yes no)
  9565			// cond:
  9566			// result: (NE cmp yes no)
  9567			for v.Op == OpMIPSXORconst {
  9568				if v.AuxInt != 1 {
  9569					break
  9570				}
  9571				cmp := v.Args[0]
  9572				if cmp.Op != OpMIPSSGTUzero {
  9573					break
  9574				}
  9575				b.Kind = BlockMIPSNE
  9576				b.SetControl(cmp)
  9577				b.Aux = nil
  9578				return true
  9579			}
  9580			// match: (EQ (SGTUconst [1] x) yes no)
  9581			// cond:
  9582			// result: (NE x yes no)
  9583			for v.Op == OpMIPSSGTUconst {
  9584				if v.AuxInt != 1 {
  9585					break
  9586				}
  9587				x := v.Args[0]
  9588				b.Kind = BlockMIPSNE
  9589				b.SetControl(x)
  9590				b.Aux = nil
  9591				return true
  9592			}
  9593			// match: (EQ (SGTUzero x) yes no)
  9594			// cond:
  9595			// result: (EQ x yes no)
  9596			for v.Op == OpMIPSSGTUzero {
  9597				x := v.Args[0]
  9598				b.Kind = BlockMIPSEQ
  9599				b.SetControl(x)
  9600				b.Aux = nil
  9601				return true
  9602			}
  9603			// match: (EQ (SGTconst [0] x) yes no)
  9604			// cond:
  9605			// result: (GEZ x yes no)
  9606			for v.Op == OpMIPSSGTconst {
  9607				if v.AuxInt != 0 {
  9608					break
  9609				}
  9610				x := v.Args[0]
  9611				b.Kind = BlockMIPSGEZ
  9612				b.SetControl(x)
  9613				b.Aux = nil
  9614				return true
  9615			}
  9616			// match: (EQ (SGTzero x) yes no)
  9617			// cond:
  9618			// result: (LEZ x yes no)
  9619			for v.Op == OpMIPSSGTzero {
  9620				x := v.Args[0]
  9621				b.Kind = BlockMIPSLEZ
  9622				b.SetControl(x)
  9623				b.Aux = nil
  9624				return true
  9625			}
  9626			// match: (EQ (MOVWconst [0]) yes no)
  9627			// cond:
  9628			// result: (First nil yes no)
  9629			for v.Op == OpMIPSMOVWconst {
  9630				if v.AuxInt != 0 {
  9631					break
  9632				}
  9633				b.Kind = BlockFirst
  9634				b.SetControl(nil)
  9635				b.Aux = nil
  9636				return true
  9637			}
  9638			// match: (EQ (MOVWconst [c]) yes no)
  9639			// cond: c != 0
  9640			// result: (First nil no yes)
  9641			for v.Op == OpMIPSMOVWconst {
  9642				c := v.AuxInt
  9643				if !(c != 0) {
  9644					break
  9645				}
  9646				b.Kind = BlockFirst
  9647				b.SetControl(nil)
  9648				b.Aux = nil
  9649				b.swapSuccessors()
  9650				return true
  9651			}
  9652		case BlockMIPSGEZ:
  9653			// match: (GEZ (MOVWconst [c]) yes no)
  9654			// cond: int32(c) >= 0
  9655			// result: (First nil yes no)
  9656			for v.Op == OpMIPSMOVWconst {
  9657				c := v.AuxInt
  9658				if !(int32(c) >= 0) {
  9659					break
  9660				}
  9661				b.Kind = BlockFirst
  9662				b.SetControl(nil)
  9663				b.Aux = nil
  9664				return true
  9665			}
  9666			// match: (GEZ (MOVWconst [c]) yes no)
  9667			// cond: int32(c) < 0
  9668			// result: (First nil no yes)
  9669			for v.Op == OpMIPSMOVWconst {
  9670				c := v.AuxInt
  9671				if !(int32(c) < 0) {
  9672					break
  9673				}
  9674				b.Kind = BlockFirst
  9675				b.SetControl(nil)
  9676				b.Aux = nil
  9677				b.swapSuccessors()
  9678				return true
  9679			}
  9680		case BlockMIPSGTZ:
  9681			// match: (GTZ (MOVWconst [c]) yes no)
  9682			// cond: int32(c) > 0
  9683			// result: (First nil yes no)
  9684			for v.Op == OpMIPSMOVWconst {
  9685				c := v.AuxInt
  9686				if !(int32(c) > 0) {
  9687					break
  9688				}
  9689				b.Kind = BlockFirst
  9690				b.SetControl(nil)
  9691				b.Aux = nil
  9692				return true
  9693			}
  9694			// match: (GTZ (MOVWconst [c]) yes no)
  9695			// cond: int32(c) <= 0
  9696			// result: (First nil no yes)
  9697			for v.Op == OpMIPSMOVWconst {
  9698				c := v.AuxInt
  9699				if !(int32(c) <= 0) {
  9700					break
  9701				}
  9702				b.Kind = BlockFirst
  9703				b.SetControl(nil)
  9704				b.Aux = nil
  9705				b.swapSuccessors()
  9706				return true
  9707			}
  9708		case BlockIf:
  9709			// match: (If cond yes no)
  9710			// cond:
  9711			// result: (NE cond yes no)
  9712			for {
  9713				cond := b.Control
  9714				b.Kind = BlockMIPSNE
  9715				b.SetControl(cond)
  9716				b.Aux = nil
  9717				return true
  9718			}
  9719		case BlockMIPSLEZ:
  9720			// match: (LEZ (MOVWconst [c]) yes no)
  9721			// cond: int32(c) <= 0
  9722			// result: (First nil yes no)
  9723			for v.Op == OpMIPSMOVWconst {
  9724				c := v.AuxInt
  9725				if !(int32(c) <= 0) {
  9726					break
  9727				}
  9728				b.Kind = BlockFirst
  9729				b.SetControl(nil)
  9730				b.Aux = nil
  9731				return true
  9732			}
  9733			// match: (LEZ (MOVWconst [c]) yes no)
  9734			// cond: int32(c) > 0
  9735			// result: (First nil no yes)
  9736			for v.Op == OpMIPSMOVWconst {
  9737				c := v.AuxInt
  9738				if !(int32(c) > 0) {
  9739					break
  9740				}
  9741				b.Kind = BlockFirst
  9742				b.SetControl(nil)
  9743				b.Aux = nil
  9744				b.swapSuccessors()
  9745				return true
  9746			}
  9747		case BlockMIPSLTZ:
  9748			// match: (LTZ (MOVWconst [c]) yes no)
  9749			// cond: int32(c) < 0
  9750			// result: (First nil yes no)
  9751			for v.Op == OpMIPSMOVWconst {
  9752				c := v.AuxInt
  9753				if !(int32(c) < 0) {
  9754					break
  9755				}
  9756				b.Kind = BlockFirst
  9757				b.SetControl(nil)
  9758				b.Aux = nil
  9759				return true
  9760			}
  9761			// match: (LTZ (MOVWconst [c]) yes no)
  9762			// cond: int32(c) >= 0
  9763			// result: (First nil no yes)
  9764			for v.Op == OpMIPSMOVWconst {
  9765				c := v.AuxInt
  9766				if !(int32(c) >= 0) {
  9767					break
  9768				}
  9769				b.Kind = BlockFirst
  9770				b.SetControl(nil)
  9771				b.Aux = nil
  9772				b.swapSuccessors()
  9773				return true
  9774			}
  9775		case BlockMIPSNE:
  9776			// match: (NE (FPFlagTrue cmp) yes no)
  9777			// cond:
  9778			// result: (FPT cmp yes no)
  9779			for v.Op == OpMIPSFPFlagTrue {
  9780				cmp := v.Args[0]
  9781				b.Kind = BlockMIPSFPT
  9782				b.SetControl(cmp)
  9783				b.Aux = nil
  9784				return true
  9785			}
  9786			// match: (NE (FPFlagFalse cmp) yes no)
  9787			// cond:
  9788			// result: (FPF cmp yes no)
  9789			for v.Op == OpMIPSFPFlagFalse {
  9790				cmp := v.Args[0]
  9791				b.Kind = BlockMIPSFPF
  9792				b.SetControl(cmp)
  9793				b.Aux = nil
  9794				return true
  9795			}
  9796			// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
  9797			// cond:
  9798			// result: (EQ cmp yes no)
  9799			for v.Op == OpMIPSXORconst {
  9800				if v.AuxInt != 1 {
  9801					break
  9802				}
  9803				cmp := v.Args[0]
  9804				if cmp.Op != OpMIPSSGT {
  9805					break
  9806				}
  9807				_ = cmp.Args[1]
  9808				b.Kind = BlockMIPSEQ
  9809				b.SetControl(cmp)
  9810				b.Aux = nil
  9811				return true
  9812			}
  9813			// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
  9814			// cond:
  9815			// result: (EQ cmp yes no)
  9816			for v.Op == OpMIPSXORconst {
  9817				if v.AuxInt != 1 {
  9818					break
  9819				}
  9820				cmp := v.Args[0]
  9821				if cmp.Op != OpMIPSSGTU {
  9822					break
  9823				}
  9824				_ = cmp.Args[1]
  9825				b.Kind = BlockMIPSEQ
  9826				b.SetControl(cmp)
  9827				b.Aux = nil
  9828				return true
  9829			}
  9830			// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
  9831			// cond:
  9832			// result: (EQ cmp yes no)
  9833			for v.Op == OpMIPSXORconst {
  9834				if v.AuxInt != 1 {
  9835					break
  9836				}
  9837				cmp := v.Args[0]
  9838				if cmp.Op != OpMIPSSGTconst {
  9839					break
  9840				}
  9841				b.Kind = BlockMIPSEQ
  9842				b.SetControl(cmp)
  9843				b.Aux = nil
  9844				return true
  9845			}
  9846			// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
  9847			// cond:
  9848			// result: (EQ cmp yes no)
  9849			for v.Op == OpMIPSXORconst {
  9850				if v.AuxInt != 1 {
  9851					break
  9852				}
  9853				cmp := v.Args[0]
  9854				if cmp.Op != OpMIPSSGTUconst {
  9855					break
  9856				}
  9857				b.Kind = BlockMIPSEQ
  9858				b.SetControl(cmp)
  9859				b.Aux = nil
  9860				return true
  9861			}
  9862			// match: (NE (XORconst [1] cmp:(SGTzero _)) yes no)
  9863			// cond:
  9864			// result: (EQ cmp yes no)
  9865			for v.Op == OpMIPSXORconst {
  9866				if v.AuxInt != 1 {
  9867					break
  9868				}
  9869				cmp := v.Args[0]
  9870				if cmp.Op != OpMIPSSGTzero {
  9871					break
  9872				}
  9873				b.Kind = BlockMIPSEQ
  9874				b.SetControl(cmp)
  9875				b.Aux = nil
  9876				return true
  9877			}
  9878			// match: (NE (XORconst [1] cmp:(SGTUzero _)) yes no)
  9879			// cond:
  9880			// result: (EQ cmp yes no)
  9881			for v.Op == OpMIPSXORconst {
  9882				if v.AuxInt != 1 {
  9883					break
  9884				}
  9885				cmp := v.Args[0]
  9886				if cmp.Op != OpMIPSSGTUzero {
  9887					break
  9888				}
  9889				b.Kind = BlockMIPSEQ
  9890				b.SetControl(cmp)
  9891				b.Aux = nil
  9892				return true
  9893			}
  9894			// match: (NE (SGTUconst [1] x) yes no)
  9895			// cond:
  9896			// result: (EQ x yes no)
  9897			for v.Op == OpMIPSSGTUconst {
  9898				if v.AuxInt != 1 {
  9899					break
  9900				}
  9901				x := v.Args[0]
  9902				b.Kind = BlockMIPSEQ
  9903				b.SetControl(x)
  9904				b.Aux = nil
  9905				return true
  9906			}
  9907			// match: (NE (SGTUzero x) yes no)
  9908			// cond:
  9909			// result: (NE x yes no)
  9910			for v.Op == OpMIPSSGTUzero {
  9911				x := v.Args[0]
  9912				b.Kind = BlockMIPSNE
  9913				b.SetControl(x)
  9914				b.Aux = nil
  9915				return true
  9916			}
  9917			// match: (NE (SGTconst [0] x) yes no)
  9918			// cond:
  9919			// result: (LTZ x yes no)
  9920			for v.Op == OpMIPSSGTconst {
  9921				if v.AuxInt != 0 {
  9922					break
  9923				}
  9924				x := v.Args[0]
  9925				b.Kind = BlockMIPSLTZ
  9926				b.SetControl(x)
  9927				b.Aux = nil
  9928				return true
  9929			}
  9930			// match: (NE (SGTzero x) yes no)
  9931			// cond:
  9932			// result: (GTZ x yes no)
  9933			for v.Op == OpMIPSSGTzero {
  9934				x := v.Args[0]
  9935				b.Kind = BlockMIPSGTZ
  9936				b.SetControl(x)
  9937				b.Aux = nil
  9938				return true
  9939			}
  9940			// match: (NE (MOVWconst [0]) yes no)
  9941			// cond:
  9942			// result: (First nil no yes)
  9943			for v.Op == OpMIPSMOVWconst {
  9944				if v.AuxInt != 0 {
  9945					break
  9946				}
  9947				b.Kind = BlockFirst
  9948				b.SetControl(nil)
  9949				b.Aux = nil
  9950				b.swapSuccessors()
  9951				return true
  9952			}
  9953			// match: (NE (MOVWconst [c]) yes no)
  9954			// cond: c != 0
  9955			// result: (First nil yes no)
  9956			for v.Op == OpMIPSMOVWconst {
  9957				c := v.AuxInt
  9958				if !(c != 0) {
  9959					break
  9960				}
  9961				b.Kind = BlockFirst
  9962				b.SetControl(nil)
  9963				b.Aux = nil
  9964				return true
  9965			}
  9966		}
  9967		return false
  9968	}
  9969	
