...

Source file src/cmd/compile/internal/ssa/rewriteMIPS64.go

     1	// Code generated from gen/MIPS64.rules; DO NOT EDIT.
     2	// generated with: cd gen; go run *.go
     3	
     4	package ssa
     5	
     6	import "fmt"
     7	import "math"
     8	import "cmd/internal/obj"
     9	import "cmd/internal/objabi"
    10	import "cmd/compile/internal/types"
    11	
    12	var _ = fmt.Println   // in case not otherwise used
    13	var _ = math.MinInt8  // in case not otherwise used
    14	var _ = obj.ANOP      // in case not otherwise used
    15	var _ = objabi.GOROOT // in case not otherwise used
    16	var _ = types.TypeMem // in case not otherwise used
    17	
// rewriteValueMIPS64 dispatches v to the rewrite helper for its opcode.
// Each helper applies the lowering/simplification rules from
// gen/MIPS64.rules for that opcode, mutating v in place (via v.reset and
// v.AddArg); the return value reports whether v was rewritten. Opcodes
// with no entry here have no MIPS64 rewrite rules and return false.
//
// NOTE(review): this file is generated ("DO NOT EDIT") — change
// gen/MIPS64.rules and regenerate rather than editing this switch by hand.
func rewriteValueMIPS64(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValueMIPS64_OpAdd16_0(v)
	case OpAdd32:
		return rewriteValueMIPS64_OpAdd32_0(v)
	case OpAdd32F:
		return rewriteValueMIPS64_OpAdd32F_0(v)
	case OpAdd64:
		return rewriteValueMIPS64_OpAdd64_0(v)
	case OpAdd64F:
		return rewriteValueMIPS64_OpAdd64F_0(v)
	case OpAdd8:
		return rewriteValueMIPS64_OpAdd8_0(v)
	case OpAddPtr:
		return rewriteValueMIPS64_OpAddPtr_0(v)
	case OpAddr:
		return rewriteValueMIPS64_OpAddr_0(v)
	case OpAnd16:
		return rewriteValueMIPS64_OpAnd16_0(v)
	case OpAnd32:
		return rewriteValueMIPS64_OpAnd32_0(v)
	case OpAnd64:
		return rewriteValueMIPS64_OpAnd64_0(v)
	case OpAnd8:
		return rewriteValueMIPS64_OpAnd8_0(v)
	case OpAndB:
		return rewriteValueMIPS64_OpAndB_0(v)
	case OpAtomicAdd32:
		return rewriteValueMIPS64_OpAtomicAdd32_0(v)
	case OpAtomicAdd64:
		return rewriteValueMIPS64_OpAtomicAdd64_0(v)
	case OpAtomicCompareAndSwap32:
		return rewriteValueMIPS64_OpAtomicCompareAndSwap32_0(v)
	case OpAtomicCompareAndSwap64:
		return rewriteValueMIPS64_OpAtomicCompareAndSwap64_0(v)
	case OpAtomicExchange32:
		return rewriteValueMIPS64_OpAtomicExchange32_0(v)
	case OpAtomicExchange64:
		return rewriteValueMIPS64_OpAtomicExchange64_0(v)
	case OpAtomicLoad32:
		return rewriteValueMIPS64_OpAtomicLoad32_0(v)
	case OpAtomicLoad64:
		return rewriteValueMIPS64_OpAtomicLoad64_0(v)
	case OpAtomicLoad8:
		return rewriteValueMIPS64_OpAtomicLoad8_0(v)
	case OpAtomicLoadPtr:
		return rewriteValueMIPS64_OpAtomicLoadPtr_0(v)
	case OpAtomicStore32:
		return rewriteValueMIPS64_OpAtomicStore32_0(v)
	case OpAtomicStore64:
		return rewriteValueMIPS64_OpAtomicStore64_0(v)
	case OpAtomicStorePtrNoWB:
		return rewriteValueMIPS64_OpAtomicStorePtrNoWB_0(v)
	case OpAvg64u:
		return rewriteValueMIPS64_OpAvg64u_0(v)
	case OpClosureCall:
		return rewriteValueMIPS64_OpClosureCall_0(v)
	case OpCom16:
		return rewriteValueMIPS64_OpCom16_0(v)
	case OpCom32:
		return rewriteValueMIPS64_OpCom32_0(v)
	case OpCom64:
		return rewriteValueMIPS64_OpCom64_0(v)
	case OpCom8:
		return rewriteValueMIPS64_OpCom8_0(v)
	case OpConst16:
		return rewriteValueMIPS64_OpConst16_0(v)
	case OpConst32:
		return rewriteValueMIPS64_OpConst32_0(v)
	case OpConst32F:
		return rewriteValueMIPS64_OpConst32F_0(v)
	case OpConst64:
		return rewriteValueMIPS64_OpConst64_0(v)
	case OpConst64F:
		return rewriteValueMIPS64_OpConst64F_0(v)
	case OpConst8:
		return rewriteValueMIPS64_OpConst8_0(v)
	case OpConstBool:
		return rewriteValueMIPS64_OpConstBool_0(v)
	case OpConstNil:
		return rewriteValueMIPS64_OpConstNil_0(v)
	case OpCvt32Fto32:
		return rewriteValueMIPS64_OpCvt32Fto32_0(v)
	case OpCvt32Fto64:
		return rewriteValueMIPS64_OpCvt32Fto64_0(v)
	case OpCvt32Fto64F:
		return rewriteValueMIPS64_OpCvt32Fto64F_0(v)
	case OpCvt32to32F:
		return rewriteValueMIPS64_OpCvt32to32F_0(v)
	case OpCvt32to64F:
		return rewriteValueMIPS64_OpCvt32to64F_0(v)
	case OpCvt64Fto32:
		return rewriteValueMIPS64_OpCvt64Fto32_0(v)
	case OpCvt64Fto32F:
		return rewriteValueMIPS64_OpCvt64Fto32F_0(v)
	case OpCvt64Fto64:
		return rewriteValueMIPS64_OpCvt64Fto64_0(v)
	case OpCvt64to32F:
		return rewriteValueMIPS64_OpCvt64to32F_0(v)
	case OpCvt64to64F:
		return rewriteValueMIPS64_OpCvt64to64F_0(v)
	case OpDiv16:
		return rewriteValueMIPS64_OpDiv16_0(v)
	case OpDiv16u:
		return rewriteValueMIPS64_OpDiv16u_0(v)
	case OpDiv32:
		return rewriteValueMIPS64_OpDiv32_0(v)
	case OpDiv32F:
		return rewriteValueMIPS64_OpDiv32F_0(v)
	case OpDiv32u:
		return rewriteValueMIPS64_OpDiv32u_0(v)
	case OpDiv64:
		return rewriteValueMIPS64_OpDiv64_0(v)
	case OpDiv64F:
		return rewriteValueMIPS64_OpDiv64F_0(v)
	case OpDiv64u:
		return rewriteValueMIPS64_OpDiv64u_0(v)
	case OpDiv8:
		return rewriteValueMIPS64_OpDiv8_0(v)
	case OpDiv8u:
		return rewriteValueMIPS64_OpDiv8u_0(v)
	case OpEq16:
		return rewriteValueMIPS64_OpEq16_0(v)
	case OpEq32:
		return rewriteValueMIPS64_OpEq32_0(v)
	case OpEq32F:
		return rewriteValueMIPS64_OpEq32F_0(v)
	case OpEq64:
		return rewriteValueMIPS64_OpEq64_0(v)
	case OpEq64F:
		return rewriteValueMIPS64_OpEq64F_0(v)
	case OpEq8:
		return rewriteValueMIPS64_OpEq8_0(v)
	case OpEqB:
		return rewriteValueMIPS64_OpEqB_0(v)
	case OpEqPtr:
		return rewriteValueMIPS64_OpEqPtr_0(v)
	case OpGeq16:
		return rewriteValueMIPS64_OpGeq16_0(v)
	case OpGeq16U:
		return rewriteValueMIPS64_OpGeq16U_0(v)
	case OpGeq32:
		return rewriteValueMIPS64_OpGeq32_0(v)
	case OpGeq32F:
		return rewriteValueMIPS64_OpGeq32F_0(v)
	case OpGeq32U:
		return rewriteValueMIPS64_OpGeq32U_0(v)
	case OpGeq64:
		return rewriteValueMIPS64_OpGeq64_0(v)
	case OpGeq64F:
		return rewriteValueMIPS64_OpGeq64F_0(v)
	case OpGeq64U:
		return rewriteValueMIPS64_OpGeq64U_0(v)
	case OpGeq8:
		return rewriteValueMIPS64_OpGeq8_0(v)
	case OpGeq8U:
		return rewriteValueMIPS64_OpGeq8U_0(v)
	case OpGetCallerPC:
		return rewriteValueMIPS64_OpGetCallerPC_0(v)
	case OpGetCallerSP:
		return rewriteValueMIPS64_OpGetCallerSP_0(v)
	case OpGetClosurePtr:
		return rewriteValueMIPS64_OpGetClosurePtr_0(v)
	case OpGreater16:
		return rewriteValueMIPS64_OpGreater16_0(v)
	case OpGreater16U:
		return rewriteValueMIPS64_OpGreater16U_0(v)
	case OpGreater32:
		return rewriteValueMIPS64_OpGreater32_0(v)
	case OpGreater32F:
		return rewriteValueMIPS64_OpGreater32F_0(v)
	case OpGreater32U:
		return rewriteValueMIPS64_OpGreater32U_0(v)
	case OpGreater64:
		return rewriteValueMIPS64_OpGreater64_0(v)
	case OpGreater64F:
		return rewriteValueMIPS64_OpGreater64F_0(v)
	case OpGreater64U:
		return rewriteValueMIPS64_OpGreater64U_0(v)
	case OpGreater8:
		return rewriteValueMIPS64_OpGreater8_0(v)
	case OpGreater8U:
		return rewriteValueMIPS64_OpGreater8U_0(v)
	case OpHmul32:
		return rewriteValueMIPS64_OpHmul32_0(v)
	case OpHmul32u:
		return rewriteValueMIPS64_OpHmul32u_0(v)
	case OpHmul64:
		return rewriteValueMIPS64_OpHmul64_0(v)
	case OpHmul64u:
		return rewriteValueMIPS64_OpHmul64u_0(v)
	case OpInterCall:
		return rewriteValueMIPS64_OpInterCall_0(v)
	case OpIsInBounds:
		return rewriteValueMIPS64_OpIsInBounds_0(v)
	case OpIsNonNil:
		return rewriteValueMIPS64_OpIsNonNil_0(v)
	case OpIsSliceInBounds:
		return rewriteValueMIPS64_OpIsSliceInBounds_0(v)
	case OpLeq16:
		return rewriteValueMIPS64_OpLeq16_0(v)
	case OpLeq16U:
		return rewriteValueMIPS64_OpLeq16U_0(v)
	case OpLeq32:
		return rewriteValueMIPS64_OpLeq32_0(v)
	case OpLeq32F:
		return rewriteValueMIPS64_OpLeq32F_0(v)
	case OpLeq32U:
		return rewriteValueMIPS64_OpLeq32U_0(v)
	case OpLeq64:
		return rewriteValueMIPS64_OpLeq64_0(v)
	case OpLeq64F:
		return rewriteValueMIPS64_OpLeq64F_0(v)
	case OpLeq64U:
		return rewriteValueMIPS64_OpLeq64U_0(v)
	case OpLeq8:
		return rewriteValueMIPS64_OpLeq8_0(v)
	case OpLeq8U:
		return rewriteValueMIPS64_OpLeq8U_0(v)
	case OpLess16:
		return rewriteValueMIPS64_OpLess16_0(v)
	case OpLess16U:
		return rewriteValueMIPS64_OpLess16U_0(v)
	case OpLess32:
		return rewriteValueMIPS64_OpLess32_0(v)
	case OpLess32F:
		return rewriteValueMIPS64_OpLess32F_0(v)
	case OpLess32U:
		return rewriteValueMIPS64_OpLess32U_0(v)
	case OpLess64:
		return rewriteValueMIPS64_OpLess64_0(v)
	case OpLess64F:
		return rewriteValueMIPS64_OpLess64F_0(v)
	case OpLess64U:
		return rewriteValueMIPS64_OpLess64U_0(v)
	case OpLess8:
		return rewriteValueMIPS64_OpLess8_0(v)
	case OpLess8U:
		return rewriteValueMIPS64_OpLess8U_0(v)
	case OpLoad:
		return rewriteValueMIPS64_OpLoad_0(v)
	case OpLocalAddr:
		return rewriteValueMIPS64_OpLocalAddr_0(v)
	case OpLsh16x16:
		return rewriteValueMIPS64_OpLsh16x16_0(v)
	case OpLsh16x32:
		return rewriteValueMIPS64_OpLsh16x32_0(v)
	case OpLsh16x64:
		return rewriteValueMIPS64_OpLsh16x64_0(v)
	case OpLsh16x8:
		return rewriteValueMIPS64_OpLsh16x8_0(v)
	case OpLsh32x16:
		return rewriteValueMIPS64_OpLsh32x16_0(v)
	case OpLsh32x32:
		return rewriteValueMIPS64_OpLsh32x32_0(v)
	case OpLsh32x64:
		return rewriteValueMIPS64_OpLsh32x64_0(v)
	case OpLsh32x8:
		return rewriteValueMIPS64_OpLsh32x8_0(v)
	case OpLsh64x16:
		return rewriteValueMIPS64_OpLsh64x16_0(v)
	case OpLsh64x32:
		return rewriteValueMIPS64_OpLsh64x32_0(v)
	case OpLsh64x64:
		return rewriteValueMIPS64_OpLsh64x64_0(v)
	case OpLsh64x8:
		return rewriteValueMIPS64_OpLsh64x8_0(v)
	case OpLsh8x16:
		return rewriteValueMIPS64_OpLsh8x16_0(v)
	case OpLsh8x32:
		return rewriteValueMIPS64_OpLsh8x32_0(v)
	case OpLsh8x64:
		return rewriteValueMIPS64_OpLsh8x64_0(v)
	case OpLsh8x8:
		return rewriteValueMIPS64_OpLsh8x8_0(v)
	case OpMIPS64ADDV:
		return rewriteValueMIPS64_OpMIPS64ADDV_0(v)
	case OpMIPS64ADDVconst:
		return rewriteValueMIPS64_OpMIPS64ADDVconst_0(v)
	case OpMIPS64AND:
		return rewriteValueMIPS64_OpMIPS64AND_0(v)
	case OpMIPS64ANDconst:
		return rewriteValueMIPS64_OpMIPS64ANDconst_0(v)
	case OpMIPS64LoweredAtomicAdd32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32_0(v)
	case OpMIPS64LoweredAtomicAdd64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64_0(v)
	case OpMIPS64LoweredAtomicStore32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32_0(v)
	case OpMIPS64LoweredAtomicStore64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64_0(v)
	case OpMIPS64MOVBUload:
		return rewriteValueMIPS64_OpMIPS64MOVBUload_0(v)
	case OpMIPS64MOVBUreg:
		return rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v)
	case OpMIPS64MOVBload:
		return rewriteValueMIPS64_OpMIPS64MOVBload_0(v)
	case OpMIPS64MOVBreg:
		return rewriteValueMIPS64_OpMIPS64MOVBreg_0(v)
	case OpMIPS64MOVBstore:
		return rewriteValueMIPS64_OpMIPS64MOVBstore_0(v)
	case OpMIPS64MOVBstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v)
	case OpMIPS64MOVDload:
		return rewriteValueMIPS64_OpMIPS64MOVDload_0(v)
	case OpMIPS64MOVDstore:
		return rewriteValueMIPS64_OpMIPS64MOVDstore_0(v)
	case OpMIPS64MOVFload:
		return rewriteValueMIPS64_OpMIPS64MOVFload_0(v)
	case OpMIPS64MOVFstore:
		return rewriteValueMIPS64_OpMIPS64MOVFstore_0(v)
	case OpMIPS64MOVHUload:
		return rewriteValueMIPS64_OpMIPS64MOVHUload_0(v)
	case OpMIPS64MOVHUreg:
		return rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v)
	case OpMIPS64MOVHload:
		return rewriteValueMIPS64_OpMIPS64MOVHload_0(v)
	case OpMIPS64MOVHreg:
		return rewriteValueMIPS64_OpMIPS64MOVHreg_0(v)
	case OpMIPS64MOVHstore:
		return rewriteValueMIPS64_OpMIPS64MOVHstore_0(v)
	case OpMIPS64MOVHstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v)
	case OpMIPS64MOVVload:
		return rewriteValueMIPS64_OpMIPS64MOVVload_0(v)
	case OpMIPS64MOVVreg:
		return rewriteValueMIPS64_OpMIPS64MOVVreg_0(v)
	case OpMIPS64MOVVstore:
		return rewriteValueMIPS64_OpMIPS64MOVVstore_0(v)
	case OpMIPS64MOVVstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVVstorezero_0(v)
	case OpMIPS64MOVWUload:
		return rewriteValueMIPS64_OpMIPS64MOVWUload_0(v)
	case OpMIPS64MOVWUreg:
		return rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v)
	case OpMIPS64MOVWload:
		return rewriteValueMIPS64_OpMIPS64MOVWload_0(v)
	case OpMIPS64MOVWreg:
		// Rules for this opcode overflow one helper; try each chunk in order.
		return rewriteValueMIPS64_OpMIPS64MOVWreg_0(v) || rewriteValueMIPS64_OpMIPS64MOVWreg_10(v)
	case OpMIPS64MOVWstore:
		return rewriteValueMIPS64_OpMIPS64MOVWstore_0(v)
	case OpMIPS64MOVWstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVWstorezero_0(v)
	case OpMIPS64NEGV:
		return rewriteValueMIPS64_OpMIPS64NEGV_0(v)
	case OpMIPS64NOR:
		return rewriteValueMIPS64_OpMIPS64NOR_0(v)
	case OpMIPS64NORconst:
		return rewriteValueMIPS64_OpMIPS64NORconst_0(v)
	case OpMIPS64OR:
		return rewriteValueMIPS64_OpMIPS64OR_0(v)
	case OpMIPS64ORconst:
		return rewriteValueMIPS64_OpMIPS64ORconst_0(v)
	case OpMIPS64SGT:
		return rewriteValueMIPS64_OpMIPS64SGT_0(v)
	case OpMIPS64SGTU:
		return rewriteValueMIPS64_OpMIPS64SGTU_0(v)
	case OpMIPS64SGTUconst:
		return rewriteValueMIPS64_OpMIPS64SGTUconst_0(v)
	case OpMIPS64SGTconst:
		return rewriteValueMIPS64_OpMIPS64SGTconst_0(v) || rewriteValueMIPS64_OpMIPS64SGTconst_10(v)
	case OpMIPS64SLLV:
		return rewriteValueMIPS64_OpMIPS64SLLV_0(v)
	case OpMIPS64SLLVconst:
		return rewriteValueMIPS64_OpMIPS64SLLVconst_0(v)
	case OpMIPS64SRAV:
		return rewriteValueMIPS64_OpMIPS64SRAV_0(v)
	case OpMIPS64SRAVconst:
		return rewriteValueMIPS64_OpMIPS64SRAVconst_0(v)
	case OpMIPS64SRLV:
		return rewriteValueMIPS64_OpMIPS64SRLV_0(v)
	case OpMIPS64SRLVconst:
		return rewriteValueMIPS64_OpMIPS64SRLVconst_0(v)
	case OpMIPS64SUBV:
		return rewriteValueMIPS64_OpMIPS64SUBV_0(v)
	case OpMIPS64SUBVconst:
		return rewriteValueMIPS64_OpMIPS64SUBVconst_0(v)
	case OpMIPS64XOR:
		return rewriteValueMIPS64_OpMIPS64XOR_0(v)
	case OpMIPS64XORconst:
		return rewriteValueMIPS64_OpMIPS64XORconst_0(v)
	case OpMod16:
		return rewriteValueMIPS64_OpMod16_0(v)
	case OpMod16u:
		return rewriteValueMIPS64_OpMod16u_0(v)
	case OpMod32:
		return rewriteValueMIPS64_OpMod32_0(v)
	case OpMod32u:
		return rewriteValueMIPS64_OpMod32u_0(v)
	case OpMod64:
		return rewriteValueMIPS64_OpMod64_0(v)
	case OpMod64u:
		return rewriteValueMIPS64_OpMod64u_0(v)
	case OpMod8:
		return rewriteValueMIPS64_OpMod8_0(v)
	case OpMod8u:
		return rewriteValueMIPS64_OpMod8u_0(v)
	case OpMove:
		return rewriteValueMIPS64_OpMove_0(v) || rewriteValueMIPS64_OpMove_10(v)
	case OpMul16:
		return rewriteValueMIPS64_OpMul16_0(v)
	case OpMul32:
		return rewriteValueMIPS64_OpMul32_0(v)
	case OpMul32F:
		return rewriteValueMIPS64_OpMul32F_0(v)
	case OpMul64:
		return rewriteValueMIPS64_OpMul64_0(v)
	case OpMul64F:
		return rewriteValueMIPS64_OpMul64F_0(v)
	case OpMul8:
		return rewriteValueMIPS64_OpMul8_0(v)
	case OpNeg16:
		return rewriteValueMIPS64_OpNeg16_0(v)
	case OpNeg32:
		return rewriteValueMIPS64_OpNeg32_0(v)
	case OpNeg32F:
		return rewriteValueMIPS64_OpNeg32F_0(v)
	case OpNeg64:
		return rewriteValueMIPS64_OpNeg64_0(v)
	case OpNeg64F:
		return rewriteValueMIPS64_OpNeg64F_0(v)
	case OpNeg8:
		return rewriteValueMIPS64_OpNeg8_0(v)
	case OpNeq16:
		return rewriteValueMIPS64_OpNeq16_0(v)
	case OpNeq32:
		return rewriteValueMIPS64_OpNeq32_0(v)
	case OpNeq32F:
		return rewriteValueMIPS64_OpNeq32F_0(v)
	case OpNeq64:
		return rewriteValueMIPS64_OpNeq64_0(v)
	case OpNeq64F:
		return rewriteValueMIPS64_OpNeq64F_0(v)
	case OpNeq8:
		return rewriteValueMIPS64_OpNeq8_0(v)
	case OpNeqB:
		return rewriteValueMIPS64_OpNeqB_0(v)
	case OpNeqPtr:
		return rewriteValueMIPS64_OpNeqPtr_0(v)
	case OpNilCheck:
		return rewriteValueMIPS64_OpNilCheck_0(v)
	case OpNot:
		return rewriteValueMIPS64_OpNot_0(v)
	case OpOffPtr:
		return rewriteValueMIPS64_OpOffPtr_0(v)
	case OpOr16:
		return rewriteValueMIPS64_OpOr16_0(v)
	case OpOr32:
		return rewriteValueMIPS64_OpOr32_0(v)
	case OpOr64:
		return rewriteValueMIPS64_OpOr64_0(v)
	case OpOr8:
		return rewriteValueMIPS64_OpOr8_0(v)
	case OpOrB:
		return rewriteValueMIPS64_OpOrB_0(v)
	case OpPanicBounds:
		return rewriteValueMIPS64_OpPanicBounds_0(v)
	case OpRotateLeft16:
		return rewriteValueMIPS64_OpRotateLeft16_0(v)
	case OpRotateLeft32:
		return rewriteValueMIPS64_OpRotateLeft32_0(v)
	case OpRotateLeft64:
		return rewriteValueMIPS64_OpRotateLeft64_0(v)
	case OpRotateLeft8:
		return rewriteValueMIPS64_OpRotateLeft8_0(v)
	case OpRound32F:
		return rewriteValueMIPS64_OpRound32F_0(v)
	case OpRound64F:
		return rewriteValueMIPS64_OpRound64F_0(v)
	case OpRsh16Ux16:
		return rewriteValueMIPS64_OpRsh16Ux16_0(v)
	case OpRsh16Ux32:
		return rewriteValueMIPS64_OpRsh16Ux32_0(v)
	case OpRsh16Ux64:
		return rewriteValueMIPS64_OpRsh16Ux64_0(v)
	case OpRsh16Ux8:
		return rewriteValueMIPS64_OpRsh16Ux8_0(v)
	case OpRsh16x16:
		return rewriteValueMIPS64_OpRsh16x16_0(v)
	case OpRsh16x32:
		return rewriteValueMIPS64_OpRsh16x32_0(v)
	case OpRsh16x64:
		return rewriteValueMIPS64_OpRsh16x64_0(v)
	case OpRsh16x8:
		return rewriteValueMIPS64_OpRsh16x8_0(v)
	case OpRsh32Ux16:
		return rewriteValueMIPS64_OpRsh32Ux16_0(v)
	case OpRsh32Ux32:
		return rewriteValueMIPS64_OpRsh32Ux32_0(v)
	case OpRsh32Ux64:
		return rewriteValueMIPS64_OpRsh32Ux64_0(v)
	case OpRsh32Ux8:
		return rewriteValueMIPS64_OpRsh32Ux8_0(v)
	case OpRsh32x16:
		return rewriteValueMIPS64_OpRsh32x16_0(v)
	case OpRsh32x32:
		return rewriteValueMIPS64_OpRsh32x32_0(v)
	case OpRsh32x64:
		return rewriteValueMIPS64_OpRsh32x64_0(v)
	case OpRsh32x8:
		return rewriteValueMIPS64_OpRsh32x8_0(v)
	case OpRsh64Ux16:
		return rewriteValueMIPS64_OpRsh64Ux16_0(v)
	case OpRsh64Ux32:
		return rewriteValueMIPS64_OpRsh64Ux32_0(v)
	case OpRsh64Ux64:
		return rewriteValueMIPS64_OpRsh64Ux64_0(v)
	case OpRsh64Ux8:
		return rewriteValueMIPS64_OpRsh64Ux8_0(v)
	case OpRsh64x16:
		return rewriteValueMIPS64_OpRsh64x16_0(v)
	case OpRsh64x32:
		return rewriteValueMIPS64_OpRsh64x32_0(v)
	case OpRsh64x64:
		return rewriteValueMIPS64_OpRsh64x64_0(v)
	case OpRsh64x8:
		return rewriteValueMIPS64_OpRsh64x8_0(v)
	case OpRsh8Ux16:
		return rewriteValueMIPS64_OpRsh8Ux16_0(v)
	case OpRsh8Ux32:
		return rewriteValueMIPS64_OpRsh8Ux32_0(v)
	case OpRsh8Ux64:
		return rewriteValueMIPS64_OpRsh8Ux64_0(v)
	case OpRsh8Ux8:
		return rewriteValueMIPS64_OpRsh8Ux8_0(v)
	case OpRsh8x16:
		return rewriteValueMIPS64_OpRsh8x16_0(v)
	case OpRsh8x32:
		return rewriteValueMIPS64_OpRsh8x32_0(v)
	case OpRsh8x64:
		return rewriteValueMIPS64_OpRsh8x64_0(v)
	case OpRsh8x8:
		return rewriteValueMIPS64_OpRsh8x8_0(v)
	case OpSelect0:
		return rewriteValueMIPS64_OpSelect0_0(v)
	case OpSelect1:
		return rewriteValueMIPS64_OpSelect1_0(v) || rewriteValueMIPS64_OpSelect1_10(v) || rewriteValueMIPS64_OpSelect1_20(v)
	case OpSignExt16to32:
		return rewriteValueMIPS64_OpSignExt16to32_0(v)
	case OpSignExt16to64:
		return rewriteValueMIPS64_OpSignExt16to64_0(v)
	case OpSignExt32to64:
		return rewriteValueMIPS64_OpSignExt32to64_0(v)
	case OpSignExt8to16:
		return rewriteValueMIPS64_OpSignExt8to16_0(v)
	case OpSignExt8to32:
		return rewriteValueMIPS64_OpSignExt8to32_0(v)
	case OpSignExt8to64:
		return rewriteValueMIPS64_OpSignExt8to64_0(v)
	case OpSlicemask:
		return rewriteValueMIPS64_OpSlicemask_0(v)
	case OpSqrt:
		return rewriteValueMIPS64_OpSqrt_0(v)
	case OpStaticCall:
		return rewriteValueMIPS64_OpStaticCall_0(v)
	case OpStore:
		return rewriteValueMIPS64_OpStore_0(v)
	case OpSub16:
		return rewriteValueMIPS64_OpSub16_0(v)
	case OpSub32:
		return rewriteValueMIPS64_OpSub32_0(v)
	case OpSub32F:
		return rewriteValueMIPS64_OpSub32F_0(v)
	case OpSub64:
		return rewriteValueMIPS64_OpSub64_0(v)
	case OpSub64F:
		return rewriteValueMIPS64_OpSub64F_0(v)
	case OpSub8:
		return rewriteValueMIPS64_OpSub8_0(v)
	case OpSubPtr:
		return rewriteValueMIPS64_OpSubPtr_0(v)
	case OpTrunc16to8:
		return rewriteValueMIPS64_OpTrunc16to8_0(v)
	case OpTrunc32to16:
		return rewriteValueMIPS64_OpTrunc32to16_0(v)
	case OpTrunc32to8:
		return rewriteValueMIPS64_OpTrunc32to8_0(v)
	case OpTrunc64to16:
		return rewriteValueMIPS64_OpTrunc64to16_0(v)
	case OpTrunc64to32:
		return rewriteValueMIPS64_OpTrunc64to32_0(v)
	case OpTrunc64to8:
		return rewriteValueMIPS64_OpTrunc64to8_0(v)
	case OpWB:
		return rewriteValueMIPS64_OpWB_0(v)
	case OpXor16:
		return rewriteValueMIPS64_OpXor16_0(v)
	case OpXor32:
		return rewriteValueMIPS64_OpXor32_0(v)
	case OpXor64:
		return rewriteValueMIPS64_OpXor64_0(v)
	case OpXor8:
		return rewriteValueMIPS64_OpXor8_0(v)
	case OpZero:
		return rewriteValueMIPS64_OpZero_0(v) || rewriteValueMIPS64_OpZero_10(v)
	case OpZeroExt16to32:
		return rewriteValueMIPS64_OpZeroExt16to32_0(v)
	case OpZeroExt16to64:
		return rewriteValueMIPS64_OpZeroExt16to64_0(v)
	case OpZeroExt32to64:
		return rewriteValueMIPS64_OpZeroExt32to64_0(v)
	case OpZeroExt8to16:
		return rewriteValueMIPS64_OpZeroExt8to16_0(v)
	case OpZeroExt8to32:
		return rewriteValueMIPS64_OpZeroExt8to32_0(v)
	case OpZeroExt8to64:
		return rewriteValueMIPS64_OpZeroExt8to64_0(v)
	}
	// No rewrite rule matched this opcode.
	return false
}
   629	func rewriteValueMIPS64_OpAdd16_0(v *Value) bool {
   630		// match: (Add16 x y)
   631		// cond:
   632		// result: (ADDV x y)
   633		for {
   634			y := v.Args[1]
   635			x := v.Args[0]
   636			v.reset(OpMIPS64ADDV)
   637			v.AddArg(x)
   638			v.AddArg(y)
   639			return true
   640		}
   641	}
   642	func rewriteValueMIPS64_OpAdd32_0(v *Value) bool {
   643		// match: (Add32 x y)
   644		// cond:
   645		// result: (ADDV x y)
   646		for {
   647			y := v.Args[1]
   648			x := v.Args[0]
   649			v.reset(OpMIPS64ADDV)
   650			v.AddArg(x)
   651			v.AddArg(y)
   652			return true
   653		}
   654	}
   655	func rewriteValueMIPS64_OpAdd32F_0(v *Value) bool {
   656		// match: (Add32F x y)
   657		// cond:
   658		// result: (ADDF x y)
   659		for {
   660			y := v.Args[1]
   661			x := v.Args[0]
   662			v.reset(OpMIPS64ADDF)
   663			v.AddArg(x)
   664			v.AddArg(y)
   665			return true
   666		}
   667	}
   668	func rewriteValueMIPS64_OpAdd64_0(v *Value) bool {
   669		// match: (Add64 x y)
   670		// cond:
   671		// result: (ADDV x y)
   672		for {
   673			y := v.Args[1]
   674			x := v.Args[0]
   675			v.reset(OpMIPS64ADDV)
   676			v.AddArg(x)
   677			v.AddArg(y)
   678			return true
   679		}
   680	}
   681	func rewriteValueMIPS64_OpAdd64F_0(v *Value) bool {
   682		// match: (Add64F x y)
   683		// cond:
   684		// result: (ADDD x y)
   685		for {
   686			y := v.Args[1]
   687			x := v.Args[0]
   688			v.reset(OpMIPS64ADDD)
   689			v.AddArg(x)
   690			v.AddArg(y)
   691			return true
   692		}
   693	}
   694	func rewriteValueMIPS64_OpAdd8_0(v *Value) bool {
   695		// match: (Add8 x y)
   696		// cond:
   697		// result: (ADDV x y)
   698		for {
   699			y := v.Args[1]
   700			x := v.Args[0]
   701			v.reset(OpMIPS64ADDV)
   702			v.AddArg(x)
   703			v.AddArg(y)
   704			return true
   705		}
   706	}
   707	func rewriteValueMIPS64_OpAddPtr_0(v *Value) bool {
   708		// match: (AddPtr x y)
   709		// cond:
   710		// result: (ADDV x y)
   711		for {
   712			y := v.Args[1]
   713			x := v.Args[0]
   714			v.reset(OpMIPS64ADDV)
   715			v.AddArg(x)
   716			v.AddArg(y)
   717			return true
   718		}
   719	}
   720	func rewriteValueMIPS64_OpAddr_0(v *Value) bool {
   721		// match: (Addr {sym} base)
   722		// cond:
   723		// result: (MOVVaddr {sym} base)
   724		for {
   725			sym := v.Aux
   726			base := v.Args[0]
   727			v.reset(OpMIPS64MOVVaddr)
   728			v.Aux = sym
   729			v.AddArg(base)
   730			return true
   731		}
   732	}
   733	func rewriteValueMIPS64_OpAnd16_0(v *Value) bool {
   734		// match: (And16 x y)
   735		// cond:
   736		// result: (AND x y)
   737		for {
   738			y := v.Args[1]
   739			x := v.Args[0]
   740			v.reset(OpMIPS64AND)
   741			v.AddArg(x)
   742			v.AddArg(y)
   743			return true
   744		}
   745	}
   746	func rewriteValueMIPS64_OpAnd32_0(v *Value) bool {
   747		// match: (And32 x y)
   748		// cond:
   749		// result: (AND x y)
   750		for {
   751			y := v.Args[1]
   752			x := v.Args[0]
   753			v.reset(OpMIPS64AND)
   754			v.AddArg(x)
   755			v.AddArg(y)
   756			return true
   757		}
   758	}
   759	func rewriteValueMIPS64_OpAnd64_0(v *Value) bool {
   760		// match: (And64 x y)
   761		// cond:
   762		// result: (AND x y)
   763		for {
   764			y := v.Args[1]
   765			x := v.Args[0]
   766			v.reset(OpMIPS64AND)
   767			v.AddArg(x)
   768			v.AddArg(y)
   769			return true
   770		}
   771	}
   772	func rewriteValueMIPS64_OpAnd8_0(v *Value) bool {
   773		// match: (And8 x y)
   774		// cond:
   775		// result: (AND x y)
   776		for {
   777			y := v.Args[1]
   778			x := v.Args[0]
   779			v.reset(OpMIPS64AND)
   780			v.AddArg(x)
   781			v.AddArg(y)
   782			return true
   783		}
   784	}
   785	func rewriteValueMIPS64_OpAndB_0(v *Value) bool {
   786		// match: (AndB x y)
   787		// cond:
   788		// result: (AND x y)
   789		for {
   790			y := v.Args[1]
   791			x := v.Args[0]
   792			v.reset(OpMIPS64AND)
   793			v.AddArg(x)
   794			v.AddArg(y)
   795			return true
   796		}
   797	}
   798	func rewriteValueMIPS64_OpAtomicAdd32_0(v *Value) bool {
   799		// match: (AtomicAdd32 ptr val mem)
   800		// cond:
   801		// result: (LoweredAtomicAdd32 ptr val mem)
   802		for {
   803			mem := v.Args[2]
   804			ptr := v.Args[0]
   805			val := v.Args[1]
   806			v.reset(OpMIPS64LoweredAtomicAdd32)
   807			v.AddArg(ptr)
   808			v.AddArg(val)
   809			v.AddArg(mem)
   810			return true
   811		}
   812	}
   813	func rewriteValueMIPS64_OpAtomicAdd64_0(v *Value) bool {
   814		// match: (AtomicAdd64 ptr val mem)
   815		// cond:
   816		// result: (LoweredAtomicAdd64 ptr val mem)
   817		for {
   818			mem := v.Args[2]
   819			ptr := v.Args[0]
   820			val := v.Args[1]
   821			v.reset(OpMIPS64LoweredAtomicAdd64)
   822			v.AddArg(ptr)
   823			v.AddArg(val)
   824			v.AddArg(mem)
   825			return true
   826		}
   827	}
   828	func rewriteValueMIPS64_OpAtomicCompareAndSwap32_0(v *Value) bool {
   829		// match: (AtomicCompareAndSwap32 ptr old new_ mem)
   830		// cond:
   831		// result: (LoweredAtomicCas32 ptr old new_ mem)
   832		for {
   833			mem := v.Args[3]
   834			ptr := v.Args[0]
   835			old := v.Args[1]
   836			new_ := v.Args[2]
   837			v.reset(OpMIPS64LoweredAtomicCas32)
   838			v.AddArg(ptr)
   839			v.AddArg(old)
   840			v.AddArg(new_)
   841			v.AddArg(mem)
   842			return true
   843		}
   844	}
   845	func rewriteValueMIPS64_OpAtomicCompareAndSwap64_0(v *Value) bool {
   846		// match: (AtomicCompareAndSwap64 ptr old new_ mem)
   847		// cond:
   848		// result: (LoweredAtomicCas64 ptr old new_ mem)
   849		for {
   850			mem := v.Args[3]
   851			ptr := v.Args[0]
   852			old := v.Args[1]
   853			new_ := v.Args[2]
   854			v.reset(OpMIPS64LoweredAtomicCas64)
   855			v.AddArg(ptr)
   856			v.AddArg(old)
   857			v.AddArg(new_)
   858			v.AddArg(mem)
   859			return true
   860		}
   861	}
   862	func rewriteValueMIPS64_OpAtomicExchange32_0(v *Value) bool {
   863		// match: (AtomicExchange32 ptr val mem)
   864		// cond:
   865		// result: (LoweredAtomicExchange32 ptr val mem)
   866		for {
   867			mem := v.Args[2]
   868			ptr := v.Args[0]
   869			val := v.Args[1]
   870			v.reset(OpMIPS64LoweredAtomicExchange32)
   871			v.AddArg(ptr)
   872			v.AddArg(val)
   873			v.AddArg(mem)
   874			return true
   875		}
   876	}
   877	func rewriteValueMIPS64_OpAtomicExchange64_0(v *Value) bool {
   878		// match: (AtomicExchange64 ptr val mem)
   879		// cond:
   880		// result: (LoweredAtomicExchange64 ptr val mem)
   881		for {
   882			mem := v.Args[2]
   883			ptr := v.Args[0]
   884			val := v.Args[1]
   885			v.reset(OpMIPS64LoweredAtomicExchange64)
   886			v.AddArg(ptr)
   887			v.AddArg(val)
   888			v.AddArg(mem)
   889			return true
   890		}
   891	}
// rewriteValueMIPS64_OpAtomicLoad32_0 lowers AtomicLoad32 to
// LoweredAtomicLoad32; always matches.
func rewriteValueMIPS64_OpAtomicLoad32_0(v *Value) bool {
	// match: (AtomicLoad32 ptr mem)
	// cond:
	// result: (LoweredAtomicLoad32 ptr mem)
	for {
		// Capture the operands before v.reset reinitializes v.
		mem := v.Args[1]
		ptr := v.Args[0]
		v.reset(OpMIPS64LoweredAtomicLoad32)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueMIPS64_OpAtomicLoad64_0 lowers AtomicLoad64 to
// LoweredAtomicLoad64; always matches.
func rewriteValueMIPS64_OpAtomicLoad64_0(v *Value) bool {
	// match: (AtomicLoad64 ptr mem)
	// cond:
	// result: (LoweredAtomicLoad64 ptr mem)
	for {
		mem := v.Args[1]
		ptr := v.Args[0]
		v.reset(OpMIPS64LoweredAtomicLoad64)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueMIPS64_OpAtomicLoad8_0 lowers AtomicLoad8 to
// LoweredAtomicLoad8; always matches.
func rewriteValueMIPS64_OpAtomicLoad8_0(v *Value) bool {
	// match: (AtomicLoad8 ptr mem)
	// cond:
	// result: (LoweredAtomicLoad8 ptr mem)
	for {
		mem := v.Args[1]
		ptr := v.Args[0]
		v.reset(OpMIPS64LoweredAtomicLoad8)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueMIPS64_OpAtomicLoadPtr_0 lowers AtomicLoadPtr to the 64-bit
// atomic load (pointers are 64 bits wide on mips64); always matches.
func rewriteValueMIPS64_OpAtomicLoadPtr_0(v *Value) bool {
	// match: (AtomicLoadPtr ptr mem)
	// cond:
	// result: (LoweredAtomicLoad64 ptr mem)
	for {
		mem := v.Args[1]
		ptr := v.Args[0]
		v.reset(OpMIPS64LoweredAtomicLoad64)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpAtomicStore32_0 lowers AtomicStore32 to
// LoweredAtomicStore32; always matches.
func rewriteValueMIPS64_OpAtomicStore32_0(v *Value) bool {
	// match: (AtomicStore32 ptr val mem)
	// cond:
	// result: (LoweredAtomicStore32 ptr val mem)
	for {
		// Capture the operands before v.reset reinitializes v.
		mem := v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		v.reset(OpMIPS64LoweredAtomicStore32)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueMIPS64_OpAtomicStore64_0 lowers AtomicStore64 to
// LoweredAtomicStore64; always matches.
func rewriteValueMIPS64_OpAtomicStore64_0(v *Value) bool {
	// match: (AtomicStore64 ptr val mem)
	// cond:
	// result: (LoweredAtomicStore64 ptr val mem)
	for {
		mem := v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		v.reset(OpMIPS64LoweredAtomicStore64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueMIPS64_OpAtomicStorePtrNoWB_0 lowers AtomicStorePtrNoWB to the
// 64-bit atomic store (pointers are 64 bits wide on mips64); always matches.
func rewriteValueMIPS64_OpAtomicStorePtrNoWB_0(v *Value) bool {
	// match: (AtomicStorePtrNoWB ptr val mem)
	// cond:
	// result: (LoweredAtomicStore64 ptr val mem)
	for {
		mem := v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		v.reset(OpMIPS64LoweredAtomicStore64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpAvg64u_0 lowers the unsigned 64-bit average.
// It emits ((x-y)>>1)+y rather than (x+y)>>1, so the intermediate sum
// cannot overflow 64 bits; always matches.
func rewriteValueMIPS64_OpAvg64u_0(v *Value) bool {
	b := v.Block
	// match: (Avg64u <t> x y)
	// cond:
	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64ADDV)
		// v0 = (x - y) >> 1, using a logical (unsigned) shift.
		v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
		v0.AuxInt = 1
		v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
		v1.AddArg(x)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpClosureCall_0 lowers a generic closure call to the
// MIPS64 CALLclosure op, preserving the argument-area width carried in
// AuxInt; always matches.
func rewriteValueMIPS64_OpClosureCall_0(v *Value) bool {
	// match: (ClosureCall [argwid] entry closure mem)
	// cond:
	// result: (CALLclosure [argwid] entry closure mem)
	for {
		// Capture AuxInt and operands before v.reset reinitializes v.
		argwid := v.AuxInt
		mem := v.Args[2]
		entry := v.Args[0]
		closure := v.Args[1]
		v.reset(OpMIPS64CALLclosure)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(closure)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpCom16_0 lowers 16-bit bitwise complement as
// NOR(0, x) == ^x; always matches.
func rewriteValueMIPS64_OpCom16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com16 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCom32_0 lowers 32-bit bitwise complement as
// NOR(0, x) == ^x; always matches.
func rewriteValueMIPS64_OpCom32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com32 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCom64_0 lowers 64-bit bitwise complement as
// NOR(0, x) == ^x; always matches.
func rewriteValueMIPS64_OpCom64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com64 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCom8_0 lowers 8-bit bitwise complement as
// NOR(0, x) == ^x; always matches.
func rewriteValueMIPS64_OpCom8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Com8 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpConst16_0 lowers a 16-bit integer constant to
// MOVVconst, carrying the value through AuxInt; always matches.
func rewriteValueMIPS64_OpConst16_0(v *Value) bool {
	// match: (Const16 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS64_OpConst32_0 lowers a 32-bit integer constant to
// MOVVconst; always matches.
func rewriteValueMIPS64_OpConst32_0(v *Value) bool {
	// match: (Const32 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS64_OpConst32F_0 lowers a 32-bit float constant to
// MOVFconst; always matches.
func rewriteValueMIPS64_OpConst32F_0(v *Value) bool {
	// match: (Const32F [val])
	// cond:
	// result: (MOVFconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVFconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS64_OpConst64_0 lowers a 64-bit integer constant to
// MOVVconst; always matches.
func rewriteValueMIPS64_OpConst64_0(v *Value) bool {
	// match: (Const64 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS64_OpConst64F_0 lowers a 64-bit float constant to
// MOVDconst; always matches.
func rewriteValueMIPS64_OpConst64F_0(v *Value) bool {
	// match: (Const64F [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVDconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS64_OpConst8_0 lowers an 8-bit integer constant to
// MOVVconst; always matches.
func rewriteValueMIPS64_OpConst8_0(v *Value) bool {
	// match: (Const8 [val])
	// cond:
	// result: (MOVVconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = val
		return true
	}
}

// rewriteValueMIPS64_OpConstBool_0 lowers a boolean constant (0 or 1 in
// AuxInt) to MOVVconst; always matches.
func rewriteValueMIPS64_OpConstBool_0(v *Value) bool {
	// match: (ConstBool [b])
	// cond:
	// result: (MOVVconst [b])
	for {
		b := v.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = b
		return true
	}
}

// rewriteValueMIPS64_OpConstNil_0 lowers the nil-pointer constant to
// MOVVconst [0]; always matches.
func rewriteValueMIPS64_OpConstNil_0(v *Value) bool {
	// match: (ConstNil)
	// cond:
	// result: (MOVVconst [0])
	for {
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
}
// rewriteValueMIPS64_OpCvt32Fto32_0 lowers float32-to-int32 conversion to
// TRUNCFW; always matches.
func rewriteValueMIPS64_OpCvt32Fto32_0(v *Value) bool {
	// match: (Cvt32Fto32 x)
	// cond:
	// result: (TRUNCFW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCFW)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt32Fto64_0 lowers float32-to-int64 conversion to
// TRUNCFV; always matches.
func rewriteValueMIPS64_OpCvt32Fto64_0(v *Value) bool {
	// match: (Cvt32Fto64 x)
	// cond:
	// result: (TRUNCFV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCFV)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt32Fto64F_0 lowers float32-to-float64 conversion to
// MOVFD; always matches.
func rewriteValueMIPS64_OpCvt32Fto64F_0(v *Value) bool {
	// match: (Cvt32Fto64F x)
	// cond:
	// result: (MOVFD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVFD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt32to32F_0 lowers int32-to-float32 conversion to
// MOVWF; always matches.
func rewriteValueMIPS64_OpCvt32to32F_0(v *Value) bool {
	// match: (Cvt32to32F x)
	// cond:
	// result: (MOVWF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWF)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt32to64F_0 lowers int32-to-float64 conversion to
// MOVWD; always matches.
func rewriteValueMIPS64_OpCvt32to64F_0(v *Value) bool {
	// match: (Cvt32to64F x)
	// cond:
	// result: (MOVWD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt64Fto32_0 lowers float64-to-int32 conversion to
// TRUNCDW; always matches.
func rewriteValueMIPS64_OpCvt64Fto32_0(v *Value) bool {
	// match: (Cvt64Fto32 x)
	// cond:
	// result: (TRUNCDW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCDW)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt64Fto32F_0 lowers float64-to-float32 conversion to
// MOVDF; always matches.
func rewriteValueMIPS64_OpCvt64Fto32F_0(v *Value) bool {
	// match: (Cvt64Fto32F x)
	// cond:
	// result: (MOVDF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVDF)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt64Fto64_0 lowers float64-to-int64 conversion to
// TRUNCDV; always matches.
func rewriteValueMIPS64_OpCvt64Fto64_0(v *Value) bool {
	// match: (Cvt64Fto64 x)
	// cond:
	// result: (TRUNCDV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCDV)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt64to32F_0 lowers int64-to-float32 conversion to
// MOVVF; always matches.
func rewriteValueMIPS64_OpCvt64to32F_0(v *Value) bool {
	// match: (Cvt64to32F x)
	// cond:
	// result: (MOVVF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVVF)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt64to64F_0 lowers int64-to-float64 conversion to
// MOVVD; always matches.
func rewriteValueMIPS64_OpCvt64to64F_0(v *Value) bool {
	// match: (Cvt64to64F x)
	// cond:
	// result: (MOVVD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVVD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpDiv16_0 lowers signed 16-bit division: both operands
// are sign-extended to 64 bits and DIVV produces a (quotient, remainder)
// tuple, from which Select1 picks the quotient; always matches.
func rewriteValueMIPS64_OpDiv16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpDiv16u_0 lowers unsigned 16-bit division via
// zero-extension and DIVVU, selecting the quotient half of the tuple;
// always matches.
func rewriteValueMIPS64_OpDiv16u_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpDiv32_0 lowers signed 32-bit division via
// sign-extension and DIVV, selecting the quotient; always matches.
func rewriteValueMIPS64_OpDiv32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpDiv32F_0 lowers 32-bit float division to DIVF;
// always matches.
func rewriteValueMIPS64_OpDiv32F_0(v *Value) bool {
	// match: (Div32F x y)
	// cond:
	// result: (DIVF x y)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64DIVF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpDiv32u_0 lowers unsigned 32-bit division via
// zero-extension and DIVVU, selecting the quotient; always matches.
func rewriteValueMIPS64_OpDiv32u_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div32u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpDiv64_0 lowers signed 64-bit division to DIVV (no
// extension needed), selecting the quotient; always matches.
func rewriteValueMIPS64_OpDiv64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div64 x y)
	// cond:
	// result: (Select1 (DIVV x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpDiv64F_0 lowers 64-bit float division to DIVD;
// always matches.
func rewriteValueMIPS64_OpDiv64F_0(v *Value) bool {
	// match: (Div64F x y)
	// cond:
	// result: (DIVD x y)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64DIVD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpDiv64u_0 lowers unsigned 64-bit division to DIVVU,
// selecting the quotient; always matches.
func rewriteValueMIPS64_OpDiv64u_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div64u x y)
	// cond:
	// result: (Select1 (DIVVU x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpDiv8_0 lowers signed 8-bit division via
// sign-extension and DIVV, selecting the quotient; always matches.
func rewriteValueMIPS64_OpDiv8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpDiv8u_0 lowers unsigned 8-bit division via
// zero-extension and DIVVU, selecting the quotient; always matches.
func rewriteValueMIPS64_OpDiv8u_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpEq16_0 lowers 16-bit equality: x == y is computed as
// 1 >u (x ^ y) on the zero-extended operands (the XOR is zero iff the
// values are equal); always matches.
func rewriteValueMIPS64_OpEq16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpEq32_0 lowers 32-bit equality as 1 >u (x ^ y) on the
// zero-extended operands; always matches.
func rewriteValueMIPS64_OpEq32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpEq32F_0 lowers 32-bit float equality to the FP
// compare CMPEQF followed by a read of the FP condition flag; always
// matches.
func rewriteValueMIPS64_OpEq32F_0(v *Value) bool {
	b := v.Block
	// match: (Eq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpEq64_0 lowers 64-bit equality as 1 >u (x ^ y);
// no extension is needed at full width; always matches.
func rewriteValueMIPS64_OpEq64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq64 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpEq64F_0 lowers 64-bit float equality to CMPEQD plus
// a read of the FP condition flag; always matches.
func rewriteValueMIPS64_OpEq64F_0(v *Value) bool {
	b := v.Block
	// match: (Eq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpEq8_0 lowers 8-bit equality as 1 >u (x ^ y) on the
// zero-extended operands; always matches.
func rewriteValueMIPS64_OpEq8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpEqB_0 lowers boolean equality: since booleans are
// 0 or 1, x == y is 1 ^ (x ^ y); always matches.
func rewriteValueMIPS64_OpEqB_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpEqPtr_0 lowers pointer equality identically to
// 64-bit integer equality: 1 >u (x ^ y); always matches.
func rewriteValueMIPS64_OpEqPtr_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGeq16_0 lowers signed 16-bit >=: x >= y is computed
// as 1 ^ (y > x) on the sign-extended operands, since MIPS64 only has
// set-on-greater-than (SGT); always matches.
func rewriteValueMIPS64_OpGeq16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Geq16 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 y) (SignExt16to64 x)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGeq16U_0 lowers unsigned 16-bit >= as
// 1 ^ (y >u x) on the zero-extended operands; always matches.
func rewriteValueMIPS64_OpGeq16U_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Geq16U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGeq32_0 lowers signed 32-bit >= as 1 ^ (y > x) on
// the sign-extended operands; always matches.
func rewriteValueMIPS64_OpGeq32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Geq32 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 y) (SignExt32to64 x)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGeq32F_0 lowers 32-bit float >= to CMPGEF plus a
// read of the FP condition flag; always matches.
func rewriteValueMIPS64_OpGeq32F_0(v *Value) bool {
	b := v.Block
	// match: (Geq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpGeq32U_0 lowers unsigned 32-bit >= as
// 1 ^ (y >u x) on the zero-extended operands; always matches.
func rewriteValueMIPS64_OpGeq32U_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Geq32U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGeq64_0 lowers signed 64-bit >= as 1 ^ (y > x);
// always matches.
func rewriteValueMIPS64_OpGeq64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Geq64 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT y x))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v1.AddArg(y)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGeq64F_0 lowers 64-bit float >= to CMPGED plus a
// read of the FP condition flag; always matches.
func rewriteValueMIPS64_OpGeq64F_0(v *Value) bool {
	b := v.Block
	// match: (Geq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpGeq64U_0 lowers unsigned 64-bit >= as
// 1 ^ (y >u x); always matches.
func rewriteValueMIPS64_OpGeq64U_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Geq64U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU y x))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg(y)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGeq8_0 lowers signed 8-bit >= as 1 ^ (y > x) on the
// sign-extended operands; always matches.
func rewriteValueMIPS64_OpGeq8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Geq8 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 y) (SignExt8to64 x)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpGeq8U_0 lowers unsigned 8-bit >= as 1 ^ (y >u x) on
// the zero-extended operands; always matches.
func rewriteValueMIPS64_OpGeq8U_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Geq8U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGetCallerPC_0 lowers GetCallerPC to its machine-level
// pseudo-op; the op has no arguments, so only the opcode changes. Always
// matches.
func rewriteValueMIPS64_OpGetCallerPC_0(v *Value) bool {
	// match: (GetCallerPC)
	// cond:
	// result: (LoweredGetCallerPC)
	for {
		v.reset(OpMIPS64LoweredGetCallerPC)
		return true
	}
}

// rewriteValueMIPS64_OpGetCallerSP_0 lowers GetCallerSP to its machine-level
// pseudo-op; always matches.
func rewriteValueMIPS64_OpGetCallerSP_0(v *Value) bool {
	// match: (GetCallerSP)
	// cond:
	// result: (LoweredGetCallerSP)
	for {
		v.reset(OpMIPS64LoweredGetCallerSP)
		return true
	}
}

// rewriteValueMIPS64_OpGetClosurePtr_0 lowers GetClosurePtr to its
// machine-level pseudo-op; always matches.
func rewriteValueMIPS64_OpGetClosurePtr_0(v *Value) bool {
	// match: (GetClosurePtr)
	// cond:
	// result: (LoweredGetClosurePtr)
	for {
		v.reset(OpMIPS64LoweredGetClosurePtr)
		return true
	}
}
  1881	func rewriteValueMIPS64_OpGreater16_0(v *Value) bool {
  1882		b := v.Block
  1883		typ := &b.Func.Config.Types
  1884		// match: (Greater16 x y)
  1885		// cond:
  1886		// result: (SGT (SignExt16to64 x) (SignExt16to64 y))
  1887		for {
  1888			y := v.Args[1]
  1889			x := v.Args[0]
  1890			v.reset(OpMIPS64SGT)
  1891			v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  1892			v0.AddArg(x)
  1893			v.AddArg(v0)
  1894			v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
  1895			v1.AddArg(y)
  1896			v.AddArg(v1)
  1897			return true
  1898		}
  1899	}
  1900	func rewriteValueMIPS64_OpGreater16U_0(v *Value) bool {
  1901		b := v.Block
  1902		typ := &b.Func.Config.Types
  1903		// match: (Greater16U x y)
  1904		// cond:
  1905		// result: (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y))
  1906		for {
  1907			y := v.Args[1]
  1908			x := v.Args[0]
  1909			v.reset(OpMIPS64SGTU)
  1910			v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1911			v0.AddArg(x)
  1912			v.AddArg(v0)
  1913			v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  1914			v1.AddArg(y)
  1915			v.AddArg(v1)
  1916			return true
  1917		}
  1918	}
  1919	func rewriteValueMIPS64_OpGreater32_0(v *Value) bool {
  1920		b := v.Block
  1921		typ := &b.Func.Config.Types
  1922		// match: (Greater32 x y)
  1923		// cond:
  1924		// result: (SGT (SignExt32to64 x) (SignExt32to64 y))
  1925		for {
  1926			y := v.Args[1]
  1927			x := v.Args[0]
  1928			v.reset(OpMIPS64SGT)
  1929			v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1930			v0.AddArg(x)
  1931			v.AddArg(v0)
  1932			v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
  1933			v1.AddArg(y)
  1934			v.AddArg(v1)
  1935			return true
  1936		}
  1937	}
  1938	func rewriteValueMIPS64_OpGreater32F_0(v *Value) bool {
  1939		b := v.Block
  1940		// match: (Greater32F x y)
  1941		// cond:
  1942		// result: (FPFlagTrue (CMPGTF x y))
  1943		for {
  1944			y := v.Args[1]
  1945			x := v.Args[0]
  1946			v.reset(OpMIPS64FPFlagTrue)
  1947			v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
  1948			v0.AddArg(x)
  1949			v0.AddArg(y)
  1950			v.AddArg(v0)
  1951			return true
  1952		}
  1953	}
  1954	func rewriteValueMIPS64_OpGreater32U_0(v *Value) bool {
  1955		b := v.Block
  1956		typ := &b.Func.Config.Types
  1957		// match: (Greater32U x y)
  1958		// cond:
  1959		// result: (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y))
  1960		for {
  1961			y := v.Args[1]
  1962			x := v.Args[0]
  1963			v.reset(OpMIPS64SGTU)
  1964			v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1965			v0.AddArg(x)
  1966			v.AddArg(v0)
  1967			v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  1968			v1.AddArg(y)
  1969			v.AddArg(v1)
  1970			return true
  1971		}
  1972	}
  1973	func rewriteValueMIPS64_OpGreater64_0(v *Value) bool {
  1974		// match: (Greater64 x y)
  1975		// cond:
  1976		// result: (SGT x y)
  1977		for {
  1978			y := v.Args[1]
  1979			x := v.Args[0]
  1980			v.reset(OpMIPS64SGT)
  1981			v.AddArg(x)
  1982			v.AddArg(y)
  1983			return true
  1984		}
  1985	}
  1986	func rewriteValueMIPS64_OpGreater64F_0(v *Value) bool {
  1987		b := v.Block
  1988		// match: (Greater64F x y)
  1989		// cond:
  1990		// result: (FPFlagTrue (CMPGTD x y))
  1991		for {
  1992			y := v.Args[1]
  1993			x := v.Args[0]
  1994			v.reset(OpMIPS64FPFlagTrue)
  1995			v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
  1996			v0.AddArg(x)
  1997			v0.AddArg(y)
  1998			v.AddArg(v0)
  1999			return true
  2000		}
  2001	}
  2002	func rewriteValueMIPS64_OpGreater64U_0(v *Value) bool {
  2003		// match: (Greater64U x y)
  2004		// cond:
  2005		// result: (SGTU x y)
  2006		for {
  2007			y := v.Args[1]
  2008			x := v.Args[0]
  2009			v.reset(OpMIPS64SGTU)
  2010			v.AddArg(x)
  2011			v.AddArg(y)
  2012			return true
  2013		}
  2014	}
// rewriteValueMIPS64_OpGreater8_0 lowers the generic Greater8 op to a MIPS64
// SGT on both operands sign-extended to 64 bits.
func rewriteValueMIPS64_OpGreater8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Greater8 x y)
	// cond:
	// result: (SGT (SignExt8to64 x) (SignExt8to64 y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpGreater8U_0 lowers the generic Greater8U op to a
// MIPS64 SGTU on both operands zero-extended to 64 bits.
func rewriteValueMIPS64_OpGreater8U_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Greater8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32_0 lowers Hmul32 (high 32 bits of the signed
// 32x32->64 product) as a full 64-bit MULV of the sign-extended operands,
// selecting the low tuple result and arithmetic-shifting it right by 32.
func rewriteValueMIPS64_OpHmul32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// cond:
	// result: (SRAVconst (Select1 <typ.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32])
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul32u_0 lowers Hmul32u (high 32 bits of the unsigned
// 32x32->64 product) as a full 64-bit MULVU of the zero-extended operands,
// selecting the low tuple result and logical-shifting it right by 32.
func rewriteValueMIPS64_OpHmul32u_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// cond:
	// result: (SRLVconst (Select1 <typ.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32])
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64_0 lowers Hmul64 by taking Select0 (the
// high-word element) of the MULV tuple result.
func rewriteValueMIPS64_OpHmul64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul64 x y)
	// cond:
	// result: (Select0 (MULV x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpHmul64u_0 lowers Hmul64u by taking Select0 (the
// high-word element) of the MULVU tuple result.
func rewriteValueMIPS64_OpHmul64u_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul64u x y)
	// cond:
	// result: (Select0 (MULVU x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpInterCall_0 lowers a generic interface call to the
// MIPS64 CALLinter op, carrying the argument width through AuxInt.
func rewriteValueMIPS64_OpInterCall_0(v *Value) bool {
	// match: (InterCall [argwid] entry mem)
	// cond:
	// result: (CALLinter [argwid] entry mem)
	for {
		argwid := v.AuxInt
		mem := v.Args[1]
		entry := v.Args[0]
		v.reset(OpMIPS64CALLinter)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpIsInBounds_0 lowers IsInBounds (idx < len, unsigned)
// to SGTU with the operands swapped: len > idx.
func rewriteValueMIPS64_OpIsInBounds_0(v *Value) bool {
	// match: (IsInBounds idx len)
	// cond:
	// result: (SGTU len idx)
	for {
		len := v.Args[1]
		idx := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v.AddArg(len)
		v.AddArg(idx)
		return true
	}
}
// rewriteValueMIPS64_OpIsNonNil_0 lowers IsNonNil to an unsigned compare of
// the pointer against a zero constant: SGTU ptr 0.
func rewriteValueMIPS64_OpIsNonNil_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsNonNil ptr)
	// cond:
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpIsSliceInBounds_0 lowers IsSliceInBounds (idx <= len)
// as the negation of (idx > len): the SGTU result is flipped by XORing with 1.
func rewriteValueMIPS64_OpIsSliceInBounds_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsSliceInBounds idx len)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		len := v.Args[1]
		idx := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg(idx)
		v1.AddArg(len)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16_0 lowers Leq16 (x <= y) as the negation of
// (x > y): SGT on sign-extended operands, flipped by XOR with constant 1.
func rewriteValueMIPS64_OpLeq16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq16U_0 lowers Leq16U (x <= y) as the negation of
// (x > y): SGTU on zero-extended operands, flipped by XOR with constant 1.
func rewriteValueMIPS64_OpLeq16U_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32_0 lowers Leq32 (x <= y) as the negation of
// (x > y): SGT on sign-extended operands, flipped by XOR with constant 1.
func rewriteValueMIPS64_OpLeq32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32F_0 lowers Leq32F (x <= y) as (y >= x): the
// CMPGEF operands are swapped, and its flag is read back via FPFlagTrue.
func rewriteValueMIPS64_OpLeq32F_0(v *Value) bool {
	b := v.Block
	// match: (Leq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq32U_0 lowers Leq32U (x <= y) as the negation of
// (x > y): SGTU on zero-extended operands, flipped by XOR with constant 1.
func rewriteValueMIPS64_OpLeq32U_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64_0 lowers Leq64 (x <= y) as the negation of
// (x > y): the SGT result is flipped by XORing with constant 1.
func rewriteValueMIPS64_OpLeq64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64F_0 lowers Leq64F (x <= y) as (y >= x): the
// CMPGED operands are swapped, and its flag is read back via FPFlagTrue.
func rewriteValueMIPS64_OpLeq64F_0(v *Value) bool {
	b := v.Block
	// match: (Leq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED y x))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLeq64U_0 lowers Leq64U (x <= y) as the negation of
// (x > y): the SGTU result is flipped by XORing with constant 1.
func rewriteValueMIPS64_OpLeq64U_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8_0 lowers Leq8 (x <= y) as the negation of
// (x > y): SGT on sign-extended operands, flipped by XOR with constant 1.
func rewriteValueMIPS64_OpLeq8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLeq8U_0 lowers Leq8U (x <= y) as the negation of
// (x > y): SGTU on zero-extended operands, flipped by XOR with constant 1.
func rewriteValueMIPS64_OpLeq8U_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16_0 lowers Less16 (x < y) as (y > x): SGT on
// sign-extended operands with the argument order swapped.
func rewriteValueMIPS64_OpLess16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// cond:
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess16U_0 lowers Less16U (x < y) as (y > x): SGTU on
// zero-extended operands with the argument order swapped.
func rewriteValueMIPS64_OpLess16U_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32_0 lowers Less32 (x < y) as (y > x): SGT on
// sign-extended operands with the argument order swapped.
func rewriteValueMIPS64_OpLess32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// cond:
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess32F_0 lowers Less32F (x < y) as (y > x): the
// CMPGTF operands are swapped, and its flag is read back via FPFlagTrue.
func rewriteValueMIPS64_OpLess32F_0(v *Value) bool {
	b := v.Block
	// match: (Less32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess32U_0 lowers Less32U (x < y) as (y > x): SGTU on
// zero-extended operands with the argument order swapped.
func rewriteValueMIPS64_OpLess32U_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// cond:
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess64_0 lowers Less64 (x < y) as (y > x): SGT with
// the argument order swapped.
func rewriteValueMIPS64_OpLess64_0(v *Value) bool {
	// match: (Less64 x y)
	// cond:
	// result: (SGT y x)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGT)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpLess64F_0 lowers Less64F (x < y) as (y > x): the
// CMPGTD operands are swapped, and its flag is read back via FPFlagTrue.
func rewriteValueMIPS64_OpLess64F_0(v *Value) bool {
	b := v.Block
	// match: (Less64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpLess64U_0 lowers Less64U (x < y) as (y > x): SGTU with
// the argument order swapped.
func rewriteValueMIPS64_OpLess64U_0(v *Value) bool {
	// match: (Less64U x y)
	// cond:
	// result: (SGTU y x)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpLess8_0 lowers Less8 (x < y) as (y > x): SGT on
// sign-extended operands with the argument order swapped.
func rewriteValueMIPS64_OpLess8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// cond:
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLess8U_0 lowers Less8U (x < y) as (y > x): SGTU on
// zero-extended operands with the argument order swapped.
func rewriteValueMIPS64_OpLess8U_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpLoad_0 lowers the generic Load op to the MIPS64 load
// instruction matching the loaded type: width and signedness select among
// MOVB/MOVBU/MOVH/MOVHU/MOVW/MOVWU/MOVV for integers and pointers, and
// MOVF/MOVD for 32- and 64-bit floats. Returns false if no rule matches.
func rewriteValueMIPS64_OpLoad_0(v *Value) bool {
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && isSigned(t))
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(is8BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !isSigned(t))
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(is8BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && isSigned(t))
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(is16BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !isSigned(t))
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(is16BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && isSigned(t))
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(is32BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !isSigned(t))
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(is32BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (MOVFload ptr mem)
	for {
		t := v.Type
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpLocalAddr_0 lowers LocalAddr to MOVVaddr, keeping the
// symbol in Aux and dropping the memory argument.
func rewriteValueMIPS64_OpLocalAddr_0(v *Value) bool {
	// match: (LocalAddr {sym} base _)
	// cond:
	// result: (MOVVaddr {sym} base)
	for {
		sym := v.Aux
		_ = v.Args[1]
		base := v.Args[0]
		v.reset(OpMIPS64MOVVaddr)
		v.Aux = sym
		v.AddArg(base)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x16_0 lowers Lsh16x16: the SLLV result is ANDed
// with NEGV(SGTU 64 shift) — an all-ones mask when the zero-extended shift
// amount is < 64, zero otherwise — so oversized shifts yield 0.
func rewriteValueMIPS64_OpLsh16x16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x32_0 lowers Lsh16x32: the SLLV result is ANDed
// with NEGV(SGTU 64 shift) — an all-ones mask when the zero-extended shift
// amount is < 64, zero otherwise — so oversized shifts yield 0.
func rewriteValueMIPS64_OpLsh16x32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x64_0 lowers Lsh16x64: the SLLV result is ANDed
// with NEGV(SGTU 64 y) — an all-ones mask when the shift amount is < 64,
// zero otherwise — so oversized shifts yield 0. No extension is needed for a
// 64-bit shift amount.
func rewriteValueMIPS64_OpLsh16x64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh16x8_0 lowers Lsh16x8: the SLLV result is ANDed
// with NEGV(SGTU 64 shift) — an all-ones mask when the zero-extended shift
// amount is < 64, zero otherwise — so oversized shifts yield 0.
func rewriteValueMIPS64_OpLsh16x8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x16_0 lowers Lsh32x16: the SLLV result is ANDed
// with NEGV(SGTU 64 shift) — an all-ones mask when the zero-extended shift
// amount is < 64, zero otherwise — so oversized shifts yield 0.
func rewriteValueMIPS64_OpLsh32x16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x32_0 lowers Lsh32x32: the SLLV result is ANDed
// with NEGV(SGTU 64 shift) — an all-ones mask when the zero-extended shift
// amount is < 64, zero otherwise — so oversized shifts yield 0.
func rewriteValueMIPS64_OpLsh32x32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x64_0 lowers Lsh32x64: the SLLV result is ANDed
// with NEGV(SGTU 64 y) — an all-ones mask when the shift amount is < 64,
// zero otherwise — so oversized shifts yield 0. No extension is needed for a
// 64-bit shift amount.
func rewriteValueMIPS64_OpLsh32x64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh32x8_0 lowers Lsh32x8: the SLLV result is ANDed
// with NEGV(SGTU 64 shift) — an all-ones mask when the zero-extended shift
// amount is < 64, zero otherwise — so oversized shifts yield 0.
func rewriteValueMIPS64_OpLsh32x8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x16_0 lowers Lsh64x16: the SLLV result is ANDed
// with NEGV(SGTU 64 shift) — an all-ones mask when the zero-extended shift
// amount is < 64, zero otherwise — so oversized shifts yield 0.
func rewriteValueMIPS64_OpLsh64x16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x32_0 lowers Lsh64x32: the SLLV result is ANDed
// with NEGV(SGTU 64 shift) — an all-ones mask when the zero-extended shift
// amount is < 64, zero otherwise — so oversized shifts yield 0.
func rewriteValueMIPS64_OpLsh64x32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x64_0 lowers Lsh64x64: the SLLV result is ANDed
// with NEGV(SGTU 64 y) — an all-ones mask when the shift amount is < 64,
// zero otherwise — so oversized shifts yield 0. No extension is needed for a
// 64-bit shift amount.
func rewriteValueMIPS64_OpLsh64x64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh64x8_0 lowers Lsh64x8 (64-bit shift by an 8-bit
// amount) to MIPS64 ops: the amount is zero-extended to 64 bits and the
// SLLV result is masked to 0 for amounts >= 64, matching Go shift semantics.
func rewriteValueMIPS64_OpLsh64x8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x16_0 lowers Lsh8x16 (8-bit shift by a 16-bit
// amount). Note the out-of-range mask still compares against 64, not 8:
// SLLV operates on the full 64-bit register, and the result is later
// truncated, so only amounts >= 64 need to be forced to zero here.
func rewriteValueMIPS64_OpLsh8x16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x32_0 lowers Lsh8x32 (8-bit shift by a 32-bit
// amount) to MIPS64 ops: zero-extend the amount, shift with SLLV, and mask
// the result to 0 when the amount is >= 64.
func rewriteValueMIPS64_OpLsh8x32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x64_0 lowers Lsh8x64 (8-bit shift by a 64-bit
// amount). The amount is already full-width, so it is used directly; the
// SLLV result is masked to 0 when the amount is >= 64.
func rewriteValueMIPS64_OpLsh8x64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpLsh8x8_0 lowers Lsh8x8 (8-bit shift by an 8-bit
// amount) to MIPS64 ops: zero-extend the amount, shift with SLLV, and mask
// the result to 0 when the amount is >= 64.
func rewriteValueMIPS64_OpLsh8x8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpMIPS64ADDV_0 simplifies MIPS64 ADDV. Rules, tried in
// order: fold a 32-bit-representable constant operand (either side) into an
// ADDVconst immediate, and turn an add of a negation (either side) into SUBV.
// Each for-loop is one rule; break abandons the rule, return true commits it.
func rewriteValueMIPS64_OpMIPS64ADDV_0(v *Value) bool {
	// match: (ADDV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ADDVconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDV (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ADDVconst [c] x)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDV x (NEGV y))
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64NEGV {
			break
		}
		y := v_1.Args[0]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADDV (NEGV y) x)
	// cond:
	// result: (SUBV x y)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64NEGV {
			break
		}
		y := v_0.Args[0]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ADDVconst_0 folds ADDVconst: merge the offset
// into a MOVVaddr address, drop an add of 0, constant-fold an add of a
// MOVVconst, and combine with a nested ADDVconst/SUBVconst when the combined
// immediate still fits in 32 bits.
func rewriteValueMIPS64_OpMIPS64ADDVconst_0(v *Value) bool {
	// match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr))
	// cond:
	// result: (MOVVaddr [off1+off2] {sym} ptr)
	for {
		off1 := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym := v_0.Aux
		ptr := v_0.Args[0]
		v.reset(OpMIPS64MOVVaddr)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		return true
	}
	// match: (ADDVconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c+d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c + d
		return true
	}
	// match: (ADDVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(c+d)
	// result: (ADDVconst [c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c + d
		v.AddArg(x)
		return true
	}
	// match: (ADDVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(c-d)
	// result: (ADDVconst [c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = c - d
		v.AddArg(x)
		return true
	}
	return false
}
  3375	func rewriteValueMIPS64_OpMIPS64AND_0(v *Value) bool {
  3376		// match: (AND x (MOVVconst [c]))
  3377		// cond: is32Bit(c)
  3378		// result: (ANDconst [c] x)
  3379		for {
  3380			_ = v.Args[1]
  3381			x := v.Args[0]
  3382			v_1 := v.Args[1]
  3383			if v_1.Op != OpMIPS64MOVVconst {
  3384				break
  3385			}
  3386			c := v_1.AuxInt
  3387			if !(is32Bit(c)) {
  3388				break
  3389			}
  3390			v.reset(OpMIPS64ANDconst)
  3391			v.AuxInt = c
  3392			v.AddArg(x)
  3393			return true
  3394		}
  3395		// match: (AND (MOVVconst [c]) x)
  3396		// cond: is32Bit(c)
  3397		// result: (ANDconst [c] x)
  3398		for {
  3399			x := v.Args[1]
  3400			v_0 := v.Args[0]
  3401			if v_0.Op != OpMIPS64MOVVconst {
  3402				break
  3403			}
  3404			c := v_0.AuxInt
  3405			if !(is32Bit(c)) {
  3406				break
  3407			}
  3408			v.reset(OpMIPS64ANDconst)
  3409			v.AuxInt = c
  3410			v.AddArg(x)
  3411			return true
  3412		}
  3413		// match: (AND x x)
  3414		// cond:
  3415		// result: x
  3416		for {
  3417			x := v.Args[1]
  3418			if x != v.Args[0] {
  3419				break
  3420			}
  3421			v.reset(OpCopy)
  3422			v.Type = x.Type
  3423			v.AddArg(x)
  3424			return true
  3425		}
  3426		return false
  3427	}
// rewriteValueMIPS64_OpMIPS64ANDconst_0 constant-folds ANDconst: mask 0
// yields constant 0, mask -1 is the identity, ANDing a constant folds to a
// constant, and nested ANDconst masks combine with &.
func rewriteValueMIPS64_OpMIPS64ANDconst_0(v *Value) bool {
	// match: (ANDconst [0] _)
	// cond:
	// result: (MOVVconst [0])
	for {
		if v.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (ANDconst [-1] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c&d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c & d
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// cond:
	// result: (ANDconst [c&d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c & d
		v.AddArg(x)
		return true
	}
	return false
}
  3485	func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32_0(v *Value) bool {
  3486		// match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem)
  3487		// cond: is32Bit(c)
  3488		// result: (LoweredAtomicAddconst32 [c] ptr mem)
  3489		for {
  3490			mem := v.Args[2]
  3491			ptr := v.Args[0]
  3492			v_1 := v.Args[1]
  3493			if v_1.Op != OpMIPS64MOVVconst {
  3494				break
  3495			}
  3496			c := v_1.AuxInt
  3497			if !(is32Bit(c)) {
  3498				break
  3499			}
  3500			v.reset(OpMIPS64LoweredAtomicAddconst32)
  3501			v.AuxInt = c
  3502			v.AddArg(ptr)
  3503			v.AddArg(mem)
  3504			return true
  3505		}
  3506		return false
  3507	}
  3508	func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64_0(v *Value) bool {
  3509		// match: (LoweredAtomicAdd64 ptr (MOVVconst [c]) mem)
  3510		// cond: is32Bit(c)
  3511		// result: (LoweredAtomicAddconst64 [c] ptr mem)
  3512		for {
  3513			mem := v.Args[2]
  3514			ptr := v.Args[0]
  3515			v_1 := v.Args[1]
  3516			if v_1.Op != OpMIPS64MOVVconst {
  3517				break
  3518			}
  3519			c := v_1.AuxInt
  3520			if !(is32Bit(c)) {
  3521				break
  3522			}
  3523			v.reset(OpMIPS64LoweredAtomicAddconst64)
  3524			v.AuxInt = c
  3525			v.AddArg(ptr)
  3526			v.AddArg(mem)
  3527			return true
  3528		}
  3529		return false
  3530	}
  3531	func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32_0(v *Value) bool {
  3532		// match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem)
  3533		// cond:
  3534		// result: (LoweredAtomicStorezero32 ptr mem)
  3535		for {
  3536			mem := v.Args[2]
  3537			ptr := v.Args[0]
  3538			v_1 := v.Args[1]
  3539			if v_1.Op != OpMIPS64MOVVconst {
  3540				break
  3541			}
  3542			if v_1.AuxInt != 0 {
  3543				break
  3544			}
  3545			v.reset(OpMIPS64LoweredAtomicStorezero32)
  3546			v.AddArg(ptr)
  3547			v.AddArg(mem)
  3548			return true
  3549		}
  3550		return false
  3551	}
  3552	func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64_0(v *Value) bool {
  3553		// match: (LoweredAtomicStore64 ptr (MOVVconst [0]) mem)
  3554		// cond:
  3555		// result: (LoweredAtomicStorezero64 ptr mem)
  3556		for {
  3557			mem := v.Args[2]
  3558			ptr := v.Args[0]
  3559			v_1 := v.Args[1]
  3560			if v_1.Op != OpMIPS64MOVVconst {
  3561				break
  3562			}
  3563			if v_1.AuxInt != 0 {
  3564				break
  3565			}
  3566			v.reset(OpMIPS64LoweredAtomicStorezero64)
  3567			v.AddArg(ptr)
  3568			v.AddArg(mem)
  3569			return true
  3570		}
  3571		return false
  3572	}
// rewriteValueMIPS64_OpMIPS64MOVBUload_0 folds address arithmetic into an
// unsigned byte load: an ADDVconst base merges into the load offset, and a
// MOVVaddr base merges both offset and symbol, provided the combined offset
// fits in 32 bits (and, for MOVVaddr, the symbols can be merged).
func rewriteValueMIPS64_OpMIPS64MOVBUload_0(v *Value) bool {
	// match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
  3623	func rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v *Value) bool {
  3624		// match: (MOVBUreg x:(MOVBUload _ _))
  3625		// cond:
  3626		// result: (MOVVreg x)
  3627		for {
  3628			x := v.Args[0]
  3629			if x.Op != OpMIPS64MOVBUload {
  3630				break
  3631			}
  3632			_ = x.Args[1]
  3633			v.reset(OpMIPS64MOVVreg)
  3634			v.AddArg(x)
  3635			return true
  3636		}
  3637		// match: (MOVBUreg x:(MOVBUreg _))
  3638		// cond:
  3639		// result: (MOVVreg x)
  3640		for {
  3641			x := v.Args[0]
  3642			if x.Op != OpMIPS64MOVBUreg {
  3643				break
  3644			}
  3645			v.reset(OpMIPS64MOVVreg)
  3646			v.AddArg(x)
  3647			return true
  3648		}
  3649		// match: (MOVBUreg (MOVVconst [c]))
  3650		// cond:
  3651		// result: (MOVVconst [int64(uint8(c))])
  3652		for {
  3653			v_0 := v.Args[0]
  3654			if v_0.Op != OpMIPS64MOVVconst {
  3655				break
  3656			}
  3657			c := v_0.AuxInt
  3658			v.reset(OpMIPS64MOVVconst)
  3659			v.AuxInt = int64(uint8(c))
  3660			return true
  3661		}
  3662		return false
  3663	}
// rewriteValueMIPS64_OpMIPS64MOVBload_0 folds address arithmetic into a
// signed byte load: an ADDVconst base merges into the load offset, and a
// MOVVaddr base merges both offset and symbol, provided the combined offset
// fits in 32 bits (and, for MOVVaddr, the symbols can be merged).
func rewriteValueMIPS64_OpMIPS64MOVBload_0(v *Value) bool {
	// match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
  3714	func rewriteValueMIPS64_OpMIPS64MOVBreg_0(v *Value) bool {
  3715		// match: (MOVBreg x:(MOVBload _ _))
  3716		// cond:
  3717		// result: (MOVVreg x)
  3718		for {
  3719			x := v.Args[0]
  3720			if x.Op != OpMIPS64MOVBload {
  3721				break
  3722			}
  3723			_ = x.Args[1]
  3724			v.reset(OpMIPS64MOVVreg)
  3725			v.AddArg(x)
  3726			return true
  3727		}
  3728		// match: (MOVBreg x:(MOVBreg _))
  3729		// cond:
  3730		// result: (MOVVreg x)
  3731		for {
  3732			x := v.Args[0]
  3733			if x.Op != OpMIPS64MOVBreg {
  3734				break
  3735			}
  3736			v.reset(OpMIPS64MOVVreg)
  3737			v.AddArg(x)
  3738			return true
  3739		}
  3740		// match: (MOVBreg (MOVVconst [c]))
  3741		// cond:
  3742		// result: (MOVVconst [int64(int8(c))])
  3743		for {
  3744			v_0 := v.Args[0]
  3745			if v_0.Op != OpMIPS64MOVVconst {
  3746				break
  3747			}
  3748			c := v_0.AuxInt
  3749			v.reset(OpMIPS64MOVVconst)
  3750			v.AuxInt = int64(int8(c))
  3751			return true
  3752		}
  3753		return false
  3754	}
// rewriteValueMIPS64_OpMIPS64MOVBstore_0 simplifies byte stores. Rules, in
// order: fold an ADDVconst or MOVVaddr base into the store's offset/symbol
// (subject to 32-bit offset and symbol-merge checks); rewrite a store of
// constant 0 to MOVBstorezero; and strip MOVBreg/MOVBUreg/MOVHreg/MOVHUreg/
// MOVWreg/MOVWUreg extensions from the stored value — only the low byte is
// written, so the extension is irrelevant.
func rewriteValueMIPS64_OpMIPS64MOVBstore_0(v *Value) bool {
	// match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVBreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVBstorezero_0 folds address arithmetic into a
// byte store of zero: an ADDVconst base merges into the offset, and a
// MOVVaddr base merges both offset and symbol, provided the combined offset
// fits in 32 bits (and, for MOVVaddr, the symbols can be merged).
func rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v *Value) bool {
	// match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDload_0 folds address arithmetic into a
// double-precision float load: an ADDVconst base merges into the offset, and
// a MOVVaddr base merges both offset and symbol, provided the combined
// offset fits in 32 bits (and, for MOVVaddr, the symbols can be merged).
func rewriteValueMIPS64_OpMIPS64MOVDload_0(v *Value) bool {
	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVDload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVDstore_0 folds address arithmetic into a
// double-precision float store: an ADDVconst base merges into the offset,
// and a MOVVaddr base merges both offset and symbol, provided the combined
// offset fits in 32 bits (and, for MOVVaddr, the symbols can be merged).
func rewriteValueMIPS64_OpMIPS64MOVDstore_0(v *Value) bool {
	// match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFload_0 folds address arithmetic into a
// single-precision float load: an ADDVconst base merges into the offset, and
// a MOVVaddr base merges both offset and symbol, provided the combined
// offset fits in 32 bits (and, for MOVVaddr, the symbols can be merged).
func rewriteValueMIPS64_OpMIPS64MOVFload_0(v *Value) bool {
	// match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVFload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVFload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVFstore_0 folds address arithmetic into a
// single-precision float store: an ADDVconst base merges into the offset,
// and a MOVVaddr base merges both offset and symbol, provided the combined
// offset fits in 32 bits (and, for MOVVaddr, the symbols can be merged).
func rewriteValueMIPS64_OpMIPS64MOVFstore_0(v *Value) bool {
	// match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVFstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUload_0 folds an ADDVconst offset or a
// MOVVaddr symbol+offset of the address operand into MOVHUload's
// AuxInt/Aux, as long as the combined offset still fits in 32 bits.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVHUload_0(v *Value) bool {
	// match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHUreg_0 elides a redundant 16-bit
// zero-extension: if the operand is already zero-extended to at most 16
// bits (an unsigned narrow load or a narrower zero-extension), the
// MOVHUreg is a no-op and becomes MOVVreg; a constant operand is folded.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v *Value) bool {
	// match: (MOVHUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint16(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHload_0 folds an ADDVconst offset or a
// MOVVaddr symbol+offset of the address operand into MOVHload's
// AuxInt/Aux, as long as the combined offset still fits in 32 bits.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVHload_0(v *Value) bool {
	// match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHreg_0 elides a redundant 16-bit
// sign-extension: if the operand is already correctly extended to at most
// 16 bits (a narrow load, or a narrower/equal register extension), the
// MOVHreg is a no-op and becomes MOVVreg; a constant operand is folded.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVHreg_0(v *Value) bool {
	// match: (MOVHreg x:(MOVBload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(int16(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstore_0 optimizes MOVHstore: it folds
// ADDVconst/MOVVaddr address arithmetic into AuxInt/Aux, turns a store
// of constant zero into MOVHstorezero, and drops a sign/zero extension
// of the stored value (only the low 16 bits are written anyway).
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVHstore_0(v *Value) bool {
	// match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVHstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVHstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVHstorezero_0 folds an ADDVconst offset
// or a MOVVaddr symbol+offset of the address operand into
// MOVHstorezero's AuxInt/Aux when the combined offset fits in 32 bits.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v *Value) bool {
	// match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVload_0 folds an ADDVconst offset or a
// MOVVaddr symbol+offset of the address operand into MOVVload's
// AuxInt/Aux, as long as the combined offset still fits in 32 bits.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVVload_0(v *Value) bool {
	// match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVreg_0 simplifies a 64-bit register
// move: a single-use operand turns it into a MOVVnop (so regalloc can
// coalesce it), and a constant operand is propagated through.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVVreg_0(v *Value) bool {
	// match: (MOVVreg x)
	// cond: x.Uses == 1
	// result: (MOVVnop x)
	for {
		x := v.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpMIPS64MOVVnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVVreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstore_0 optimizes MOVVstore: it folds
// ADDVconst/MOVVaddr address arithmetic into AuxInt/Aux and turns a
// store of constant zero into MOVVstorezero.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVVstore_0(v *Value) bool {
	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVVstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVVstorezero_0 folds an ADDVconst offset
// or a MOVVaddr symbol+offset of the address operand into
// MOVVstorezero's AuxInt/Aux when the combined offset fits in 32 bits.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVVstorezero_0(v *Value) bool {
	// match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUload_0 folds an ADDVconst offset or a
// MOVVaddr symbol+offset of the address operand into MOVWUload's
// AuxInt/Aux, as long as the combined offset still fits in 32 bits.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVWUload_0(v *Value) bool {
	// match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWUreg_0 elides a redundant 32-bit
// zero-extension: if the operand is already zero-extended to at most 32
// bits (an unsigned narrow load or a narrower/equal zero-extension), the
// MOVWUreg is a no-op and becomes MOVVreg; a constant operand is folded.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v *Value) bool {
	// match: (MOVWUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVWUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVWUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(uint32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint32(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWload_0 folds an ADDVconst offset or a
// MOVVaddr symbol+offset of the address operand into MOVWload's
// AuxInt/Aux, as long as the combined offset still fits in 32 bits.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVWload_0(v *Value) bool {
	// match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
  5077	func rewriteValueMIPS64_OpMIPS64MOVWreg_0(v *Value) bool {
  5078		// match: (MOVWreg x:(MOVBload _ _))
  5079		// cond:
  5080		// result: (MOVVreg x)
  5081		for {
  5082			x := v.Args[0]
  5083			if x.Op != OpMIPS64MOVBload {
  5084				break
  5085			}
  5086			_ = x.Args[1]
  5087			v.reset(OpMIPS64MOVVreg)
  5088			v.AddArg(x)
  5089			return true
  5090		}
  5091		// match: (MOVWreg x:(MOVBUload _ _))
  5092		// cond:
  5093		// result: (MOVVreg x)
  5094		for {
  5095			x := v.Args[0]
  5096			if x.Op != OpMIPS64MOVBUload {
  5097				break
  5098			}
  5099			_ = x.Args[1]
  5100			v.reset(OpMIPS64MOVVreg)
  5101			v.AddArg(x)
  5102			return true
  5103		}
  5104		// match: (MOVWreg x:(MOVHload _ _))
  5105		// cond:
  5106		// result: (MOVVreg x)
  5107		for {
  5108			x := v.Args[0]
  5109			if x.Op != OpMIPS64MOVHload {
  5110				break
  5111			}
  5112			_ = x.Args[1]
  5113			v.reset(OpMIPS64MOVVreg)
  5114			v.AddArg(x)
  5115			return true
  5116		}
  5117		// match: (MOVWreg x:(MOVHUload _ _))
  5118		// cond:
  5119		// result: (MOVVreg x)
  5120		for {
  5121			x := v.Args[0]
  5122			if x.Op != OpMIPS64MOVHUload {
  5123				break
  5124			}
  5125			_ = x.Args[1]
  5126			v.reset(OpMIPS64MOVVreg)
  5127			v.AddArg(x)
  5128			return true
  5129		}
  5130		// match: (MOVWreg x:(MOVWload _ _))
  5131		// cond:
  5132		// result: (MOVVreg x)
  5133		for {
  5134			x := v.Args[0]
  5135			if x.Op != OpMIPS64MOVWload {
  5136				break
  5137			}
  5138			_ = x.Args[1]
  5139			v.reset(OpMIPS64MOVVreg)
  5140			v.AddArg(x)
  5141			return true
  5142		}
  5143		// match: (MOVWreg x:(MOVBreg _))
  5144		// cond:
  5145		// result: (MOVVreg x)
  5146		for {
  5147			x := v.Args[0]
  5148			if x.Op != OpMIPS64MOVBreg {
  5149				break
  5150			}
  5151			v.reset(OpMIPS64MOVVreg)
  5152			v.AddArg(x)
  5153			return true
  5154		}
  5155		// match: (MOVWreg x:(MOVBUreg _))
  5156		// cond:
  5157		// result: (MOVVreg x)
  5158		for {
  5159			x := v.Args[0]
  5160			if x.Op != OpMIPS64MOVBUreg {
  5161				break
  5162			}
  5163			v.reset(OpMIPS64MOVVreg)
  5164			v.AddArg(x)
  5165			return true
  5166		}
  5167		// match: (MOVWreg x:(MOVHreg _))
  5168		// cond:
  5169		// result: (MOVVreg x)
  5170		for {
  5171			x := v.Args[0]
  5172			if x.Op != OpMIPS64MOVHreg {
  5173				break
  5174			}
  5175			v.reset(OpMIPS64MOVVreg)
  5176			v.AddArg(x)
  5177			return true
  5178		}
  5179		// match: (MOVWreg x:(MOVHreg _))
  5180		// cond:
  5181		// result: (MOVVreg x)
  5182		for {
  5183			x := v.Args[0]
  5184			if x.Op != OpMIPS64MOVHreg {
  5185				break
  5186			}
  5187			v.reset(OpMIPS64MOVVreg)
  5188			v.AddArg(x)
  5189			return true
  5190		}
  5191		// match: (MOVWreg x:(MOVWreg _))
  5192		// cond:
  5193		// result: (MOVVreg x)
  5194		for {
  5195			x := v.Args[0]
  5196			if x.Op != OpMIPS64MOVWreg {
  5197				break
  5198			}
  5199			v.reset(OpMIPS64MOVVreg)
  5200			v.AddArg(x)
  5201			return true
  5202		}
  5203		return false
  5204	}
// rewriteValueMIPS64_OpMIPS64MOVWreg_10 is the overflow continuation of
// rewriteValueMIPS64_OpMIPS64MOVWreg_0 (the generator splits long rule
// sets into _0, _10, ... chunks): it folds a constant operand through
// the 32-bit sign-extension.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVWreg_10(v *Value) bool {
	// match: (MOVWreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [int64(int32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(int32(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstore_0 optimizes MOVWstore: it folds
// ADDVconst/MOVVaddr address arithmetic into AuxInt/Aux, turns a store
// of constant zero into MOVWstorezero, and drops a sign/zero extension
// of the stored value (only the low 32 bits are written anyway).
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVWstore_0(v *Value) bool {
	// match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem)
	// cond:
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64MOVWstorezero_0 folds an ADDVconst offset
// or a MOVVaddr symbol+offset of the address operand into
// MOVWstorezero's AuxInt/Aux when the combined offset fits in 32 bits.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64MOVWstorezero_0(v *Value) bool {
	// match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NEGV_0 folds negation of a constant.
// Note -c deliberately wraps in two's complement for c == MinInt64,
// matching the machine's NEG behavior.
// Generated from gen/MIPS64.rules; change the rules, not this function.
func rewriteValueMIPS64_OpMIPS64NEGV_0(v *Value) bool {
	// match: (NEGV (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [-c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = -c
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NOR_0 rewrites NOR values: when either operand
// is a 32-bit-representable constant (NOR is commutative, so both argument
// orders are matched), the value becomes a NORconst with the other operand.
func rewriteValueMIPS64_OpMIPS64NOR_0(v *Value) bool {
	// match: (NOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64NORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (NOR (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (NORconst [c] x)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64NORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64NORconst_0 constant-folds NORconst applied to a
// constant: NOR(c, d) == ^(c|d).
func rewriteValueMIPS64_OpMIPS64NORconst_0(v *Value) bool {
	// match: (NORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [^(c|d)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = ^(c | d)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64OR_0 rewrites OR values: a 32-bit constant
// operand (either order) becomes ORconst, and x|x simplifies to x.
func rewriteValueMIPS64_OpMIPS64OR_0(v *Value) bool {
	// match: (OR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (ORconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (OR (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (ORconst [c] x)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (OR x x)
	// cond:
	// result: x
	for {
		x := v.Args[1]
		if x != v.Args[0] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64ORconst_0 simplifies ORconst: x|0 == x,
// x|-1 == -1, constant arguments fold completely, and nested ORconst
// values are combined when the merged constant still fits in 32 bits.
func rewriteValueMIPS64_OpMIPS64ORconst_0(v *Value) bool {
	// match: (ORconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ORconst [-1] _)
	// cond:
	// result: (MOVVconst [-1])
	for {
		if v.AuxInt != -1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = -1
		return true
	}
	// match: (ORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c|d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c | d
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond: is32Bit(c|d)
	// result: (ORconst [c|d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c | d)) {
			break
		}
		v.reset(OpMIPS64ORconst)
		v.AuxInt = c | d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGT_0 rewrites SGT (signed set-on-greater-than):
// a 32-bit constant first operand becomes SGTconst. Note only the
// (const, x) order is matched — SGT is not commutative.
func rewriteValueMIPS64_OpMIPS64SGT_0(v *Value) bool {
	// match: (SGT (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTconst [c] x)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTU_0 rewrites SGTU (unsigned
// set-on-greater-than): a 32-bit constant first operand becomes SGTUconst.
// Like SGT, only the (const, x) argument order is matched.
func rewriteValueMIPS64_OpMIPS64SGTU_0(v *Value) bool {
	// match: (SGTU (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (SGTUconst [c] x)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SGTUconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTUconst_0 resolves SGTUconst (unsigned c > x)
// to a constant 0/1 when the comparison can be decided statically: against a
// constant operand, or against an operand whose unsigned upper bound is known
// (zero-extended byte/halfword, masked value, or right-shifted value).
func rewriteValueMIPS64_OpMIPS64SGTUconst_0(v *Value) bool {
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)>uint64(d)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(uint64(c) > uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (MOVVconst [d]))
	// cond: uint64(c)<=uint64(d)
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(uint64(c) <= uint64(d)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTUconst [c] (MOVBUreg _))
	// cond: 0xff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		if !(0xff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (MOVHUreg _))
	// cond: 0xffff < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		if !(0xffff < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (ANDconst [m] _))
	// cond: uint64(m) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := v_0.AuxInt
		if !(uint64(m) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTUconst [c] (SRLVconst _ [d]))
	// cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := v_0.AuxInt
		if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTconst_0 resolves SGTconst (signed c > x) to a
// constant 0/1 when it can be decided statically: against a constant, or
// against sign/zero-extended byte and halfword operands whose signed range is
// known. Rules are tried in order; the first match wins. Additional SGTconst
// rules continue in rewriteValueMIPS64_OpMIPS64SGTconst_10.
func rewriteValueMIPS64_OpMIPS64SGTconst_0(v *Value) bool {
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c>d
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(c > d) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVVconst [d]))
	// cond: c<=d
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		if !(c <= d) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: 0x7f < c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBreg {
			break
		}
		if !(0x7f < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVBreg _))
	// cond: c <= -0x80
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBreg {
			break
		}
		if !(c <= -0x80) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: 0xff < c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		if !(0xff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVBUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVBUreg {
			break
		}
		if !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: 0x7fff < c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHreg {
			break
		}
		if !(0x7fff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVHreg _))
	// cond: c <= -0x8000
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHreg {
			break
		}
		if !(c <= -0x8000) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: 0xffff < c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		if !(0xffff < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (MOVHUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVHUreg {
			break
		}
		if !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SGTconst_10 continues the SGTconst rules from
// rewriteValueMIPS64_OpMIPS64SGTconst_0 (the generator splits long rule sets
// into _0/_10/... chunks): zero-extended word, masked, and right-shifted
// operands with statically known non-negative bounds.
func rewriteValueMIPS64_OpMIPS64SGTconst_10(v *Value) bool {
	// match: (SGTconst [c] (MOVWUreg _))
	// cond: c < 0
	// result: (MOVVconst [0])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVWUreg {
			break
		}
		if !(c < 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SGTconst [c] (ANDconst [m] _))
	// cond: 0 <= m && m < c
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ANDconst {
			break
		}
		m := v_0.AuxInt
		if !(0 <= m && m < c) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	// match: (SGTconst [c] (SRLVconst _ [d]))
	// cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)
	// result: (MOVVconst [1])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SRLVconst {
			break
		}
		d := v_0.AuxInt
		if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 1
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLV_0 rewrites SLLV (shift left logical) with a
// constant shift amount: amounts >= 64 produce constant 0; otherwise the shift
// becomes SLLVconst. The >=64 rule must be tried first, since the general
// rule below it matches any constant amount.
func rewriteValueMIPS64_OpMIPS64SLLV_0(v *Value) bool {
	// match: (SLLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SLLV x (MOVVconst [c]))
	// cond:
	// result: (SLLVconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SLLVconst_0 constant-folds SLLVconst of a
// constant operand into a MOVVconst.
func rewriteValueMIPS64_OpMIPS64SLLVconst_0(v *Value) bool {
	// match: (SLLVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [d<<uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = d << uint64(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAV_0 rewrites SRAV (arithmetic shift right)
// with a constant amount. Amounts >= 64 are clamped to 63 (shifting in the
// sign bit, preserving arithmetic-shift semantics); otherwise the shift
// becomes SRAVconst. The clamp rule must precede the general rule.
func rewriteValueMIPS64_OpMIPS64SRAV_0(v *Value) bool {
	// match: (SRAV x (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (SRAVconst x [63])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 63
		v.AddArg(x)
		return true
	}
	// match: (SRAV x (MOVVconst [c]))
	// cond:
	// result: (SRAVconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRAVconst_0 constant-folds SRAVconst of a
// constant operand; d>>c on a signed int64 is an arithmetic shift in Go,
// matching the instruction's semantics.
func rewriteValueMIPS64_OpMIPS64SRAVconst_0(v *Value) bool {
	// match: (SRAVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [d>>uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = d >> uint64(c)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLV_0 rewrites SRLV (logical shift right) with a
// constant amount: amounts >= 64 produce constant 0; otherwise the shift
// becomes SRLVconst. As with SLLV, the >=64 rule is tried first.
func rewriteValueMIPS64_OpMIPS64SRLV_0(v *Value) bool {
	// match: (SRLV _ (MOVVconst [c]))
	// cond: uint64(c)>=64
	// result: (MOVVconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 64) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SRLV x (MOVVconst [c]))
	// cond:
	// result: (SRLVconst x [c])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SRLVconst_0 constant-folds SRLVconst of a
// constant operand; the uint64 conversion forces a logical (zero-filling)
// shift before converting back to the signed AuxInt representation.
func rewriteValueMIPS64_OpMIPS64SRLVconst_0(v *Value) bool {
	// match: (SRLVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [int64(uint64(d)>>uint64(c))])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint64(d) >> uint64(c))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBV_0 rewrites SUBV: a 32-bit constant
// subtrahend becomes SUBVconst, x-x simplifies to 0, and 0-x becomes NEGV.
func rewriteValueMIPS64_OpMIPS64SUBV_0(v *Value) bool {
	// match: (SUBV x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (SUBVconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64SUBVconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (SUBV x x)
	// cond:
	// result: (MOVVconst [0])
	for {
		x := v.Args[1]
		if x != v.Args[0] {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (SUBV (MOVVconst [0]) x)
	// cond:
	// result: (NEGV x)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64SUBVconst_0 simplifies SUBVconst: x-0 == x,
// constant operands fold to d-c, and chained SUBVconst/ADDVconst are merged
// into a single ADDVconst when the combined constant fits in 32 bits.
func rewriteValueMIPS64_OpMIPS64SUBVconst_0(v *Value) bool {
	// match: (SUBVconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [d-c])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = d - c
		return true
	}
	// match: (SUBVconst [c] (SUBVconst [d] x))
	// cond: is32Bit(-c-d)
	// result: (ADDVconst [-c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64SUBVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(-c - d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = -c - d
		v.AddArg(x)
		return true
	}
	// match: (SUBVconst [c] (ADDVconst [d] x))
	// cond: is32Bit(-c+d)
	// result: (ADDVconst [-c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(-c + d)) {
			break
		}
		v.reset(OpMIPS64ADDVconst)
		v.AuxInt = -c + d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XOR_0 rewrites XOR values: a 32-bit constant
// operand (either order) becomes XORconst, and x^x simplifies to 0.
func rewriteValueMIPS64_OpMIPS64XOR_0(v *Value) bool {
	// match: (XOR x (MOVVconst [c]))
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_1.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR (MOVVconst [c]) x)
	// cond: is32Bit(c)
	// result: (XORconst [c] x)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		if !(is32Bit(c)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XOR x x)
	// cond:
	// result: (MOVVconst [0])
	for {
		x := v.Args[1]
		if x != v.Args[0] {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMIPS64XORconst_0 simplifies XORconst: x^0 == x,
// x^-1 becomes NORconst 0 (bitwise NOT), constant operands fold, and nested
// XORconst values merge when the combined constant fits in 32 bits.
func rewriteValueMIPS64_OpMIPS64XORconst_0(v *Value) bool {
	// match: (XORconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (XORconst [-1] x)
	// cond:
	// result: (NORconst [0] x)
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpMIPS64NORconst)
		v.AuxInt = 0
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVVconst [d]))
	// cond:
	// result: (MOVVconst [c^d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c ^ d
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond: is32Bit(c^d)
	// result: (XORconst [c^d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64XORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		if !(is32Bit(c ^ d)) {
			break
		}
		v.reset(OpMIPS64XORconst)
		v.AuxInt = c ^ d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpMod16_0 lowers the generic Mod16 op: both operands are
// sign-extended to 64 bits and the remainder is taken as Select0 of DIVV
// (on MIPS64 the divide op yields a (mod, div) tuple). Always rewrites.
func rewriteValueMIPS64_OpMod16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y)
	// cond:
	// result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod16u_0 lowers the generic Mod16u op: both operands
// are zero-extended to 64 bits and the remainder is taken as Select0 of the
// unsigned divide DIVVU. Always rewrites.
func rewriteValueMIPS64_OpMod16u_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// cond:
	// result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32_0 lowers the generic Mod32 op: operands are
// sign-extended to 64 bits, remainder taken as Select0 of DIVV. Always
// rewrites.
func rewriteValueMIPS64_OpMod32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32 x y)
	// cond:
	// result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod32u_0 lowers the generic Mod32u op: operands are
// zero-extended to 64 bits, remainder taken as Select0 of DIVVU. Always
// rewrites.
func rewriteValueMIPS64_OpMod32u_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod32u x y)
	// cond:
	// result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64_0 lowers the generic Mod64 op directly to
// Select0 of DIVV — no extension is needed at full width. Always rewrites.
func rewriteValueMIPS64_OpMod64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod64 x y)
	// cond:
	// result: (Select0 (DIVV x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod64u_0 lowers the generic Mod64u op directly to
// Select0 of the unsigned divide DIVVU. Always rewrites.
func rewriteValueMIPS64_OpMod64u_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod64u x y)
	// cond:
	// result: (Select0 (DIVVU x y))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8_0 lowers the generic Mod8 op: operands are
// sign-extended to 64 bits, remainder taken as Select0 of DIVV. Always
// rewrites.
func rewriteValueMIPS64_OpMod8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// cond:
	// result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpMod8u_0 lowers the generic Mod8u op: operands are
// zero-extended to 64 bits, remainder taken as Select0 of DIVVU. Always
// rewrites.
func rewriteValueMIPS64_OpMod8u_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// cond:
	// result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
  6491	func rewriteValueMIPS64_OpMove_0(v *Value) bool {
  6492		b := v.Block
  6493		typ := &b.Func.Config.Types
  6494		// match: (Move [0] _ _ mem)
  6495		// cond:
  6496		// result: mem
  6497		for {
  6498			if v.AuxInt != 0 {
  6499				break
  6500			}
  6501			mem := v.Args[2]
  6502			v.reset(OpCopy)
  6503			v.Type = mem.Type
  6504			v.AddArg(mem)
  6505			return true
  6506		}
  6507		// match: (Move [1] dst src mem)
  6508		// cond:
  6509		// result: (MOVBstore dst (MOVBload src mem) mem)
  6510		for {
  6511			if v.AuxInt != 1 {
  6512				break
  6513			}
  6514			mem := v.Args[2]
  6515			dst := v.Args[0]
  6516			src := v.Args[1]
  6517			v.reset(OpMIPS64MOVBstore)
  6518			v.AddArg(dst)
  6519			v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  6520			v0.AddArg(src)
  6521			v0.AddArg(mem)
  6522			v.AddArg(v0)
  6523			v.AddArg(mem)
  6524			return true
  6525		}
  6526		// match: (Move [2] {t} dst src mem)
  6527		// cond: t.(*types.Type).Alignment()%2 == 0
  6528		// result: (MOVHstore dst (MOVHload src mem) mem)
  6529		for {
  6530			if v.AuxInt != 2 {
  6531				break
  6532			}
  6533			t := v.Aux
  6534			mem := v.Args[2]
  6535			dst := v.Args[0]
  6536			src := v.Args[1]
  6537			if !(t.(*types.Type).Alignment()%2 == 0) {
  6538				break
  6539			}
  6540			v.reset(OpMIPS64MOVHstore)
  6541			v.AddArg(dst)
  6542			v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  6543			v0.AddArg(src)
  6544			v0.AddArg(mem)
  6545			v.AddArg(v0)
  6546			v.AddArg(mem)
  6547			return true
  6548		}
  6549		// match: (Move [2] dst src mem)
  6550		// cond:
  6551		// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
  6552		for {
  6553			if v.AuxInt != 2 {
  6554				break
  6555			}
  6556			mem := v.Args[2]
  6557			dst := v.Args[0]
  6558			src := v.Args[1]
  6559			v.reset(OpMIPS64MOVBstore)
  6560			v.AuxInt = 1
  6561			v.AddArg(dst)
  6562			v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  6563			v0.AuxInt = 1
  6564			v0.AddArg(src)
  6565			v0.AddArg(mem)
  6566			v.AddArg(v0)
  6567			v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  6568			v1.AddArg(dst)
  6569			v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  6570			v2.AddArg(src)
  6571			v2.AddArg(mem)
  6572			v1.AddArg(v2)
  6573			v1.AddArg(mem)
  6574			v.AddArg(v1)
  6575			return true
  6576		}
  6577		// match: (Move [4] {t} dst src mem)
  6578		// cond: t.(*types.Type).Alignment()%4 == 0
  6579		// result: (MOVWstore dst (MOVWload src mem) mem)
  6580		for {
  6581			if v.AuxInt != 4 {
  6582				break
  6583			}
  6584			t := v.Aux
  6585			mem := v.Args[2]
  6586			dst := v.Args[0]
  6587			src := v.Args[1]
  6588			if !(t.(*types.Type).Alignment()%4 == 0) {
  6589				break
  6590			}
  6591			v.reset(OpMIPS64MOVWstore)
  6592			v.AddArg(dst)
  6593			v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
  6594			v0.AddArg(src)
  6595			v0.AddArg(mem)
  6596			v.AddArg(v0)
  6597			v.AddArg(mem)
  6598			return true
  6599		}
  6600		// match: (Move [4] {t} dst src mem)
  6601		// cond: t.(*types.Type).Alignment()%2 == 0
  6602		// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
  6603		for {
  6604			if v.AuxInt != 4 {
  6605				break
  6606			}
  6607			t := v.Aux
  6608			mem := v.Args[2]
  6609			dst := v.Args[0]
  6610			src := v.Args[1]
  6611			if !(t.(*types.Type).Alignment()%2 == 0) {
  6612				break
  6613			}
  6614			v.reset(OpMIPS64MOVHstore)
  6615			v.AuxInt = 2
  6616			v.AddArg(dst)
  6617			v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  6618			v0.AuxInt = 2
  6619			v0.AddArg(src)
  6620			v0.AddArg(mem)
  6621			v.AddArg(v0)
  6622			v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  6623			v1.AddArg(dst)
  6624			v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  6625			v2.AddArg(src)
  6626			v2.AddArg(mem)
  6627			v1.AddArg(v2)
  6628			v1.AddArg(mem)
  6629			v.AddArg(v1)
  6630			return true
  6631		}
  6632		// match: (Move [4] dst src mem)
  6633		// cond:
  6634		// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
  6635		for {
  6636			if v.AuxInt != 4 {
  6637				break
  6638			}
  6639			mem := v.Args[2]
  6640			dst := v.Args[0]
  6641			src := v.Args[1]
  6642			v.reset(OpMIPS64MOVBstore)
  6643			v.AuxInt = 3
  6644			v.AddArg(dst)
  6645			v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  6646			v0.AuxInt = 3
  6647			v0.AddArg(src)
  6648			v0.AddArg(mem)
  6649			v.AddArg(v0)
  6650			v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  6651			v1.AuxInt = 2
  6652			v1.AddArg(dst)
  6653			v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  6654			v2.AuxInt = 2
  6655			v2.AddArg(src)
  6656			v2.AddArg(mem)
  6657			v1.AddArg(v2)
  6658			v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  6659			v3.AuxInt = 1
  6660			v3.AddArg(dst)
  6661			v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  6662			v4.AuxInt = 1
  6663			v4.AddArg(src)
  6664			v4.AddArg(mem)
  6665			v3.AddArg(v4)
  6666			v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
  6667			v5.AddArg(dst)
  6668			v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
  6669			v6.AddArg(src)
  6670			v6.AddArg(mem)
  6671			v5.AddArg(v6)
  6672			v5.AddArg(mem)
  6673			v3.AddArg(v5)
  6674			v1.AddArg(v3)
  6675			v.AddArg(v1)
  6676			return true
  6677		}
  6678		// match: (Move [8] {t} dst src mem)
  6679		// cond: t.(*types.Type).Alignment()%8 == 0
  6680		// result: (MOVVstore dst (MOVVload src mem) mem)
  6681		for {
  6682			if v.AuxInt != 8 {
  6683				break
  6684			}
  6685			t := v.Aux
  6686			mem := v.Args[2]
  6687			dst := v.Args[0]
  6688			src := v.Args[1]
  6689			if !(t.(*types.Type).Alignment()%8 == 0) {
  6690				break
  6691			}
  6692			v.reset(OpMIPS64MOVVstore)
  6693			v.AddArg(dst)
  6694			v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
  6695			v0.AddArg(src)
  6696			v0.AddArg(mem)
  6697			v.AddArg(v0)
  6698			v.AddArg(mem)
  6699			return true
  6700		}
  6701		// match: (Move [8] {t} dst src mem)
  6702		// cond: t.(*types.Type).Alignment()%4 == 0
  6703		// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
  6704		for {
  6705			if v.AuxInt != 8 {
  6706				break
  6707			}
  6708			t := v.Aux
  6709			mem := v.Args[2]
  6710			dst := v.Args[0]
  6711			src := v.Args[1]
  6712			if !(t.(*types.Type).Alignment()%4 == 0) {
  6713				break
  6714			}
  6715			v.reset(OpMIPS64MOVWstore)
  6716			v.AuxInt = 4
  6717			v.AddArg(dst)
  6718			v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
  6719			v0.AuxInt = 4
  6720			v0.AddArg(src)
  6721			v0.AddArg(mem)
  6722			v.AddArg(v0)
  6723			v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
  6724			v1.AddArg(dst)
  6725			v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
  6726			v2.AddArg(src)
  6727			v2.AddArg(mem)
  6728			v1.AddArg(v2)
  6729			v1.AddArg(mem)
  6730			v.AddArg(v1)
  6731			return true
  6732		}
  6733		// match: (Move [8] {t} dst src mem)
  6734		// cond: t.(*types.Type).Alignment()%2 == 0
  6735		// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
  6736		for {
  6737			if v.AuxInt != 8 {
  6738				break
  6739			}
  6740			t := v.Aux
  6741			mem := v.Args[2]
  6742			dst := v.Args[0]
  6743			src := v.Args[1]
  6744			if !(t.(*types.Type).Alignment()%2 == 0) {
  6745				break
  6746			}
  6747			v.reset(OpMIPS64MOVHstore)
  6748			v.AuxInt = 6
  6749			v.AddArg(dst)
  6750			v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  6751			v0.AuxInt = 6
  6752			v0.AddArg(src)
  6753			v0.AddArg(mem)
  6754			v.AddArg(v0)
  6755			v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  6756			v1.AuxInt = 4
  6757			v1.AddArg(dst)
  6758			v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  6759			v2.AuxInt = 4
  6760			v2.AddArg(src)
  6761			v2.AddArg(mem)
  6762			v1.AddArg(v2)
  6763			v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  6764			v3.AuxInt = 2
  6765			v3.AddArg(dst)
  6766			v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  6767			v4.AuxInt = 2
  6768			v4.AddArg(src)
  6769			v4.AddArg(mem)
  6770			v3.AddArg(v4)
  6771			v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
  6772			v5.AddArg(dst)
  6773			v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
  6774			v6.AddArg(src)
  6775			v6.AddArg(mem)
  6776			v5.AddArg(v6)
  6777			v5.AddArg(mem)
  6778			v3.AddArg(v5)
  6779			v1.AddArg(v3)
  6780			v.AddArg(v1)
  6781			return true
  6782		}
  6783		return false
  6784	}
// rewriteValueMIPS64_OpMove_10 lowers generic Move values (copies of a
// constant-size memory block) for the sizes handled in this generated
// chunk: 3, 6, 12, 16, and 24 bytes, plus the fallback for anything
// larger than 24 bytes or insufficiently aligned. Fixed sizes expand
// into chains of load/store pairs; each inner store produces the memory
// state consumed by the next store, so the AddArg ordering below is
// load-bearing. Reports whether v was rewritten.
func rewriteValueMIPS64_OpMove_10(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Move [3] dst src mem)
	// cond:
	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
	for {
		if v.AuxInt != 3 {
			break
		}
		// Move args are (dst, src, mem); AuxInt is the byte count.
		mem := v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		// Three byte copies, chained innermost (offset 0) to outermost (offset 2).
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 2
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v0.AuxInt = 2
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = 1
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v2.AuxInt = 1
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [6] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
	for {
		if v.AuxInt != 6 {
			break
		}
		// Aux holds the element type; its alignment gates halfword copies.
		t := v.Aux
		mem := v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		// Three halfword copies at offsets 4, 2, 0.
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 4
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v0.AuxInt = 4
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = 2
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v2.AuxInt = 2
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [12] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
	for {
		if v.AuxInt != 12 {
			break
		}
		t := v.Aux
		mem := v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		// Three word copies at offsets 8, 4, 0.
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v2.AuxInt = 4
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [16] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))
	for {
		if v.AuxInt != 16 {
			break
		}
		t := v.Aux
		mem := v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		// Two doubleword copies at offsets 8 and 0.
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [24] {t} dst src mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)))
	for {
		if v.AuxInt != 24 {
			break
		}
		t := v.Aux
		mem := v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		// Three doubleword copies at offsets 16, 8, 0.
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 16
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v0.AuxInt = 16
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = 8
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v2.AuxInt = 8
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s > 24 || t.(*types.Type).Alignment()%8 != 0
	// result: (LoweredMove [t.(*types.Type).Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
	for {
		s := v.AuxInt
		t := v.Aux
		mem := v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		if !(s > 24 || t.(*types.Type).Alignment()%8 != 0) {
			break
		}
		// Fallback: emit a runtime copy loop (LoweredMove). The ADDVconst
		// computes the address of the last element to copy, derived from
		// the size and the per-alignment step chosen by moveSize.
		v.reset(OpMIPS64LoweredMove)
		v.AuxInt = t.(*types.Type).Alignment()
		v.AddArg(dst)
		v.AddArg(src)
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
		v0.AddArg(src)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
  7006	func rewriteValueMIPS64_OpMul16_0(v *Value) bool {
  7007		b := v.Block
  7008		typ := &b.Func.Config.Types
  7009		// match: (Mul16 x y)
  7010		// cond:
  7011		// result: (Select1 (MULVU x y))
  7012		for {
  7013			y := v.Args[1]
  7014			x := v.Args[0]
  7015			v.reset(OpSelect1)
  7016			v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
  7017			v0.AddArg(x)
  7018			v0.AddArg(y)
  7019			v.AddArg(v0)
  7020			return true
  7021		}
  7022	}
  7023	func rewriteValueMIPS64_OpMul32_0(v *Value) bool {
  7024		b := v.Block
  7025		typ := &b.Func.Config.Types
  7026		// match: (Mul32 x y)
  7027		// cond:
  7028		// result: (Select1 (MULVU x y))
  7029		for {
  7030			y := v.Args[1]
  7031			x := v.Args[0]
  7032			v.reset(OpSelect1)
  7033			v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
  7034			v0.AddArg(x)
  7035			v0.AddArg(y)
  7036			v.AddArg(v0)
  7037			return true
  7038		}
  7039	}
  7040	func rewriteValueMIPS64_OpMul32F_0(v *Value) bool {
  7041		// match: (Mul32F x y)
  7042		// cond:
  7043		// result: (MULF x y)
  7044		for {
  7045			y := v.Args[1]
  7046			x := v.Args[0]
  7047			v.reset(OpMIPS64MULF)
  7048			v.AddArg(x)
  7049			v.AddArg(y)
  7050			return true
  7051		}
  7052	}
  7053	func rewriteValueMIPS64_OpMul64_0(v *Value) bool {
  7054		b := v.Block
  7055		typ := &b.Func.Config.Types
  7056		// match: (Mul64 x y)
  7057		// cond:
  7058		// result: (Select1 (MULVU x y))
  7059		for {
  7060			y := v.Args[1]
  7061			x := v.Args[0]
  7062			v.reset(OpSelect1)
  7063			v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
  7064			v0.AddArg(x)
  7065			v0.AddArg(y)
  7066			v.AddArg(v0)
  7067			return true
  7068		}
  7069	}
  7070	func rewriteValueMIPS64_OpMul64F_0(v *Value) bool {
  7071		// match: (Mul64F x y)
  7072		// cond:
  7073		// result: (MULD x y)
  7074		for {
  7075			y := v.Args[1]
  7076			x := v.Args[0]
  7077			v.reset(OpMIPS64MULD)
  7078			v.AddArg(x)
  7079			v.AddArg(y)
  7080			return true
  7081		}
  7082	}
  7083	func rewriteValueMIPS64_OpMul8_0(v *Value) bool {
  7084		b := v.Block
  7085		typ := &b.Func.Config.Types
  7086		// match: (Mul8 x y)
  7087		// cond:
  7088		// result: (Select1 (MULVU x y))
  7089		for {
  7090			y := v.Args[1]
  7091			x := v.Args[0]
  7092			v.reset(OpSelect1)
  7093			v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
  7094			v0.AddArg(x)
  7095			v0.AddArg(y)
  7096			v.AddArg(v0)
  7097			return true
  7098		}
  7099	}
  7100	func rewriteValueMIPS64_OpNeg16_0(v *Value) bool {
  7101		// match: (Neg16 x)
  7102		// cond:
  7103		// result: (NEGV x)
  7104		for {
  7105			x := v.Args[0]
  7106			v.reset(OpMIPS64NEGV)
  7107			v.AddArg(x)
  7108			return true
  7109		}
  7110	}
  7111	func rewriteValueMIPS64_OpNeg32_0(v *Value) bool {
  7112		// match: (Neg32 x)
  7113		// cond:
  7114		// result: (NEGV x)
  7115		for {
  7116			x := v.Args[0]
  7117			v.reset(OpMIPS64NEGV)
  7118			v.AddArg(x)
  7119			return true
  7120		}
  7121	}
  7122	func rewriteValueMIPS64_OpNeg32F_0(v *Value) bool {
  7123		// match: (Neg32F x)
  7124		// cond:
  7125		// result: (NEGF x)
  7126		for {
  7127			x := v.Args[0]
  7128			v.reset(OpMIPS64NEGF)
  7129			v.AddArg(x)
  7130			return true
  7131		}
  7132	}
  7133	func rewriteValueMIPS64_OpNeg64_0(v *Value) bool {
  7134		// match: (Neg64 x)
  7135		// cond:
  7136		// result: (NEGV x)
  7137		for {
  7138			x := v.Args[0]
  7139			v.reset(OpMIPS64NEGV)
  7140			v.AddArg(x)
  7141			return true
  7142		}
  7143	}
  7144	func rewriteValueMIPS64_OpNeg64F_0(v *Value) bool {
  7145		// match: (Neg64F x)
  7146		// cond:
  7147		// result: (NEGD x)
  7148		for {
  7149			x := v.Args[0]
  7150			v.reset(OpMIPS64NEGD)
  7151			v.AddArg(x)
  7152			return true
  7153		}
  7154	}
  7155	func rewriteValueMIPS64_OpNeg8_0(v *Value) bool {
  7156		// match: (Neg8 x)
  7157		// cond:
  7158		// result: (NEGV x)
  7159		for {
  7160			x := v.Args[0]
  7161			v.reset(OpMIPS64NEGV)
  7162			v.AddArg(x)
  7163			return true
  7164		}
  7165	}
  7166	func rewriteValueMIPS64_OpNeq16_0(v *Value) bool {
  7167		b := v.Block
  7168		typ := &b.Func.Config.Types
  7169		// match: (Neq16 x y)
  7170		// cond:
  7171		// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
  7172		for {
  7173			y := v.Args[1]
  7174			x := v.Args[0]
  7175			v.reset(OpMIPS64SGTU)
  7176			v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
  7177			v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  7178			v1.AddArg(x)
  7179			v0.AddArg(v1)
  7180			v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7181			v2.AddArg(y)
  7182			v0.AddArg(v2)
  7183			v.AddArg(v0)
  7184			v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7185			v3.AuxInt = 0
  7186			v.AddArg(v3)
  7187			return true
  7188		}
  7189	}
  7190	func rewriteValueMIPS64_OpNeq32_0(v *Value) bool {
  7191		b := v.Block
  7192		typ := &b.Func.Config.Types
  7193		// match: (Neq32 x y)
  7194		// cond:
  7195		// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
  7196		for {
  7197			y := v.Args[1]
  7198			x := v.Args[0]
  7199			v.reset(OpMIPS64SGTU)
  7200			v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
  7201			v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7202			v1.AddArg(x)
  7203			v0.AddArg(v1)
  7204			v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  7205			v2.AddArg(y)
  7206			v0.AddArg(v2)
  7207			v.AddArg(v0)
  7208			v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7209			v3.AuxInt = 0
  7210			v.AddArg(v3)
  7211			return true
  7212		}
  7213	}
  7214	func rewriteValueMIPS64_OpNeq32F_0(v *Value) bool {
  7215		b := v.Block
  7216		// match: (Neq32F x y)
  7217		// cond:
  7218		// result: (FPFlagFalse (CMPEQF x y))
  7219		for {
  7220			y := v.Args[1]
  7221			x := v.Args[0]
  7222			v.reset(OpMIPS64FPFlagFalse)
  7223			v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
  7224			v0.AddArg(x)
  7225			v0.AddArg(y)
  7226			v.AddArg(v0)
  7227			return true
  7228		}
  7229	}
  7230	func rewriteValueMIPS64_OpNeq64_0(v *Value) bool {
  7231		b := v.Block
  7232		typ := &b.Func.Config.Types
  7233		// match: (Neq64 x y)
  7234		// cond:
  7235		// result: (SGTU (XOR x y) (MOVVconst [0]))
  7236		for {
  7237			y := v.Args[1]
  7238			x := v.Args[0]
  7239			v.reset(OpMIPS64SGTU)
  7240			v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
  7241			v0.AddArg(x)
  7242			v0.AddArg(y)
  7243			v.AddArg(v0)
  7244			v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7245			v1.AuxInt = 0
  7246			v.AddArg(v1)
  7247			return true
  7248		}
  7249	}
  7250	func rewriteValueMIPS64_OpNeq64F_0(v *Value) bool {
  7251		b := v.Block
  7252		// match: (Neq64F x y)
  7253		// cond:
  7254		// result: (FPFlagFalse (CMPEQD x y))
  7255		for {
  7256			y := v.Args[1]
  7257			x := v.Args[0]
  7258			v.reset(OpMIPS64FPFlagFalse)
  7259			v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
  7260			v0.AddArg(x)
  7261			v0.AddArg(y)
  7262			v.AddArg(v0)
  7263			return true
  7264		}
  7265	}
  7266	func rewriteValueMIPS64_OpNeq8_0(v *Value) bool {
  7267		b := v.Block
  7268		typ := &b.Func.Config.Types
  7269		// match: (Neq8 x y)
  7270		// cond:
  7271		// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
  7272		for {
  7273			y := v.Args[1]
  7274			x := v.Args[0]
  7275			v.reset(OpMIPS64SGTU)
  7276			v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
  7277			v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7278			v1.AddArg(x)
  7279			v0.AddArg(v1)
  7280			v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
  7281			v2.AddArg(y)
  7282			v0.AddArg(v2)
  7283			v.AddArg(v0)
  7284			v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7285			v3.AuxInt = 0
  7286			v.AddArg(v3)
  7287			return true
  7288		}
  7289	}
  7290	func rewriteValueMIPS64_OpNeqB_0(v *Value) bool {
  7291		// match: (NeqB x y)
  7292		// cond:
  7293		// result: (XOR x y)
  7294		for {
  7295			y := v.Args[1]
  7296			x := v.Args[0]
  7297			v.reset(OpMIPS64XOR)
  7298			v.AddArg(x)
  7299			v.AddArg(y)
  7300			return true
  7301		}
  7302	}
  7303	func rewriteValueMIPS64_OpNeqPtr_0(v *Value) bool {
  7304		b := v.Block
  7305		typ := &b.Func.Config.Types
  7306		// match: (NeqPtr x y)
  7307		// cond:
  7308		// result: (SGTU (XOR x y) (MOVVconst [0]))
  7309		for {
  7310			y := v.Args[1]
  7311			x := v.Args[0]
  7312			v.reset(OpMIPS64SGTU)
  7313			v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
  7314			v0.AddArg(x)
  7315			v0.AddArg(y)
  7316			v.AddArg(v0)
  7317			v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7318			v1.AuxInt = 0
  7319			v.AddArg(v1)
  7320			return true
  7321		}
  7322	}
  7323	func rewriteValueMIPS64_OpNilCheck_0(v *Value) bool {
  7324		// match: (NilCheck ptr mem)
  7325		// cond:
  7326		// result: (LoweredNilCheck ptr mem)
  7327		for {
  7328			mem := v.Args[1]
  7329			ptr := v.Args[0]
  7330			v.reset(OpMIPS64LoweredNilCheck)
  7331			v.AddArg(ptr)
  7332			v.AddArg(mem)
  7333			return true
  7334		}
  7335	}
  7336	func rewriteValueMIPS64_OpNot_0(v *Value) bool {
  7337		// match: (Not x)
  7338		// cond:
  7339		// result: (XORconst [1] x)
  7340		for {
  7341			x := v.Args[0]
  7342			v.reset(OpMIPS64XORconst)
  7343			v.AuxInt = 1
  7344			v.AddArg(x)
  7345			return true
  7346		}
  7347	}
  7348	func rewriteValueMIPS64_OpOffPtr_0(v *Value) bool {
  7349		// match: (OffPtr [off] ptr:(SP))
  7350		// cond:
  7351		// result: (MOVVaddr [off] ptr)
  7352		for {
  7353			off := v.AuxInt
  7354			ptr := v.Args[0]
  7355			if ptr.Op != OpSP {
  7356				break
  7357			}
  7358			v.reset(OpMIPS64MOVVaddr)
  7359			v.AuxInt = off
  7360			v.AddArg(ptr)
  7361			return true
  7362		}
  7363		// match: (OffPtr [off] ptr)
  7364		// cond:
  7365		// result: (ADDVconst [off] ptr)
  7366		for {
  7367			off := v.AuxInt
  7368			ptr := v.Args[0]
  7369			v.reset(OpMIPS64ADDVconst)
  7370			v.AuxInt = off
  7371			v.AddArg(ptr)
  7372			return true
  7373		}
  7374	}
  7375	func rewriteValueMIPS64_OpOr16_0(v *Value) bool {
  7376		// match: (Or16 x y)
  7377		// cond:
  7378		// result: (OR x y)
  7379		for {
  7380			y := v.Args[1]
  7381			x := v.Args[0]
  7382			v.reset(OpMIPS64OR)
  7383			v.AddArg(x)
  7384			v.AddArg(y)
  7385			return true
  7386		}
  7387	}
  7388	func rewriteValueMIPS64_OpOr32_0(v *Value) bool {
  7389		// match: (Or32 x y)
  7390		// cond:
  7391		// result: (OR x y)
  7392		for {
  7393			y := v.Args[1]
  7394			x := v.Args[0]
  7395			v.reset(OpMIPS64OR)
  7396			v.AddArg(x)
  7397			v.AddArg(y)
  7398			return true
  7399		}
  7400	}
  7401	func rewriteValueMIPS64_OpOr64_0(v *Value) bool {
  7402		// match: (Or64 x y)
  7403		// cond:
  7404		// result: (OR x y)
  7405		for {
  7406			y := v.Args[1]
  7407			x := v.Args[0]
  7408			v.reset(OpMIPS64OR)
  7409			v.AddArg(x)
  7410			v.AddArg(y)
  7411			return true
  7412		}
  7413	}
  7414	func rewriteValueMIPS64_OpOr8_0(v *Value) bool {
  7415		// match: (Or8 x y)
  7416		// cond:
  7417		// result: (OR x y)
  7418		for {
  7419			y := v.Args[1]
  7420			x := v.Args[0]
  7421			v.reset(OpMIPS64OR)
  7422			v.AddArg(x)
  7423			v.AddArg(y)
  7424			return true
  7425		}
  7426	}
  7427	func rewriteValueMIPS64_OpOrB_0(v *Value) bool {
  7428		// match: (OrB x y)
  7429		// cond:
  7430		// result: (OR x y)
  7431		for {
  7432			y := v.Args[1]
  7433			x := v.Args[0]
  7434			v.reset(OpMIPS64OR)
  7435			v.AddArg(x)
  7436			v.AddArg(y)
  7437			return true
  7438		}
  7439	}
  7440	func rewriteValueMIPS64_OpPanicBounds_0(v *Value) bool {
  7441		// match: (PanicBounds [kind] x y mem)
  7442		// cond: boundsABI(kind) == 0
  7443		// result: (LoweredPanicBoundsA [kind] x y mem)
  7444		for {
  7445			kind := v.AuxInt
  7446			mem := v.Args[2]
  7447			x := v.Args[0]
  7448			y := v.Args[1]
  7449			if !(boundsABI(kind) == 0) {
  7450				break
  7451			}
  7452			v.reset(OpMIPS64LoweredPanicBoundsA)
  7453			v.AuxInt = kind
  7454			v.AddArg(x)
  7455			v.AddArg(y)
  7456			v.AddArg(mem)
  7457			return true
  7458		}
  7459		// match: (PanicBounds [kind] x y mem)
  7460		// cond: boundsABI(kind) == 1
  7461		// result: (LoweredPanicBoundsB [kind] x y mem)
  7462		for {
  7463			kind := v.AuxInt
  7464			mem := v.Args[2]
  7465			x := v.Args[0]
  7466			y := v.Args[1]
  7467			if !(boundsABI(kind) == 1) {
  7468				break
  7469			}
  7470			v.reset(OpMIPS64LoweredPanicBoundsB)
  7471			v.AuxInt = kind
  7472			v.AddArg(x)
  7473			v.AddArg(y)
  7474			v.AddArg(mem)
  7475			return true
  7476		}
  7477		// match: (PanicBounds [kind] x y mem)
  7478		// cond: boundsABI(kind) == 2
  7479		// result: (LoweredPanicBoundsC [kind] x y mem)
  7480		for {
  7481			kind := v.AuxInt
  7482			mem := v.Args[2]
  7483			x := v.Args[0]
  7484			y := v.Args[1]
  7485			if !(boundsABI(kind) == 2) {
  7486				break
  7487			}
  7488			v.reset(OpMIPS64LoweredPanicBoundsC)
  7489			v.AuxInt = kind
  7490			v.AddArg(x)
  7491			v.AddArg(y)
  7492			v.AddArg(mem)
  7493			return true
  7494		}
  7495		return false
  7496	}
  7497	func rewriteValueMIPS64_OpRotateLeft16_0(v *Value) bool {
  7498		b := v.Block
  7499		typ := &b.Func.Config.Types
  7500		// match: (RotateLeft16 <t> x (MOVVconst [c]))
  7501		// cond:
  7502		// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))
  7503		for {
  7504			t := v.Type
  7505			_ = v.Args[1]
  7506			x := v.Args[0]
  7507			v_1 := v.Args[1]
  7508			if v_1.Op != OpMIPS64MOVVconst {
  7509				break
  7510			}
  7511			c := v_1.AuxInt
  7512			v.reset(OpOr16)
  7513			v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
  7514			v0.AddArg(x)
  7515			v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7516			v1.AuxInt = c & 15
  7517			v0.AddArg(v1)
  7518			v.AddArg(v0)
  7519			v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
  7520			v2.AddArg(x)
  7521			v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7522			v3.AuxInt = -c & 15
  7523			v2.AddArg(v3)
  7524			v.AddArg(v2)
  7525			return true
  7526		}
  7527		return false
  7528	}
  7529	func rewriteValueMIPS64_OpRotateLeft32_0(v *Value) bool {
  7530		b := v.Block
  7531		typ := &b.Func.Config.Types
  7532		// match: (RotateLeft32 <t> x (MOVVconst [c]))
  7533		// cond:
  7534		// result: (Or32 (Lsh32x64 <t> x (MOVVconst [c&31])) (Rsh32Ux64 <t> x (MOVVconst [-c&31])))
  7535		for {
  7536			t := v.Type
  7537			_ = v.Args[1]
  7538			x := v.Args[0]
  7539			v_1 := v.Args[1]
  7540			if v_1.Op != OpMIPS64MOVVconst {
  7541				break
  7542			}
  7543			c := v_1.AuxInt
  7544			v.reset(OpOr32)
  7545			v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
  7546			v0.AddArg(x)
  7547			v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7548			v1.AuxInt = c & 31
  7549			v0.AddArg(v1)
  7550			v.AddArg(v0)
  7551			v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
  7552			v2.AddArg(x)
  7553			v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7554			v3.AuxInt = -c & 31
  7555			v2.AddArg(v3)
  7556			v.AddArg(v2)
  7557			return true
  7558		}
  7559		return false
  7560	}
  7561	func rewriteValueMIPS64_OpRotateLeft64_0(v *Value) bool {
  7562		b := v.Block
  7563		typ := &b.Func.Config.Types
  7564		// match: (RotateLeft64 <t> x (MOVVconst [c]))
  7565		// cond:
  7566		// result: (Or64 (Lsh64x64 <t> x (MOVVconst [c&63])) (Rsh64Ux64 <t> x (MOVVconst [-c&63])))
  7567		for {
  7568			t := v.Type
  7569			_ = v.Args[1]
  7570			x := v.Args[0]
  7571			v_1 := v.Args[1]
  7572			if v_1.Op != OpMIPS64MOVVconst {
  7573				break
  7574			}
  7575			c := v_1.AuxInt
  7576			v.reset(OpOr64)
  7577			v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
  7578			v0.AddArg(x)
  7579			v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7580			v1.AuxInt = c & 63
  7581			v0.AddArg(v1)
  7582			v.AddArg(v0)
  7583			v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
  7584			v2.AddArg(x)
  7585			v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7586			v3.AuxInt = -c & 63
  7587			v2.AddArg(v3)
  7588			v.AddArg(v2)
  7589			return true
  7590		}
  7591		return false
  7592	}
  7593	func rewriteValueMIPS64_OpRotateLeft8_0(v *Value) bool {
  7594		b := v.Block
  7595		typ := &b.Func.Config.Types
  7596		// match: (RotateLeft8 <t> x (MOVVconst [c]))
  7597		// cond:
  7598		// result: (Or8 (Lsh8x64 <t> x (MOVVconst [c&7])) (Rsh8Ux64 <t> x (MOVVconst [-c&7])))
  7599		for {
  7600			t := v.Type
  7601			_ = v.Args[1]
  7602			x := v.Args[0]
  7603			v_1 := v.Args[1]
  7604			if v_1.Op != OpMIPS64MOVVconst {
  7605				break
  7606			}
  7607			c := v_1.AuxInt
  7608			v.reset(OpOr8)
  7609			v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
  7610			v0.AddArg(x)
  7611			v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7612			v1.AuxInt = c & 7
  7613			v0.AddArg(v1)
  7614			v.AddArg(v0)
  7615			v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
  7616			v2.AddArg(x)
  7617			v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7618			v3.AuxInt = -c & 7
  7619			v2.AddArg(v3)
  7620			v.AddArg(v2)
  7621			return true
  7622		}
  7623		return false
  7624	}
  7625	func rewriteValueMIPS64_OpRound32F_0(v *Value) bool {
  7626		// match: (Round32F x)
  7627		// cond:
  7628		// result: x
  7629		for {
  7630			x := v.Args[0]
  7631			v.reset(OpCopy)
  7632			v.Type = x.Type
  7633			v.AddArg(x)
  7634			return true
  7635		}
  7636	}
  7637	func rewriteValueMIPS64_OpRound64F_0(v *Value) bool {
  7638		// match: (Round64F x)
  7639		// cond:
  7640		// result: x
  7641		for {
  7642			x := v.Args[0]
  7643			v.reset(OpCopy)
  7644			v.Type = x.Type
  7645			v.AddArg(x)
  7646			return true
  7647		}
  7648	}
  7649	func rewriteValueMIPS64_OpRsh16Ux16_0(v *Value) bool {
  7650		b := v.Block
  7651		typ := &b.Func.Config.Types
  7652		// match: (Rsh16Ux16 <t> x y)
  7653		// cond:
  7654		// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
  7655		for {
  7656			t := v.Type
  7657			y := v.Args[1]
  7658			x := v.Args[0]
  7659			v.reset(OpMIPS64AND)
  7660			v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
  7661			v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
  7662			v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
  7663			v2.AuxInt = 64
  7664			v1.AddArg(v2)
  7665			v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7666			v3.AddArg(y)
  7667			v1.AddArg(v3)
  7668			v0.AddArg(v1)
  7669			v.AddArg(v0)
  7670			v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
  7671			v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7672			v5.AddArg(x)
  7673			v4.AddArg(v5)
  7674			v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
  7675			v6.AddArg(y)
  7676			v4.AddArg(v6)
  7677			v.AddArg(v4)
  7678			return true
  7679		}
  7680	}
// rewriteValueMIPS64_OpRsh16Ux32_0 lowers Rsh16Ux32 (unsigned right shift of a
// 16-bit value by a 32-bit amount). The SRLV result is masked by
// NEGV(SGTU 64 y') so oversized shifts (>= 64 after zero-extension) yield 0,
// as Go requires. Generated code — the AddArg call order fixes operand
// positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh16Ux32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux64_0 lowers Rsh16Ux64 (unsigned right shift of a
// 16-bit value by a 64-bit amount). The shift amount y is already 64 bits, so
// no extension is needed; the SRLV result is masked by NEGV(SGTU 64 y) so
// oversized shifts yield 0, as Go requires. Generated code — the AddArg call
// order fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh16Ux64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16Ux8_0 lowers Rsh16Ux8 (unsigned right shift of a
// 16-bit value by an 8-bit amount). The SRLV result is masked by
// NEGV(SGTU 64 y') so oversized shifts (>= 64 after zero-extension) yield 0,
// as Go requires. Generated code — the AddArg call order fixes operand
// positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh16Ux8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x16_0 lowers Rsh16x16 (signed right shift of a
// 16-bit value by a 16-bit amount) to SRAV of the sign-extended value. The
// shift amount is saturated via OR(NEGV(SGTU y' 63), y'): when y' > 63 the OR
// operand is all ones, so the effective shift clamps to 63 and the result is
// replicated sign bits, matching Go's semantics for oversized signed shifts.
// Generated code — the AddArg call order fixes operand positions; do not
// reorder statements.
func rewriteValueMIPS64_OpRsh16x16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x32_0 lowers Rsh16x32 (signed right shift of a
// 16-bit value by a 32-bit amount) to SRAV of the sign-extended value, with
// the shift amount saturated to 63 via OR(NEGV(SGTU y' 63), y') so oversized
// shifts yield replicated sign bits, as Go requires. Generated code — the
// AddArg call order fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh16x32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x64_0 lowers Rsh16x64 (signed right shift of a
// 16-bit value by a 64-bit amount). The shift amount y is already 64 bits, so
// no extension is needed; it is saturated to 63 via OR(NEGV(SGTU y 63), y) so
// oversized shifts yield replicated sign bits, as Go requires. Generated
// code — the AddArg call order fixes operand positions; do not reorder
// statements.
func rewriteValueMIPS64_OpRsh16x64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh16x8_0 lowers Rsh16x8 (signed right shift of a
// 16-bit value by an 8-bit amount) to SRAV of the sign-extended value, with
// the shift amount saturated to 63 via OR(NEGV(SGTU y' 63), y') so oversized
// shifts yield replicated sign bits, as Go requires. Generated code — the
// AddArg call order fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh16x8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux16_0 lowers Rsh32Ux16 (unsigned right shift of a
// 32-bit value by a 16-bit amount). The SRLV result is masked by
// NEGV(SGTU 64 y') so oversized shifts (>= 64 after zero-extension) yield 0,
// as Go requires. Generated code — the AddArg call order fixes operand
// positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh32Ux16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux32_0 lowers Rsh32Ux32 (unsigned right shift of a
// 32-bit value by a 32-bit amount). The SRLV result is masked by
// NEGV(SGTU 64 y') so oversized shifts (>= 64 after zero-extension) yield 0,
// as Go requires. Generated code — the AddArg call order fixes operand
// positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh32Ux32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux64_0 lowers Rsh32Ux64 (unsigned right shift of a
// 32-bit value by a 64-bit amount). The shift amount y is already 64 bits, so
// no extension is needed; the SRLV result is masked by NEGV(SGTU 64 y) so
// oversized shifts yield 0, as Go requires. Generated code — the AddArg call
// order fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh32Ux64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32Ux8_0 lowers Rsh32Ux8 (unsigned right shift of a
// 32-bit value by an 8-bit amount). The SRLV result is masked by
// NEGV(SGTU 64 y') so oversized shifts (>= 64 after zero-extension) yield 0,
// as Go requires. Generated code — the AddArg call order fixes operand
// positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh32Ux8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x16_0 lowers Rsh32x16 (signed right shift of a
// 32-bit value by a 16-bit amount) to SRAV of the sign-extended value, with
// the shift amount saturated to 63 via OR(NEGV(SGTU y' 63), y') so oversized
// shifts yield replicated sign bits, as Go requires. Generated code — the
// AddArg call order fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh32x16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x32_0 lowers Rsh32x32 (signed right shift of a
// 32-bit value by a 32-bit amount) to SRAV of the sign-extended value, with
// the shift amount saturated to 63 via OR(NEGV(SGTU y' 63), y') so oversized
// shifts yield replicated sign bits, as Go requires. Generated code — the
// AddArg call order fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh32x32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x64_0 lowers Rsh32x64 (signed right shift of a
// 32-bit value by a 64-bit amount). The shift amount y is already 64 bits, so
// no extension is needed; it is saturated to 63 via OR(NEGV(SGTU y 63), y) so
// oversized shifts yield replicated sign bits, as Go requires. Generated
// code — the AddArg call order fixes operand positions; do not reorder
// statements.
func rewriteValueMIPS64_OpRsh32x64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh32x8_0 lowers Rsh32x8 (signed right shift of a
// 32-bit value by an 8-bit amount) to SRAV of the sign-extended value, with
// the shift amount saturated to 63 via OR(NEGV(SGTU y' 63), y') so oversized
// shifts yield replicated sign bits, as Go requires. Generated code — the
// AddArg call order fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh32x8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux16_0 lowers Rsh64Ux16 (unsigned right shift of a
// 64-bit value by a 16-bit amount). x is already 64 bits so only y is
// zero-extended; the SRLV result is masked by NEGV(SGTU 64 y') so oversized
// shifts yield 0, as Go requires. Generated code — the AddArg call order
// fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh64Ux16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux32_0 lowers Rsh64Ux32 (unsigned right shift of a
// 64-bit value by a 32-bit amount). x is already 64 bits so only y is
// zero-extended; the SRLV result is masked by NEGV(SGTU 64 y') so oversized
// shifts yield 0, as Go requires. Generated code — the AddArg call order
// fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh64Ux32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux64_0 lowers Rsh64Ux64 (unsigned right shift of a
// 64-bit value by a 64-bit amount). Both operands are already 64 bits, so no
// extension is needed; the SRLV result is masked by NEGV(SGTU 64 y) so
// oversized shifts yield 0, as Go requires. Generated code — the AddArg call
// order fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh64Ux64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v3.AddArg(x)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64Ux8_0 lowers Rsh64Ux8 (unsigned right shift of a
// 64-bit value by an 8-bit amount). x is already 64 bits so only y is
// zero-extended; the SRLV result is masked by NEGV(SGTU 64 y') so oversized
// shifts yield 0, as Go requires. Generated code — the AddArg call order
// fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh64Ux8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x16_0 lowers Rsh64x16 (signed right shift of a
// 64-bit value by a 16-bit amount). x is already 64 bits so no sign extension
// is needed; the shift amount is saturated to 63 via OR(NEGV(SGTU y' 63), y')
// so oversized shifts yield replicated sign bits, as Go requires. Generated
// code — the AddArg call order fixes operand positions; do not reorder
// statements.
func rewriteValueMIPS64_OpRsh64x16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x32_0 lowers Rsh64x32 (signed right shift of a
// 64-bit value by a 32-bit amount). x is already 64 bits so no sign extension
// is needed; the shift amount is saturated to 63 via OR(NEGV(SGTU y' 63), y')
// so oversized shifts yield replicated sign bits, as Go requires. Generated
// code — the AddArg call order fixes operand positions; do not reorder
// statements.
func rewriteValueMIPS64_OpRsh64x32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x64_0 lowers Rsh64x64 (signed right shift of a
// 64-bit value by a 64-bit amount). Both operands are already 64 bits; the
// shift amount is saturated to 63 via OR(NEGV(SGTU y 63), y) so oversized
// shifts yield replicated sign bits, as Go requires. Generated code — the
// AddArg call order fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh64x64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x64 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2.AddArg(y)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v3.AuxInt = 63
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh64x8_0 lowers Rsh64x8 (signed right shift of a
// 64-bit value by an 8-bit amount). x is already 64 bits so no sign extension
// is needed; the shift amount is saturated to 63 via OR(NEGV(SGTU y' 63), y')
// so oversized shifts yield replicated sign bits, as Go requires. Generated
// code — the AddArg call order fixes operand positions; do not reorder
// statements.
func rewriteValueMIPS64_OpRsh64x8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// cond:
	// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v2.AddArg(v4)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v0.AddArg(v5)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux16_0 lowers Rsh8Ux16 (unsigned right shift of an
// 8-bit value by a 16-bit amount). The SRLV result is masked by
// NEGV(SGTU 64 y') so oversized shifts (>= 64 after zero-extension) yield 0,
// as Go requires. Generated code — the AddArg call order fixes operand
// positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh8Ux16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux32_0 lowers Rsh8Ux32 (unsigned right shift of an
// 8-bit value by a 32-bit amount). The SRLV result is masked by
// NEGV(SGTU 64 y') so oversized shifts (>= 64 after zero-extension) yield 0,
// as Go requires. Generated code — the AddArg call order fixes operand
// positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh8Ux32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux64_0 lowers Rsh8Ux64 (unsigned right shift of an
// 8-bit value by a 64-bit amount). The shift amount y is already 64 bits, so
// no extension is needed; the SRLV result is masked by NEGV(SGTU 64 y) so
// oversized shifts yield 0, as Go requires. Generated code — the AddArg call
// order fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh8Ux64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(x)
		v3.AddArg(v4)
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8Ux8_0 lowers Rsh8Ux8 (unsigned right shift of an
// 8-bit value by an 8-bit amount). The SRLV result is masked by
// NEGV(SGTU 64 y') so oversized shifts (>= 64 after zero-extension) yield 0,
// as Go requires. Generated code — the AddArg call order fixes operand
// positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh8Ux8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// cond:
	// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64AND)
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 64
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(x)
		v4.AddArg(v5)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v4.AddArg(v6)
		v.AddArg(v4)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x16_0 lowers Rsh8x16 (signed right shift of an
// 8-bit value by a 16-bit amount) to SRAV of the sign-extended value, with
// the shift amount saturated to 63 via OR(NEGV(SGTU y' 63), y') so oversized
// shifts yield replicated sign bits, as Go requires. Generated code — the
// AddArg call order fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh8x16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x32_0 lowers Rsh8x32 (signed right shift of an
// 8-bit value by a 32-bit amount) to SRAV of the sign-extended value, with
// the shift amount saturated to 63 via OR(NEGV(SGTU y' 63), y') so oversized
// shifts yield replicated sign bits, as Go requires. Generated code — the
// AddArg call order fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh8x32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x64_0 lowers Rsh8x64 (signed right shift of an
// 8-bit value by a 64-bit amount). The shift amount y is already 64 bits, so
// no extension is needed; it is saturated to 63 via OR(NEGV(SGTU y 63), y) so
// oversized shifts yield replicated sign bits, as Go requires. Generated
// code — the AddArg call order fixes operand positions; do not reorder
// statements.
func rewriteValueMIPS64_OpRsh8x64_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpRsh8x8_0 lowers Rsh8x8 (signed right shift of an
// 8-bit value by an 8-bit amount) to SRAV of the sign-extended value, with
// the shift amount saturated to 63 via OR(NEGV(SGTU y' 63), y') so oversized
// shifts yield replicated sign bits, as Go requires. Generated code — the
// AddArg call order fixes operand positions; do not reorder statements.
func rewriteValueMIPS64_OpRsh8x8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
// rewriteValueMIPS64_OpSelect0_0 optimizes Select0 of a DIVV/DIVVU pair.
// As these rules show, Select0 carries the remainder half of the paired
// divide result (Select1 carries the quotient): x%1 folds to 0, unsigned
// modulo by a power of two becomes a mask (ANDconst), and remainders of
// two constants are folded at compile time.
func rewriteValueMIPS64_OpSelect0_0(v *Value) bool {
	// match: (Select0 (DIVVU _ (MOVVconst [1])))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select0 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (ANDconst [c-1] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64ANDconst)
		v.AuxInt = c - 1
		v.AddArg(x)
		return true
	}
	// match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [c%d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVV {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c % d
		return true
	}
	// match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [int64(uint64(c)%uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint64(c) % uint64(d))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSelect1_0 strength-reduces Select1 of MULVU
// (the low product half): multiply by -1 becomes NEGV, by 0 a constant
// zero, by 1 a plain copy, and by a power of two a left shift. Each rule
// appears twice with the operands commuted, as the generator emits both
// argument orders for commutative ops.
// NOTE(review): the last two rules are exact duplicates of the first two
// and can never fire — a generator artifact; any cleanup belongs in
// gen/MIPS64.rules, not here.
func rewriteValueMIPS64_OpSelect1_0(v *Value) bool {
	// match: (Select1 (MULVU x (MOVVconst [-1])))
	// cond:
	// result: (NEGV x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != -1 {
			break
		}
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [-1]) x))
	// cond:
	// result: (NEGV x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != -1 {
			break
		}
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU _ (MOVVconst [0])))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [0]) _))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [1])))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [1]) x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [c]) x))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [-1]) x))
	// cond:
	// result: (NEGV x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != -1 {
			break
		}
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [-1])))
	// cond:
	// result: (NEGV x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != -1 {
			break
		}
		v.reset(OpMIPS64NEGV)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSelect1_10 continues the Select1 rules: the
// remaining commuted MULVU identities, unsigned divide by 1 (copy) and by
// a power of two (SRLVconst), and compile-time folding of MULVU with two
// constant operands.
// NOTE(review): the first six rules mirror rules already present in
// rewriteValueMIPS64_OpSelect1_0 and appear unreachable when that
// function runs first — a generator artifact; fix in gen/MIPS64.rules.
func rewriteValueMIPS64_OpSelect1_10(v *Value) bool {
	// match: (Select1 (MULVU (MOVVconst [0]) _))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select1 (MULVU _ (MOVVconst [0])))
	// cond:
	// result: (MOVVconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 0 {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = 0
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [1]) x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_0.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [1])))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [c]) x))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SLLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SLLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (DIVVU x (MOVVconst [1])))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		if v_0_1.AuxInt != 1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Select1 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SRLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [c*d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c * d
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [d]) (MOVVconst [c])))
	// cond:
	// result: (MOVVconst [c*d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c * d
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSelect1_20 folds Select1 (the quotient half) of a
// DIVV or DIVVU with two constant operands into a MOVVconst computed at
// compile time, using signed or unsigned division respectively.
func rewriteValueMIPS64_OpSelect1_20(v *Value) bool {
	// match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [c/d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVV {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c / d
		return true
	}
	// match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint64(c) / uint64(d))
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSignExt16to32_0 lowers the generic SignExt16to32
// op to the MIPS64 MOVHreg op. Always fires.
func rewriteValueMIPS64_OpSignExt16to32_0(v *Value) bool {
	// match: (SignExt16to32 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVHreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt16to64_0 lowers the generic SignExt16to64
// op to the MIPS64 MOVHreg op. Always fires.
func rewriteValueMIPS64_OpSignExt16to64_0(v *Value) bool {
	// match: (SignExt16to64 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVHreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt32to64_0 lowers the generic SignExt32to64
// op to the MIPS64 MOVWreg op. Always fires.
func rewriteValueMIPS64_OpSignExt32to64_0(v *Value) bool {
	// match: (SignExt32to64 x)
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt8to16_0 lowers the generic SignExt8to16
// op to the MIPS64 MOVBreg op. Always fires.
func rewriteValueMIPS64_OpSignExt8to16_0(v *Value) bool {
	// match: (SignExt8to16 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt8to32_0 lowers the generic SignExt8to32
// op to the MIPS64 MOVBreg op. Always fires.
func rewriteValueMIPS64_OpSignExt8to32_0(v *Value) bool {
	// match: (SignExt8to32 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSignExt8to64_0 lowers the generic SignExt8to64
// op to the MIPS64 MOVBreg op. Always fires.
func rewriteValueMIPS64_OpSignExt8to64_0(v *Value) bool {
	// match: (SignExt8to64 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpSlicemask_0 lowers Slicemask to
// (SRAVconst (NEGV x) [63]): negating x and arithmetic-shifting by 63
// replicates the sign bit, yielding all ones when x > 0 and zero when
// x == 0 — the mask shape Slicemask requires.
func rewriteValueMIPS64_OpSlicemask_0(v *Value) bool {
	b := v.Block
	// match: (Slicemask <t> x)
	// cond:
	// result: (SRAVconst (NEGV <t> x) [63])
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 63
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueMIPS64_OpSqrt_0 lowers the generic Sqrt op to the MIPS64
// double-precision SQRTD instruction. Always fires.
func rewriteValueMIPS64_OpSqrt_0(v *Value) bool {
	// match: (Sqrt x)
	// cond:
	// result: (SQRTD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64SQRTD)
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpStaticCall_0 lowers a generic StaticCall to the
// MIPS64 CALLstatic op, carrying the argument width (AuxInt) and call
// target (Aux) through unchanged. Always fires.
func rewriteValueMIPS64_OpStaticCall_0(v *Value) bool {
	// match: (StaticCall [argwid] {target} mem)
	// cond:
	// result: (CALLstatic [argwid] {target} mem)
	for {
		argwid := v.AuxInt
		target := v.Aux
		mem := v.Args[0]
		v.reset(OpMIPS64CALLstatic)
		v.AuxInt = argwid
		v.Aux = target
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpStore_0 selects the MIPS64 store op for a generic
// Store based on the stored type's size (1/2/4/8 bytes) and, for the
// 4- and 8-byte cases, whether the value is a float (MOVFstore/MOVDstore)
// or an integer/pointer (MOVWstore/MOVVstore). Returns false if no rule
// matches.
func rewriteValueMIPS64_OpStore_0(v *Value) bool {
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		if !(t.(*types.Type).Size() == 1) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		if !(t.(*types.Type).Size() == 2) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		t := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)
	// result: (MOVVstore ptr val mem)
	for {
		t := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		if !(t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)
	// result: (MOVFstore ptr val mem)
	for {
		t := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		t := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpSub16_0 lowers the generic Sub16 op to the MIPS64
// 64-bit SUBV instruction. Always fires.
func rewriteValueMIPS64_OpSub16_0(v *Value) bool {
	// match: (Sub16 x y)
	// cond:
	// result: (SUBV x y)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub32_0 lowers the generic Sub32 op to the MIPS64
// 64-bit SUBV instruction. Always fires.
func rewriteValueMIPS64_OpSub32_0(v *Value) bool {
	// match: (Sub32 x y)
	// cond:
	// result: (SUBV x y)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub32F_0 lowers the generic Sub32F op to the
// MIPS64 single-precision SUBF instruction. Always fires.
func rewriteValueMIPS64_OpSub32F_0(v *Value) bool {
	// match: (Sub32F x y)
	// cond:
	// result: (SUBF x y)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SUBF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub64_0 lowers the generic Sub64 op to the MIPS64
// 64-bit SUBV instruction. Always fires.
func rewriteValueMIPS64_OpSub64_0(v *Value) bool {
	// match: (Sub64 x y)
	// cond:
	// result: (SUBV x y)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub64F_0 lowers the generic Sub64F op to the
// MIPS64 double-precision SUBD instruction. Always fires.
func rewriteValueMIPS64_OpSub64F_0(v *Value) bool {
	// match: (Sub64F x y)
	// cond:
	// result: (SUBD x y)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SUBD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSub8_0 lowers the generic Sub8 op to the MIPS64
// 64-bit SUBV instruction. Always fires.
func rewriteValueMIPS64_OpSub8_0(v *Value) bool {
	// match: (Sub8 x y)
	// cond:
	// result: (SUBV x y)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpSubPtr_0 lowers the generic SubPtr op to the
// MIPS64 64-bit SUBV instruction. Always fires.
func rewriteValueMIPS64_OpSubPtr_0(v *Value) bool {
	// match: (SubPtr x y)
	// cond:
	// result: (SUBV x y)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc16to8_0 rewrites Trunc16to8 to a plain copy:
// no instruction is needed, the value is forwarded via OpCopy.
func rewriteValueMIPS64_OpTrunc16to8_0(v *Value) bool {
	// match: (Trunc16to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc32to16_0 rewrites Trunc32to16 to a plain
// copy: no instruction is needed, the value is forwarded via OpCopy.
func rewriteValueMIPS64_OpTrunc32to16_0(v *Value) bool {
	// match: (Trunc32to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc32to8_0 rewrites Trunc32to8 to a plain copy:
// no instruction is needed, the value is forwarded via OpCopy.
func rewriteValueMIPS64_OpTrunc32to8_0(v *Value) bool {
	// match: (Trunc32to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc64to16_0 rewrites Trunc64to16 to a plain
// copy: no instruction is needed, the value is forwarded via OpCopy.
func rewriteValueMIPS64_OpTrunc64to16_0(v *Value) bool {
	// match: (Trunc64to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc64to32_0 rewrites Trunc64to32 to a plain
// copy: no instruction is needed, the value is forwarded via OpCopy.
func rewriteValueMIPS64_OpTrunc64to32_0(v *Value) bool {
	// match: (Trunc64to32 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpTrunc64to8_0 rewrites Trunc64to8 to a plain copy:
// no instruction is needed, the value is forwarded via OpCopy.
func rewriteValueMIPS64_OpTrunc64to8_0(v *Value) bool {
	// match: (Trunc64to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}
// rewriteValueMIPS64_OpWB_0 lowers the generic write-barrier op WB to the
// MIPS64 LoweredWB op, preserving the barrier function symbol in Aux and
// the destination/source/memory arguments. Always fires.
func rewriteValueMIPS64_OpWB_0(v *Value) bool {
	// match: (WB {fn} destptr srcptr mem)
	// cond:
	// result: (LoweredWB {fn} destptr srcptr mem)
	for {
		fn := v.Aux
		mem := v.Args[2]
		destptr := v.Args[0]
		srcptr := v.Args[1]
		v.reset(OpMIPS64LoweredWB)
		v.Aux = fn
		v.AddArg(destptr)
		v.AddArg(srcptr)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueMIPS64_OpXor16_0 lowers the generic Xor16 op to the MIPS64
// XOR instruction. Always fires.
func rewriteValueMIPS64_OpXor16_0(v *Value) bool {
	// match: (Xor16 x y)
	// cond:
	// result: (XOR x y)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpXor32_0 lowers the generic Xor32 op to the MIPS64
// XOR instruction. Always fires.
func rewriteValueMIPS64_OpXor32_0(v *Value) bool {
	// match: (Xor32 x y)
	// cond:
	// result: (XOR x y)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpXor64_0 lowers the generic Xor64 op to the MIPS64
// XOR instruction. Always fires.
func rewriteValueMIPS64_OpXor64_0(v *Value) bool {
	// match: (Xor64 x y)
	// cond:
	// result: (XOR x y)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpXor8_0 lowers the generic Xor8 op to the MIPS64
// XOR instruction. Always fires.
func rewriteValueMIPS64_OpXor8_0(v *Value) bool {
	// match: (Xor8 x y)
	// cond:
	// result: (XOR x y)
	for {
		y := v.Args[1]
		x := v.Args[0]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueMIPS64_OpZero_0 expands small fixed-size Zero ops into
// explicit constant-zero stores. Size 0 is a no-op (the memory is passed
// through); sizes 1, 2, 4 and 8 pick the widest store the type's
// alignment permits (MOVVstore > MOVWstore > MOVHstore > MOVBstore) and
// chain narrower stores when the alignment rules out a single wide one.
// Larger/odd sizes fall through to rewriteValueMIPS64_OpZero_10.
func rewriteValueMIPS64_OpZero_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Zero [0] _ mem)
	// cond:
	// result: mem
	for {
		if v.AuxInt != 0 {
			break
		}
		mem := v.Args[1]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// cond:
	// result: (MOVBstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		mem := v.Args[1]
		ptr := v.Args[0]
		v.reset(OpMIPS64MOVBstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [2] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		t := v.Aux
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// cond:
	// result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 2 {
			break
		}
		mem := v.Args[1]
		ptr := v.Args[0]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 1
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		t := v.Aux
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 4 {
			break
		}
		t := v.Aux
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 2
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [4] ptr mem)
	// cond:
	// result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))))
	for {
		if v.AuxInt != 4 {
			break
		}
		mem := v.Args[1]
		ptr := v.Args[0]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 3
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = 2
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AuxInt = 1
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v5.AuxInt = 0
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))))
	for {
		if v.AuxInt != 8 {
			break
		}
		t := v.Aux
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 6
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AuxInt = 2
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v5.AuxInt = 0
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	return false
}
// rewriteValueMIPS64_OpZero_10 lowers the remaining generic Zero ops to
// MIPS64 machine ops: fixed small sizes (3, 6, 12, 16, 24 bytes) become
// chains of MOVB/MOVH/MOVW/MOVVstore (subject to the alignment guard in
// each rule's cond), aligned medium sizes use DUFFZERO, and everything
// else falls back to LoweredZero. It reports whether v was rewritten.
// NOTE: code generated from gen/MIPS64.rules — the order of AddArg calls
// encodes SSA operand order and must not be changed by hand.
func rewriteValueMIPS64_OpZero_10(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Zero [3] ptr mem)
	// cond:
	// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if v.AuxInt != 3 {
			break
		}
		mem := v.Args[1]
		ptr := v.Args[0]
		v.reset(OpMIPS64MOVBstore)
		v.AuxInt = 2
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v1.AuxInt = 1
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [6] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%2 == 0
	// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if v.AuxInt != 6 {
			break
		}
		t := v.Aux
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(t.(*types.Type).Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AuxInt = 4
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v1.AuxInt = 2
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [12] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%4 == 0
	// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if v.AuxInt != 12 {
			break
		}
		t := v.Aux
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(t.(*types.Type).Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [16] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))
	for {
		if v.AuxInt != 16 {
			break
		}
		t := v.Aux
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 8
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = 0
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [24] {t} ptr mem)
	// cond: t.(*types.Type).Alignment()%8 == 0
	// result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)))
	for {
		if v.AuxInt != 24 {
			break
		}
		t := v.Aux
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(t.(*types.Type).Alignment()%8 == 0) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = 16
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v1.AuxInt = 8
		v1.AddArg(ptr)
		v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v2.AuxInt = 0
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
		v3.AuxInt = 0
		v3.AddArg(ptr)
		v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v4.AuxInt = 0
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s%8 == 0 && s > 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice
	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
	for {
		s := v.AuxInt
		t := v.Aux
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(s%8 == 0 && s > 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice) {
			break
		}
		v.reset(OpMIPS64DUFFZERO)
		// NOTE(review): 8*(128-s/8) presumably selects the entry offset into
		// the duffzero routine — confirm against gen/MIPS64Ops.go.
		v.AuxInt = 8 * (128 - s/8)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: (s > 8*128 || config.noDuffDevice) || t.(*types.Type).Alignment()%8 != 0
	// result: (LoweredZero [t.(*types.Type).Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
	for {
		s := v.AuxInt
		t := v.Aux
		mem := v.Args[1]
		ptr := v.Args[0]
		if !((s > 8*128 || config.noDuffDevice) || t.(*types.Type).Alignment()%8 != 0) {
			break
		}
		v.reset(OpMIPS64LoweredZero)
		v.AuxInt = t.(*types.Type).Alignment()
		v.AddArg(ptr)
		// Second operand is ptr + (s - moveSize(align)): presumably the
		// address of the final unit for the zeroing loop — confirm in the
		// MIPS64 code generator.
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
		v0.AddArg(ptr)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
 10122	func rewriteValueMIPS64_OpZeroExt16to32_0(v *Value) bool {
 10123		// match: (ZeroExt16to32 x)
 10124		// cond:
 10125		// result: (MOVHUreg x)
 10126		for {
 10127			x := v.Args[0]
 10128			v.reset(OpMIPS64MOVHUreg)
 10129			v.AddArg(x)
 10130			return true
 10131		}
 10132	}
 10133	func rewriteValueMIPS64_OpZeroExt16to64_0(v *Value) bool {
 10134		// match: (ZeroExt16to64 x)
 10135		// cond:
 10136		// result: (MOVHUreg x)
 10137		for {
 10138			x := v.Args[0]
 10139			v.reset(OpMIPS64MOVHUreg)
 10140			v.AddArg(x)
 10141			return true
 10142		}
 10143	}
 10144	func rewriteValueMIPS64_OpZeroExt32to64_0(v *Value) bool {
 10145		// match: (ZeroExt32to64 x)
 10146		// cond:
 10147		// result: (MOVWUreg x)
 10148		for {
 10149			x := v.Args[0]
 10150			v.reset(OpMIPS64MOVWUreg)
 10151			v.AddArg(x)
 10152			return true
 10153		}
 10154	}
 10155	func rewriteValueMIPS64_OpZeroExt8to16_0(v *Value) bool {
 10156		// match: (ZeroExt8to16 x)
 10157		// cond:
 10158		// result: (MOVBUreg x)
 10159		for {
 10160			x := v.Args[0]
 10161			v.reset(OpMIPS64MOVBUreg)
 10162			v.AddArg(x)
 10163			return true
 10164		}
 10165	}
 10166	func rewriteValueMIPS64_OpZeroExt8to32_0(v *Value) bool {
 10167		// match: (ZeroExt8to32 x)
 10168		// cond:
 10169		// result: (MOVBUreg x)
 10170		for {
 10171			x := v.Args[0]
 10172			v.reset(OpMIPS64MOVBUreg)
 10173			v.AddArg(x)
 10174			return true
 10175		}
 10176	}
 10177	func rewriteValueMIPS64_OpZeroExt8to64_0(v *Value) bool {
 10178		// match: (ZeroExt8to64 x)
 10179		// cond:
 10180		// result: (MOVBUreg x)
 10181		for {
 10182			x := v.Args[0]
 10183			v.reset(OpMIPS64MOVBUreg)
 10184			v.AddArg(x)
 10185			return true
 10186		}
 10187	}
// rewriteBlockMIPS64 rewrites block b's kind and control value into a more
// specific MIPS64 form: EQ/NE on comparison ops become FPF/FPT or the
// sign-test kinds GEZ/GTZ/LEZ/LTZ, generic If becomes NE, and a constant
// control collapses the block to First (with swapSuccessors when the
// constant makes the condition false). It reports whether a rewrite fired.
// NOTE: code generated from gen/MIPS64.rules — rule order within each case
// is significant; do not edit by hand.
func rewriteBlockMIPS64(b *Block) bool {
	config := b.Func.Config
	typ := &config.Types
	_ = typ
	v := b.Control
	_ = v
	switch b.Kind {
	case BlockMIPS64EQ:
		// match: (EQ (FPFlagTrue cmp) yes no)
		// cond:
		// result: (FPF cmp yes no)
		for v.Op == OpMIPS64FPFlagTrue {
			cmp := v.Args[0]
			b.Kind = BlockMIPS64FPF
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
		// match: (EQ (FPFlagFalse cmp) yes no)
		// cond:
		// result: (FPT cmp yes no)
		for v.Op == OpMIPS64FPFlagFalse {
			cmp := v.Args[0]
			b.Kind = BlockMIPS64FPT
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for v.Op == OpMIPS64XORconst {
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGT {
				break
			}
			_ = cmp.Args[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for v.Op == OpMIPS64XORconst {
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTU {
				break
			}
			_ = cmp.Args[1]
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for v.Op == OpMIPS64XORconst {
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTconst {
				break
			}
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
		// cond:
		// result: (NE cmp yes no)
		for v.Op == OpMIPS64XORconst {
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTUconst {
				break
			}
			b.Kind = BlockMIPS64NE
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
		// match: (EQ (SGTUconst [1] x) yes no)
		// cond:
		// result: (NE x yes no)
		for v.Op == OpMIPS64SGTUconst {
			if v.AuxInt != 1 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockMIPS64NE
			b.SetControl(x)
			b.Aux = nil
			return true
		}
		// match: (EQ (SGTU x (MOVVconst [0])) yes no)
		// cond:
		// result: (EQ x yes no)
		for v.Op == OpMIPS64SGTU {
			_ = v.Args[1]
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			b.Kind = BlockMIPS64EQ
			b.SetControl(x)
			b.Aux = nil
			return true
		}
		// match: (EQ (SGTconst [0] x) yes no)
		// cond:
		// result: (GEZ x yes no)
		for v.Op == OpMIPS64SGTconst {
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockMIPS64GEZ
			b.SetControl(x)
			b.Aux = nil
			return true
		}
		// match: (EQ (SGT x (MOVVconst [0])) yes no)
		// cond:
		// result: (LEZ x yes no)
		for v.Op == OpMIPS64SGT {
			_ = v.Args[1]
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			b.Kind = BlockMIPS64LEZ
			b.SetControl(x)
			b.Aux = nil
			return true
		}
		// match: (EQ (MOVVconst [0]) yes no)
		// cond:
		// result: (First nil yes no)
		for v.Op == OpMIPS64MOVVconst {
			if v.AuxInt != 0 {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (EQ (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First nil no yes)
		for v.Op == OpMIPS64MOVVconst {
			c := v.AuxInt
			if !(c != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			// Condition is statically false: swap the yes/no successors so
			// First falls through to the "no" edge, per the rule's result.
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64GEZ:
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First nil yes no)
		for v.Op == OpMIPS64MOVVconst {
			c := v.AuxInt
			if !(c >= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (GEZ (MOVVconst [c]) yes no)
		// cond: c < 0
		// result: (First nil no yes)
		for v.Op == OpMIPS64MOVVconst {
			c := v.AuxInt
			if !(c < 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64GTZ:
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c > 0
		// result: (First nil yes no)
		for v.Op == OpMIPS64MOVVconst {
			c := v.AuxInt
			if !(c > 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (GTZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First nil no yes)
		for v.Op == OpMIPS64MOVVconst {
			c := v.AuxInt
			if !(c <= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
	case BlockIf:
		// match: (If cond yes no)
		// cond:
		// result: (NE cond yes no)
		for {
			cond := b.Control
			b.Kind = BlockMIPS64NE
			b.SetControl(cond)
			b.Aux = nil
			return true
		}
	case BlockMIPS64LEZ:
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c <= 0
		// result: (First nil yes no)
		for v.Op == OpMIPS64MOVVconst {
			c := v.AuxInt
			if !(c <= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (LEZ (MOVVconst [c]) yes no)
		// cond: c > 0
		// result: (First nil no yes)
		for v.Op == OpMIPS64MOVVconst {
			c := v.AuxInt
			if !(c > 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64LTZ:
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c < 0
		// result: (First nil yes no)
		for v.Op == OpMIPS64MOVVconst {
			c := v.AuxInt
			if !(c < 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
		// match: (LTZ (MOVVconst [c]) yes no)
		// cond: c >= 0
		// result: (First nil no yes)
		for v.Op == OpMIPS64MOVVconst {
			c := v.AuxInt
			if !(c >= 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
	case BlockMIPS64NE:
		// match: (NE (FPFlagTrue cmp) yes no)
		// cond:
		// result: (FPT cmp yes no)
		for v.Op == OpMIPS64FPFlagTrue {
			cmp := v.Args[0]
			b.Kind = BlockMIPS64FPT
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
		// match: (NE (FPFlagFalse cmp) yes no)
		// cond:
		// result: (FPF cmp yes no)
		for v.Op == OpMIPS64FPFlagFalse {
			cmp := v.Args[0]
			b.Kind = BlockMIPS64FPF
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for v.Op == OpMIPS64XORconst {
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGT {
				break
			}
			_ = cmp.Args[1]
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for v.Op == OpMIPS64XORconst {
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTU {
				break
			}
			_ = cmp.Args[1]
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for v.Op == OpMIPS64XORconst {
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTconst {
				break
			}
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
		// cond:
		// result: (EQ cmp yes no)
		for v.Op == OpMIPS64XORconst {
			if v.AuxInt != 1 {
				break
			}
			cmp := v.Args[0]
			if cmp.Op != OpMIPS64SGTUconst {
				break
			}
			b.Kind = BlockMIPS64EQ
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
		// match: (NE (SGTUconst [1] x) yes no)
		// cond:
		// result: (EQ x yes no)
		for v.Op == OpMIPS64SGTUconst {
			if v.AuxInt != 1 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockMIPS64EQ
			b.SetControl(x)
			b.Aux = nil
			return true
		}
		// match: (NE (SGTU x (MOVVconst [0])) yes no)
		// cond:
		// result: (NE x yes no)
		for v.Op == OpMIPS64SGTU {
			_ = v.Args[1]
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			b.Kind = BlockMIPS64NE
			b.SetControl(x)
			b.Aux = nil
			return true
		}
		// match: (NE (SGTconst [0] x) yes no)
		// cond:
		// result: (LTZ x yes no)
		for v.Op == OpMIPS64SGTconst {
			if v.AuxInt != 0 {
				break
			}
			x := v.Args[0]
			b.Kind = BlockMIPS64LTZ
			b.SetControl(x)
			b.Aux = nil
			return true
		}
		// match: (NE (SGT x (MOVVconst [0])) yes no)
		// cond:
		// result: (GTZ x yes no)
		for v.Op == OpMIPS64SGT {
			_ = v.Args[1]
			x := v.Args[0]
			v_1 := v.Args[1]
			if v_1.Op != OpMIPS64MOVVconst {
				break
			}
			if v_1.AuxInt != 0 {
				break
			}
			b.Kind = BlockMIPS64GTZ
			b.SetControl(x)
			b.Aux = nil
			return true
		}
		// match: (NE (MOVVconst [0]) yes no)
		// cond:
		// result: (First nil no yes)
		for v.Op == OpMIPS64MOVVconst {
			if v.AuxInt != 0 {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			b.swapSuccessors()
			return true
		}
		// match: (NE (MOVVconst [c]) yes no)
		// cond: c != 0
		// result: (First nil yes no)
		for v.Op == OpMIPS64MOVVconst {
			c := v.AuxInt
			if !(c != 0) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.Aux = nil
			return true
		}
	}
	return false
}
 10671	
