// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
5 package mips64
6
7 import (
8 "math"
9
10 "cmd/compile/internal/base"
11 "cmd/compile/internal/ir"
12 "cmd/compile/internal/logopt"
13 "cmd/compile/internal/ssa"
14 "cmd/compile/internal/ssagen"
15 "cmd/compile/internal/types"
16 "cmd/internal/obj"
17 "cmd/internal/obj/mips"
18 )
19
20
21 func isFPreg(r int16) bool {
22 return mips.REG_F0 <= r && r <= mips.REG_F31
23 }
24
25
26 func isHILO(r int16) bool {
27 return r == mips.REG_HI || r == mips.REG_LO
28 }
29
30
31 func loadByType(t *types.Type, r int16) obj.As {
32 if isFPreg(r) {
33 if t.Size() == 4 {
34 return mips.AMOVF
35 } else {
36 return mips.AMOVD
37 }
38 } else {
39 switch t.Size() {
40 case 1:
41 if t.IsSigned() {
42 return mips.AMOVB
43 } else {
44 return mips.AMOVBU
45 }
46 case 2:
47 if t.IsSigned() {
48 return mips.AMOVH
49 } else {
50 return mips.AMOVHU
51 }
52 case 4:
53 if t.IsSigned() {
54 return mips.AMOVW
55 } else {
56 return mips.AMOVWU
57 }
58 case 8:
59 return mips.AMOVV
60 }
61 }
62 panic("bad load type")
63 }
64
65
66 func storeByType(t *types.Type, r int16) obj.As {
67 if isFPreg(r) {
68 if t.Size() == 4 {
69 return mips.AMOVF
70 } else {
71 return mips.AMOVD
72 }
73 } else {
74 switch t.Size() {
75 case 1:
76 return mips.AMOVB
77 case 2:
78 return mips.AMOVH
79 case 4:
80 return mips.AMOVW
81 case 8:
82 return mips.AMOVV
83 }
84 }
85 panic("bad store type")
86 }
87
// ssaGenValue emits the machine instructions for a single SSA value v,
// appending Progs to s. Each case handles one (family of) MIPS64 op(s).
func ssaGenValue(s *ssagen.State, v *ssa.Value) {
	switch v.Op {
	case ssa.OpCopy, ssa.OpMIPS64MOVVreg:
		if v.Type.IsMemory() {
			return
		}
		x := v.Args[0].Reg()
		y := v.Reg()
		if x == y {
			return
		}
		as := mips.AMOVV
		if isFPreg(x) && isFPreg(y) {
			as = mips.AMOVD
		}
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x
		p.To.Type = obj.TYPE_REG
		p.To.Reg = y
		if isHILO(x) && isHILO(y) || isHILO(x) && isFPreg(y) || isFPreg(x) && isHILO(y) {
			// cannot move between special registers directly,
			// route the value through REGTMP instead
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = y
		}
	case ssa.OpMIPS64MOVVnop:
		// nothing to emit
	case ssa.OpLoadReg:
		if v.Type.IsFlags() {
			v.Fatalf("load flags not implemented: %v", v.LongString())
			return
		}
		r := v.Reg()
		p := s.Prog(loadByType(v.Type, r))
		ssagen.AddrAuto(&p.From, v.Args[0])
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isHILO(r) {
			// cannot load into HI/LO directly; load to TMP and move
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpStoreReg:
		if v.Type.IsFlags() {
			v.Fatalf("store flags not implemented: %v", v.LongString())
			return
		}
		r := v.Args[0].Reg()
		if isHILO(r) {
			// cannot store HI/LO directly; move to TMP and store that
			p := s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = r
			p.To.Type = obj.TYPE_REG
			p.To.Reg = mips.REGTMP
			r = mips.REGTMP
		}
		p := s.Prog(storeByType(v.Type, r))
		p.From.Type = obj.TYPE_REG
		p.From.Reg = r
		ssagen.AddrAuto(&p.To, v)
	case ssa.OpMIPS64ADDV,
		ssa.OpMIPS64SUBV,
		ssa.OpMIPS64AND,
		ssa.OpMIPS64OR,
		ssa.OpMIPS64XOR,
		ssa.OpMIPS64NOR,
		ssa.OpMIPS64SLLV,
		ssa.OpMIPS64SRLV,
		ssa.OpMIPS64SRAV,
		ssa.OpMIPS64ADDF,
		ssa.OpMIPS64ADDD,
		ssa.OpMIPS64SUBF,
		ssa.OpMIPS64SUBD,
		ssa.OpMIPS64MULF,
		ssa.OpMIPS64MULD,
		ssa.OpMIPS64DIVF,
		ssa.OpMIPS64DIVD:
		// generic two-operand form: OP Arg1, Arg0, Result
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64SGT,
		ssa.OpMIPS64SGTU:
		// note: operand order is swapped relative to the case above —
		// SGT Arg0, Arg1, Result
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64ADDVconst,
		ssa.OpMIPS64SUBVconst,
		ssa.OpMIPS64ANDconst,
		ssa.OpMIPS64ORconst,
		ssa.OpMIPS64XORconst,
		ssa.OpMIPS64NORconst,
		ssa.OpMIPS64SLLVconst,
		ssa.OpMIPS64SRLVconst,
		ssa.OpMIPS64SRAVconst,
		ssa.OpMIPS64SGTconst,
		ssa.OpMIPS64SGTUconst:
		// register-immediate form: OP $auxint, Arg0, Result
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MULV,
		ssa.OpMIPS64MULVU,
		ssa.OpMIPS64DIVV,
		ssa.OpMIPS64DIVVU:
		// result in hi,lo — no explicit destination operand
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.Reg = v.Args[0].Reg()
	case ssa.OpMIPS64MOVVconst:
		r := v.Reg()
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = v.AuxInt
		p.To.Type = obj.TYPE_REG
		p.To.Reg = r
		if isFPreg(r) || isHILO(r) {
			// cannot move a constant into FP or special registers directly,
			// materialize it in REGTMP and move from there
			p.To.Reg = mips.REGTMP
			p = s.Prog(mips.AMOVV)
			p.From.Type = obj.TYPE_REG
			p.From.Reg = mips.REGTMP
			p.To.Type = obj.TYPE_REG
			p.To.Reg = r
		}
	case ssa.OpMIPS64MOVFconst,
		ssa.OpMIPS64MOVDconst:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_FCONST
		p.From.Val = math.Float64frombits(uint64(v.AuxInt))
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64CMPEQF,
		ssa.OpMIPS64CMPEQD,
		ssa.OpMIPS64CMPGEF,
		ssa.OpMIPS64CMPGED,
		ssa.OpMIPS64CMPGTF,
		ssa.OpMIPS64CMPGTD:
		// FP compare: result goes to the FP condition flag, no To operand
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = v.Args[1].Reg()
	case ssa.OpMIPS64MOVVaddr:
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Reg = v.Args[0].Reg()
		var wantreg string
		// MOVV $sym+off(base), R
		// the assembler expands it as the following:
		// - base is SP: add constant offset to SP
		//   when constant is large, tmp register may be used
		// - base is SB: load external address with relocation
		switch v.Aux.(type) {
		default:
			v.Fatalf("aux is of unknown type %T", v.Aux)
		case *obj.LSym:
			wantreg = "SB"
			ssagen.AddAux(&p.From, v)
		case *ir.Name:
			wantreg = "SP"
			ssagen.AddAux(&p.From, v)
		case nil:
			// no sym, just MOVV $off(SP), R
			wantreg = "SP"
			p.From.Offset = v.AuxInt
		}
		if reg := v.Args[0].RegName(); reg != wantreg {
			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
		}
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBload,
		ssa.OpMIPS64MOVBUload,
		ssa.OpMIPS64MOVHload,
		ssa.OpMIPS64MOVHUload,
		ssa.OpMIPS64MOVWload,
		ssa.OpMIPS64MOVWUload,
		ssa.OpMIPS64MOVVload,
		ssa.OpMIPS64MOVFload,
		ssa.OpMIPS64MOVDload:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64MOVBstore,
		ssa.OpMIPS64MOVHstore,
		ssa.OpMIPS64MOVWstore,
		ssa.OpMIPS64MOVVstore,
		ssa.OpMIPS64MOVFstore,
		ssa.OpMIPS64MOVDstore:
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBstorezero,
		ssa.OpMIPS64MOVHstorezero,
		ssa.OpMIPS64MOVWstorezero,
		ssa.OpMIPS64MOVVstorezero:
		// store from the hardwired zero register
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.To, v)
	case ssa.OpMIPS64MOVBreg,
		ssa.OpMIPS64MOVBUreg,
		ssa.OpMIPS64MOVHreg,
		ssa.OpMIPS64MOVHUreg,
		ssa.OpMIPS64MOVWreg,
		ssa.OpMIPS64MOVWUreg:
		a := v.Args[0]
		// look through register moves to find the underlying value
		for a.Op == ssa.OpCopy || a.Op == ssa.OpMIPS64MOVVreg {
			a = a.Args[0]
		}
		if a.Op == ssa.OpLoadReg {
			t := a.Type
			switch {
			case v.Op == ssa.OpMIPS64MOVBreg && t.Size() == 1 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHreg && t.Size() == 2 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWreg && t.Size() == 4 && t.IsSigned(),
				v.Op == ssa.OpMIPS64MOVWUreg && t.Size() == 4 && !t.IsSigned():
				// arg is a proper-typed load, already zero/sign extended, don't extend again
				if v.Reg() == v.Args[0].Reg() {
					return
				}
				p := s.Prog(mips.AMOVV)
				p.From.Type = obj.TYPE_REG
				p.From.Reg = v.Args[0].Reg()
				p.To.Type = obj.TYPE_REG
				p.To.Reg = v.Reg()
				return
			default:
			}
		}
		fallthrough
	case ssa.OpMIPS64MOVWF,
		ssa.OpMIPS64MOVWD,
		ssa.OpMIPS64TRUNCFW,
		ssa.OpMIPS64TRUNCDW,
		ssa.OpMIPS64MOVVF,
		ssa.OpMIPS64MOVVD,
		ssa.OpMIPS64TRUNCFV,
		ssa.OpMIPS64TRUNCDV,
		ssa.OpMIPS64MOVFD,
		ssa.OpMIPS64MOVDF,
		ssa.OpMIPS64NEGF,
		ssa.OpMIPS64NEGD,
		ssa.OpMIPS64SQRTF,
		ssa.OpMIPS64SQRTD:
		// unary register-to-register ops
		p := s.Prog(v.Op.Asm())
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64NEGV:
		// SUB from REGZERO
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[0].Reg()
		p.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64DUFFZERO:
		// runtime.duffzero expects start address - 8 in R1
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = 8
		p.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p = s.Prog(obj.ADUFFZERO)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffzero
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredZero:
		// Emitted loop:
		//	SUBV	$sz, R1
		//	MOVx	R0, sz(R1)
		//	ADDV	$sz, R1
		//	BNE	Rarg1, R1, -2(PC)
		// arg1 is the address of the last element to zero
		var sz int64
		var mov obj.As
		// pick the widest store the alignment (AuxInt) permits
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGZERO
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = mips.REG_R1
		p2.To.Offset = sz
		p3 := s.Prog(mips.AADDVU)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = sz
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = mips.REG_R1
		p4 := s.Prog(mips.ABNE)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = mips.REG_R1
		p4.To.Type = obj.TYPE_BRANCH
		p4.To.SetTarget(p2)
	case ssa.OpMIPS64DUFFCOPY:
		p := s.Prog(obj.ADUFFCOPY)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ir.Syms.Duffcopy
		p.To.Offset = v.AuxInt
	case ssa.OpMIPS64LoweredMove:
		// Emitted loop:
		//	SUBV	$sz, R1
		//	MOVx	sz(R1), Rtmp
		//	MOVx	Rtmp, (R2)
		//	ADDV	$sz, R1
		//	ADDV	$sz, R2
		//	BNE	Rarg2, R1, -4(PC)
		// arg2 is the address of the last element of src
		var sz int64
		var mov obj.As
		// pick the widest element move the alignment (AuxInt) permits
		switch {
		case v.AuxInt%8 == 0:
			sz = 8
			mov = mips.AMOVV
		case v.AuxInt%4 == 0:
			sz = 4
			mov = mips.AMOVW
		case v.AuxInt%2 == 0:
			sz = 2
			mov = mips.AMOVH
		default:
			sz = 1
			mov = mips.AMOVB
		}
		p := s.Prog(mips.ASUBVU)
		p.From.Type = obj.TYPE_CONST
		p.From.Offset = sz
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REG_R1
		p2 := s.Prog(mov)
		p2.From.Type = obj.TYPE_MEM
		p2.From.Reg = mips.REG_R1
		p2.From.Offset = sz
		p2.To.Type = obj.TYPE_REG
		p2.To.Reg = mips.REGTMP
		p3 := s.Prog(mov)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_MEM
		p3.To.Reg = mips.REG_R2
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = sz
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = mips.REG_R1
		p5 := s.Prog(mips.AADDVU)
		p5.From.Type = obj.TYPE_CONST
		p5.From.Offset = sz
		p5.To.Type = obj.TYPE_REG
		p5.To.Reg = mips.REG_R2
		p6 := s.Prog(mips.ABNE)
		p6.From.Type = obj.TYPE_REG
		p6.From.Reg = v.Args[2].Reg()
		p6.Reg = mips.REG_R1
		p6.To.Type = obj.TYPE_BRANCH
		p6.To.SetTarget(p2)
	case ssa.OpMIPS64CALLstatic, ssa.OpMIPS64CALLclosure, ssa.OpMIPS64CALLinter:
		s.Call(v)
	case ssa.OpMIPS64LoweredWB:
		// call the write-barrier function named by v.Aux
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = v.Aux.(*obj.LSym)
	case ssa.OpMIPS64LoweredPanicBoundsA, ssa.OpMIPS64LoweredPanicBoundsB, ssa.OpMIPS64LoweredPanicBoundsC:
		p := s.Prog(obj.ACALL)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = ssagen.BoundsCheckFunc[v.AuxInt]
		s.UseArgs(16) // space used in callee args area by assembly stubs
	case ssa.OpMIPS64LoweredAtomicLoad8, ssa.OpMIPS64LoweredAtomicLoad32, ssa.OpMIPS64LoweredAtomicLoad64:
		as := mips.AMOVV
		switch v.Op {
		case ssa.OpMIPS64LoweredAtomicLoad8:
			as = mips.AMOVB
		case ssa.OpMIPS64LoweredAtomicLoad32:
			as = mips.AMOVW
		}
		// plain load bracketed by SYNC barriers
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStore8, ssa.OpMIPS64LoweredAtomicStore32, ssa.OpMIPS64LoweredAtomicStore64:
		as := mips.AMOVV
		switch v.Op {
		case ssa.OpMIPS64LoweredAtomicStore8:
			as = mips.AMOVB
		case ssa.OpMIPS64LoweredAtomicStore32:
			as = mips.AMOVW
		}
		// plain store bracketed by SYNC barriers
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicStorezero32, ssa.OpMIPS64LoweredAtomicStorezero64:
		as := mips.AMOVV
		if v.Op == ssa.OpMIPS64LoweredAtomicStorezero32 {
			as = mips.AMOVW
		}
		// store of REGZERO bracketed by SYNC barriers
		s.Prog(mips.ASYNC)
		p := s.Prog(as)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_MEM
		p.To.Reg = v.Args[0].Reg()
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicExchange32, ssa.OpMIPS64LoweredAtomicExchange64:
		// LL/SC retry loop:
		//	SYNC
		//	MOVV	Rarg1, Rtmp
		//	LL	(Rarg0), Rout
		//	SC	Rtmp, (Rarg0)
		//	BEQ	Rtmp, -3(PC)
		//	SYNC
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicExchange32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = v.Args[1].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = v.Reg0()
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p) // SC failed (Rtmp == 0): retry from the MOVV
		s.Prog(mips.ASYNC)
	case ssa.OpMIPS64LoweredAtomicAdd32, ssa.OpMIPS64LoweredAtomicAdd64:
		// LL/SC retry loop; result (old value + delta) recomputed after:
		//	SYNC
		//	LL	(Rarg0), Rout
		//	ADDV	Rarg1, Rout, Rtmp
		//	SC	Rtmp, (Rarg0)
		//	BEQ	Rtmp, -3(PC)
		//	SYNC
		//	ADDV	Rarg1, Rout
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAdd32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_REG
		p1.From.Reg = v.Args[1].Reg()
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p) // SC failed: retry from the LL
		s.Prog(mips.ASYNC)
		// return the new value: Rout = Rout + Rarg1
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Args[1].Reg()
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicAddconst32, ssa.OpMIPS64LoweredAtomicAddconst64:
		// same as LoweredAtomicAdd, but the delta is the constant AuxInt:
		//	SYNC
		//	LL	(Rarg0), Rout
		//	ADDV	$auxint, Rout, Rtmp
		//	SC	Rtmp, (Rarg0)
		//	BEQ	Rtmp, -3(PC)
		//	SYNC
		//	ADDV	$auxint, Rout
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicAddconst32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		s.Prog(mips.ASYNC)
		p := s.Prog(ll)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		p1 := s.Prog(mips.AADDVU)
		p1.From.Type = obj.TYPE_CONST
		p1.From.Offset = v.AuxInt
		p1.Reg = v.Reg0()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(sc)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_MEM
		p2.To.Reg = v.Args[0].Reg()
		p3 := s.Prog(mips.ABEQ)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = mips.REGTMP
		p3.To.Type = obj.TYPE_BRANCH
		p3.To.SetTarget(p) // SC failed: retry from the LL
		s.Prog(mips.ASYNC)
		p4 := s.Prog(mips.AADDVU)
		p4.From.Type = obj.TYPE_CONST
		p4.From.Offset = v.AuxInt
		p4.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_REG
		p4.To.Reg = v.Reg0()
	case ssa.OpMIPS64LoweredAtomicCas32, ssa.OpMIPS64LoweredAtomicCas64:
		// compare-and-swap via LL/SC; Rout is the success flag:
		//	MOVV	$0, Rout
		//	SYNC
		//	LL	(Rarg0), Rtmp
		//	BNE	Rtmp, Rarg1, 4(PC)
		//	MOVV	Rarg2, Rout
		//	SC	Rout, (Rarg0)
		//	BEQ	Rout, -5(PC)
		//	SYNC
		ll := mips.ALLV
		sc := mips.ASCV
		if v.Op == ssa.OpMIPS64LoweredAtomicCas32 {
			ll = mips.ALL
			sc = mips.ASC
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg0()
		s.Prog(mips.ASYNC)
		p1 := s.Prog(ll)
		p1.From.Type = obj.TYPE_MEM
		p1.From.Reg = v.Args[0].Reg()
		p1.To.Type = obj.TYPE_REG
		p1.To.Reg = mips.REGTMP
		p2 := s.Prog(mips.ABNE)
		p2.From.Type = obj.TYPE_REG
		p2.From.Reg = v.Args[1].Reg()
		p2.Reg = mips.REGTMP
		p2.To.Type = obj.TYPE_BRANCH // target (p6) set below: mismatch, skip the store
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_REG
		p3.From.Reg = v.Args[2].Reg()
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg0()
		p4 := s.Prog(sc)
		p4.From.Type = obj.TYPE_REG
		p4.From.Reg = v.Reg0()
		p4.To.Type = obj.TYPE_MEM
		p4.To.Reg = v.Args[0].Reg()
		p5 := s.Prog(mips.ABEQ)
		p5.From.Type = obj.TYPE_REG
		p5.From.Reg = v.Reg0()
		p5.To.Type = obj.TYPE_BRANCH
		p5.To.SetTarget(p1) // SC failed: retry from the LL
		p6 := s.Prog(mips.ASYNC)
		p2.To.SetTarget(p6)
	case ssa.OpMIPS64LoweredNilCheck:
		// issue a load which will fault if arg is nil
		p := s.Prog(mips.AMOVB)
		p.From.Type = obj.TYPE_MEM
		p.From.Reg = v.Args[0].Reg()
		ssagen.AddAux(&p.From, v)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = mips.REGTMP
		if logopt.Enabled() {
			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
		}
		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
			base.WarnfAt(v.Pos, "generated nil check")
		}
	case ssa.OpMIPS64FPFlagTrue,
		ssa.OpMIPS64FPFlagFalse:
		// convert the FP condition flag to a 0/1 boolean in a register:
		//	MOVV	$0, r
		//	BFPF	2(PC)   (inverted branch skips the set-to-1)
		//	MOVV	$1, r
		branch := mips.ABFPF
		if v.Op == ssa.OpMIPS64FPFlagFalse {
			branch = mips.ABFPT
		}
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
		p2 := s.Prog(branch)
		p2.To.Type = obj.TYPE_BRANCH
		p3 := s.Prog(mips.AMOVV)
		p3.From.Type = obj.TYPE_CONST
		p3.From.Offset = 1
		p3.To.Type = obj.TYPE_REG
		p3.To.Reg = v.Reg()
		p4 := s.Prog(obj.ANOP) // branch target past the set-to-1
		p2.To.SetTarget(p4)
	case ssa.OpMIPS64LoweredGetClosurePtr:
		// closure pointer is already in the context register; just check it
		ssagen.CheckLoweredGetClosurePtr(v)
	case ssa.OpMIPS64LoweredGetCallerSP:
		// caller's SP is FixedFrameSize below the address of the first arg
		p := s.Prog(mips.AMOVV)
		p.From.Type = obj.TYPE_ADDR
		p.From.Offset = -base.Ctxt.FixedFrameSize()
		p.From.Name = obj.NAME_PARAM
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpMIPS64LoweredGetCallerPC:
		p := s.Prog(obj.AGETCALLERPC)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = v.Reg()
	case ssa.OpClobber, ssa.OpClobberReg:
		// TODO: implement for clobbering (no-op for now)
	default:
		v.Fatalf("genValue not implemented: %s", v.LongString())
	}
}
774
// blockJump maps each conditional block kind to the branch instruction that
// implements it (asm) and the inverted branch (invasm), used when falling
// through to the "true" successor instead.
var blockJump = map[ssa.BlockKind]struct {
	asm, invasm obj.As
}{
	ssa.BlockMIPS64EQ:  {mips.ABEQ, mips.ABNE},
	ssa.BlockMIPS64NE:  {mips.ABNE, mips.ABEQ},
	ssa.BlockMIPS64LTZ: {mips.ABLTZ, mips.ABGEZ},
	ssa.BlockMIPS64GEZ: {mips.ABGEZ, mips.ABLTZ},
	ssa.BlockMIPS64LEZ: {mips.ABLEZ, mips.ABGTZ},
	ssa.BlockMIPS64GTZ: {mips.ABGTZ, mips.ABLEZ},
	ssa.BlockMIPS64FPT: {mips.ABFPT, mips.ABFPF},
	ssa.BlockMIPS64FPF: {mips.ABFPF, mips.ABFPT},
}
787
// ssaGenBlock emits the control-flow instructions that end block b.
// next is the block laid out immediately after b; jumps to next are omitted.
func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
	switch b.Kind {
	case ssa.BlockPlain:
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockDefer:
		// defer returns in R1:
		// 0 if we should continue executing
		// 1 if we should jump to deferreturn call
		p := s.Prog(mips.ABNE)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = mips.REGZERO
		p.Reg = mips.REG_R1
		p.To.Type = obj.TYPE_BRANCH
		s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[1].Block()})
		if b.Succs[0].Block() != next {
			p := s.Prog(obj.AJMP)
			p.To.Type = obj.TYPE_BRANCH
			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
		}
	case ssa.BlockExit:
		// no instruction needed; execution never leaves this block normally
	case ssa.BlockRet:
		s.Prog(obj.ARET)
	case ssa.BlockRetJmp:
		// tail call to the function named by b.Aux
		p := s.Prog(obj.ARET)
		p.To.Type = obj.TYPE_MEM
		p.To.Name = obj.NAME_EXTERN
		p.To.Sym = b.Aux.(*obj.LSym)
	case ssa.BlockMIPS64EQ, ssa.BlockMIPS64NE,
		ssa.BlockMIPS64LTZ, ssa.BlockMIPS64GEZ,
		ssa.BlockMIPS64LEZ, ssa.BlockMIPS64GTZ,
		ssa.BlockMIPS64FPT, ssa.BlockMIPS64FPF:
		jmp := blockJump[b.Kind]
		var p *obj.Prog
		switch next {
		case b.Succs[0].Block():
			// fall through to the "true" successor: emit the inverted branch
			p = s.Br(jmp.invasm, b.Succs[1].Block())
		case b.Succs[1].Block():
			// fall through to the "false" successor: emit the direct branch
			p = s.Br(jmp.asm, b.Succs[0].Block())
		default:
			// neither successor follows: branch plus unconditional jump,
			// ordered so the likely successor is the jump target
			if b.Likely != ssa.BranchUnlikely {
				p = s.Br(jmp.asm, b.Succs[0].Block())
				s.Br(obj.AJMP, b.Succs[1].Block())
			} else {
				p = s.Br(jmp.invasm, b.Succs[1].Block())
				s.Br(obj.AJMP, b.Succs[0].Block())
			}
		}
		if !b.Controls[0].Type.IsFlags() {
			// branch tests the control value's register
			p.From.Type = obj.TYPE_REG
			p.From.Reg = b.Controls[0].Reg()
		}
	default:
		b.Fatalf("branch not implemented: %s", b.LongString())
	}
}
847
// (removed web-viewer footer text "View as plain text" — not part of the source)