
Source file src/cmd/asm/internal/arch/arm64.go

Documentation: cmd/asm/internal/arch

// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// This file encapsulates some of the odd characteristics of the ARM64
// instruction set, to minimize its interaction with the core of the
// assembler.

package arch

import (
	"cmd/internal/obj"
	"cmd/internal/obj/arm64"
	"errors"
)

// arm64LS maps the ".P" and ".W" instruction suffixes to the post-indexed
// and pre-indexed addressing-mode bits used by the arm64 back end.
var arm64LS = map[string]uint8{
	"P": arm64.C_XPOST,
	"W": arm64.C_XPRE,
}

// arm64Jump lists the ARM64 mnemonics whose operand is a branch target.
var arm64Jump = map[string]bool{
	"B":     true,
	"BL":    true,
	"BEQ":   true,
	"BNE":   true,
	"BCS":   true,
	"BHS":   true,
	"BCC":   true,
	"BLO":   true,
	"BMI":   true,
	"BPL":   true,
	"BVS":   true,
	"BVC":   true,
	"BHI":   true,
	"BLS":   true,
	"BGE":   true,
	"BLT":   true,
	"BGT":   true,
	"BLE":   true,
	"CALL":  true,
	"CBZ":   true,
	"CBZW":  true,
	"CBNZ":  true,
	"CBNZW": true,
	"JMP":   true,
	"TBNZ":  true,
	"TBZ":   true,
}

// jumpArm64 reports whether word is one of the ARM64 jump or branch mnemonics.
func jumpArm64(word string) bool {
	return arm64Jump[word]
}

// IsARM64CMP reports whether the op (as defined by an arm64.A* constant) is
// one of the comparison instructions that require special handling.
func IsARM64CMP(op obj.As) bool {
	switch op {
	case arm64.ACMN, arm64.ACMP, arm64.ATST,
		arm64.ACMNW, arm64.ACMPW, arm64.ATSTW,
		arm64.AFCMPS, arm64.AFCMPD,
		arm64.AFCMPES, arm64.AFCMPED:
		return true
	}
	return false
}

// IsARM64STLXR reports whether the op (as defined by an arm64.A*
// constant) is one of the STLXR-like instructions that require special
// handling.
func IsARM64STLXR(op obj.As) bool {
	switch op {
	case arm64.ASTLXRB, arm64.ASTLXRH, arm64.ASTLXRW, arm64.ASTLXR,
		arm64.ASTXRB, arm64.ASTXRH, arm64.ASTXRW, arm64.ASTXR,
		arm64.ASTXP, arm64.ASTXPW, arm64.ASTLXP, arm64.ASTLXPW:
		return true
	}
	// LDADDx/SWPx/CASx atomic instructions
	if arm64.IsAtomicInstruction(op) {
		return true
	}
	return false
}

// IsARM64TBL reports whether the op (as defined by an arm64.A*
// constant) is one of the TBL-like instructions. One of their inputs
// does not fit into prog.Reg, so they require special handling.
func IsARM64TBL(op obj.As) bool {
	switch op {
	case arm64.AVTBL, arm64.AVMOVQ:
		return true
	}
	return false
}

// IsARM64CASP reports whether the op (as defined by an arm64.A*
// constant) is one of the CASP-like instructions, whose second
// destination is a register pair that requires special handling.
func IsARM64CASP(op obj.As) bool {
	switch op {
	case arm64.ACASPD, arm64.ACASPW:
		return true
	}
	return false
}

// ARM64Suffix handles the special suffix for ARM64 instructions.
// It returns a boolean to indicate success; failure means
// cond was unrecognized.
func ARM64Suffix(prog *obj.Prog, cond string) bool {
	if cond == "" {
		return true
	}
	bits, ok := parseARM64Suffix(cond)
	if !ok {
		return false
	}
	prog.Scond = bits
	return true
}

// parseARM64Suffix parses the suffix attached to an ARM64 instruction.
// The input is a single string consisting of period-separated condition
// codes, such as ".P.W". An initial period is ignored.
func parseARM64Suffix(cond string) (uint8, bool) {
	if cond == "" {
		return 0, true
	}
	return parseARMCondition(cond, arm64LS, nil)
}
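// For example, the ".P" in "MOVD.P 8(R1), R2" parses to arm64.C_XPOST
// (post-indexed addressing), while ".W" selects arm64.C_XPRE (pre-indexed).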

// arm64RegisterNumber maps a register family prefix ("F", "R" or "V") and
// a register number to the corresponding arm64 register constant.
func arm64RegisterNumber(name string, n int16) (int16, bool) {
	switch name {
	case "F":
		if 0 <= n && n <= 31 {
			return arm64.REG_F0 + n, true
		}
	case "R":
		if 0 <= n && n <= 30 { // not 31
			return arm64.REG_R0 + n, true
		}
	case "V":
		if 0 <= n && n <= 31 {
			return arm64.REG_V0 + n, true
		}
	}
	return 0, false
}
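// For example, arm64RegisterNumber("V", 3) returns arm64.REG_V3. The "R"
// family stops at 30 because the 31st general register is written as ZR or
// RSP in assembly rather than as R31.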

// ARM64RegisterShift constructs an ARM64 register with a shift operation.
func ARM64RegisterShift(reg, op, count int16) (int64, error) {
	// The base register of a shift operation must be a general register.
	if reg > arm64.REG_R31 || reg < arm64.REG_R0 {
		return 0, errors.New("invalid register for shift operation")
	}
	return int64(reg&31)<<16 | int64(op)<<22 | int64(uint16(count)), nil
}
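// The returned value packs the register number into bits 16-20, the shift
// opcode into bits 22 and up, and the shift count into the low 16 bits.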

// ARM64RegisterExtension constructs an ARM64 register with extension or arrangement.
func ARM64RegisterExtension(a *obj.Addr, ext string, reg, num int16, isAmount, isIndex bool) error {
	Rnum := (reg & 31) + int16(num<<5)
	if isAmount {
		if num < 0 || num > 7 {
			return errors.New("index shift amount is out of range")
		}
	}
	if reg <= arm64.REG_R31 && reg >= arm64.REG_R0 {
		switch ext {
		case "UXTB":
			if !isAmount {
				return errors.New("invalid register extension")
			}
			if a.Type == obj.TYPE_MEM {
				return errors.New("invalid shift for the register offset addressing mode")
			}
			a.Reg = arm64.REG_UXTB + Rnum
		case "UXTH":
			if !isAmount {
				return errors.New("invalid register extension")
			}
			if a.Type == obj.TYPE_MEM {
				return errors.New("invalid shift for the register offset addressing mode")
			}
			a.Reg = arm64.REG_UXTH + Rnum
		case "UXTW":
			if !isAmount {
				return errors.New("invalid register extension")
			}
			// The effective memory address is the sum of a base register value and an offset register value.
			if a.Type == obj.TYPE_MEM {
				a.Index = arm64.REG_UXTW + Rnum
			} else {
				a.Reg = arm64.REG_UXTW + Rnum
			}
		case "UXTX":
			if !isAmount {
				return errors.New("invalid register extension")
			}
			if a.Type == obj.TYPE_MEM {
				return errors.New("invalid shift for the register offset addressing mode")
			}
			a.Reg = arm64.REG_UXTX + Rnum
		case "SXTB":
			if !isAmount {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_SXTB + Rnum
		case "SXTH":
			if !isAmount {
				return errors.New("invalid register extension")
			}
			if a.Type == obj.TYPE_MEM {
				return errors.New("invalid shift for the register offset addressing mode")
			}
			a.Reg = arm64.REG_SXTH + Rnum
		case "SXTW":
			if !isAmount {
				return errors.New("invalid register extension")
			}
			if a.Type == obj.TYPE_MEM {
				a.Index = arm64.REG_SXTW + Rnum
			} else {
				a.Reg = arm64.REG_SXTW + Rnum
			}
		case "SXTX":
			if !isAmount {
				return errors.New("invalid register extension")
			}
			if a.Type == obj.TYPE_MEM {
				a.Index = arm64.REG_SXTX + Rnum
			} else {
				a.Reg = arm64.REG_SXTX + Rnum
			}
		case "LSL":
			if !isAmount {
				return errors.New("invalid register extension")
			}
			a.Index = arm64.REG_LSL + Rnum
		default:
			return errors.New("unsupported general register extension type: " + ext)
		}
	} else if reg <= arm64.REG_V31 && reg >= arm64.REG_V0 {
		switch ext {
		case "B8":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8B & 15) << 5)
		case "B16":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_16B & 15) << 5)
		case "H4":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4H & 15) << 5)
		case "H8":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8H & 15) << 5)
		case "S2":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2S & 15) << 5)
		case "S4":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4S & 15) << 5)
		case "D1":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1D & 15) << 5)
		case "D2":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2D & 15) << 5)
		case "Q1":
			if isIndex {
				return errors.New("invalid register extension")
			}
			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1Q & 15) << 5)
		case "B":
			if !isIndex {
				return nil
			}
			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_B & 15) << 5)
			a.Index = num
		case "H":
			if !isIndex {
				return nil
			}
			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_H & 15) << 5)
			a.Index = num
		case "S":
			if !isIndex {
				return nil
			}
			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_S & 15) << 5)
			a.Index = num
		case "D":
			if !isIndex {
				return nil
			}
			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_D & 15) << 5)
			a.Index = num
		default:
			return errors.New("unsupported simd register extension type: " + ext)
		}
	} else {
		return errors.New("invalid register and extension combination")
	}
	return nil
}
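// For example, the offset register in "(R2)(R3.UXTW<<2)" is stored in a.Index
// as arm64.REG_UXTW plus the register and shift amount, an arrangement such as
// "V1.B16" becomes a REG_ARNG pseudo-register in a.Reg, and an element such as
// "V1.B[2]" becomes a REG_ELEM pseudo-register with a.Index set to 2.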

// ARM64RegisterArrangement constructs an ARM64 vector register arrangement.
func ARM64RegisterArrangement(reg int16, name, arng string) (int64, error) {
	var curQ, curSize uint16
	if name[0] != 'V' {
		return 0, errors.New("expect V0 through V31; found: " + name)
	}
	if reg < 0 {
		return 0, errors.New("invalid register number: " + name)
	}
	switch arng {
	case "B8":
		curSize = 0
		curQ = 0
	case "B16":
		curSize = 0
		curQ = 1
	case "H4":
		curSize = 1
		curQ = 0
	case "H8":
		curSize = 1
		curQ = 1
	case "S2":
		curSize = 2
		curQ = 0
	case "S4":
		curSize = 2
		curQ = 1
	case "D1":
		curSize = 3
		curQ = 0
	case "D2":
		curSize = 3
		curQ = 1
	default:
		return 0, errors.New("invalid arrangement in ARM64 register list")
	}
	return (int64(curQ) & 1 << 30) | (int64(curSize&3) << 10), nil
}
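// The returned value carries the Q bit in bit 30 and the size field in bits
// 10-11, so it can be ORed directly into a register-list offset below.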

// ARM64RegisterListOffset generates the offset encoding according to the AArch64 specification.
func ARM64RegisterListOffset(firstReg, regCnt int, arrangement int64) (int64, error) {
	offset := int64(firstReg)
	switch regCnt {
	case 1:
		offset |= 0x7 << 12
	case 2:
		offset |= 0xa << 12
	case 3:
		offset |= 0x6 << 12
	case 4:
		offset |= 0x2 << 12
	default:
		return 0, errors.New("invalid register numbers in ARM64 register list")
	}
	offset |= arrangement
	// arm64 uses bit 60 to differentiate from other archs.
	// For more details, refer to: obj/arm64/list7.go
	offset |= 1 << 60
	return offset, nil
}
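// For example, a list of four registers sets bits 12-15 to 0x2, ORs in the
// arrangement bits from ARM64RegisterArrangement, and sets bit 60 to mark the
// value as an arm64 register-list offset.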