# Copyright (C) 2015-2017 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.

# Syllabus:
#
# Examples of some roles, types, and widths:
#     U:G:32 => use of the low 32 bits of a general-purpose register or value
#     D:G:32 => def of the low 32 bits of a general-purpose register or value
#     UD:G:32 => use and def of the low 32 bits of a general-purpose register or value
#     U:G:64 => use of the low 64 bits of a general-purpose register or value
#     ZD:G:32 => def of all bits of a general-purpose register, where all but the low 32 bits are guaranteed to be zeroed.
#     UA:G:Ptr => UseAddr (see comment in Arg.h)
#     UZD:G:32 => use and def, where the def zeroes all but the low 32 bits (a ZD that also uses)
#     EZD:G:8 => early ZDef: the def happens early, so it cannot share a register with any use
#     U:F:32 => use of a float register or value
#     U:F:64 => use of a double register or value
#     D:F:32 => def of a float register or value
#     UD:F:32 => use and def of a float register or value
#     S:F:32 => scratch float register.
#
# Argument kinds:
#     Tmp => temporary or register
#     Imm => 32-bit immediate int
#     BigImm => TrustedImm64
#     Addr => address as temporary/register+offset
#     Index => BaseIndex address
#     Abs => AbsoluteAddress
#     BitImm, BitImm64 => immediates for bitwise ops (on arm64, constrained to encodable logical immediates)
#     SimpleAddr => address as a plain temporary/register with no offset
#     PreIndex, PostIndex => arm64 pre-/post-increment addresses
#     ZeroReg => the arm64 zero register
#     RelCond, ResCond, DoubleCond, StatusCond => condition-code immediates
#
# The parser views these things as keywords, and understands that they fall into two distinct
# classes: role/type/width descriptors and argument kinds. So, although this file uses a particular
# indentation style, none of the whitespace or even newlines are meaningful to the parser. For
# example, you could write:
#
# Foo42 U:G:32, UD:F:32 Imm, Tmp Addr, Tmp
#
# And the parser would know that this is the same as:
#
# Foo42 U:G:32, UD:F:32
#     Imm, Tmp
#     Addr, Tmp
#
# I.e. a two-form instruction whose first operand is either a 32-bit int immediate or an address,
# and whose second operand is a float register that is both used and defed.
#
# Any opcode or opcode form can be preceded by an architecture list, which restricts the opcode to
# the union of those architectures. For example, if this is the only overload of the opcode, then it
# makes the opcode available only on x86_64:
#
# x86_64: Fuzz UD:G:64, D:G:64
#     Tmp, Tmp
#     Tmp, Addr
#
# But this only restricts the two-operand form; the other form is allowed on all architectures:
#
# x86_64: Fuzz UD:G:64, D:G:64
#     Tmp, Tmp
#     Tmp, Addr
# Fuzz UD:G:Ptr, D:G:Ptr, U:F:Ptr
#     Tmp, Tmp, Tmp
#     Tmp, Addr, Tmp
#
# And you can also restrict individual forms:
#
# Thingy UD:G:32, D:G:32
#     Tmp, Tmp
#     arm64: Tmp, Addr
#
# Additionally, the architectures of a form are intersected with the architectures of its opcode
# overload. In this example, the version that takes an address is available only on armv7, while
# the other versions are available on armv7 or x86_64:
#
# x86_64 armv7: Buzz U:G:32, UD:F:32
#     Tmp, Tmp
#     Imm, Tmp
#     armv7: Addr, Tmp
#
# Finally, you can specify architectures using helpful architecture groups. Here are all of the
# architecture keywords that we support:
#
# x86: means x86-32 or x86-64.
# x86_32: means just x86-32.
# x86_64: means just x86-64.
# arm: means armv7 or arm64.
# armv7: means just armv7.
# arm64: means just arm64.
# 32: means x86-32 or armv7.
# 64: means x86-64 or arm64.
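#
# For example, the "64: Add64 ..." overloads below are available only on x86-64 and arm64, and the
# "x86: Add8 ..." overload is available only on x86-32 and x86-64.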

# Note that the opcodes here have a leading capital (Add32) but must correspond to MacroAssembler
# API that has a leading lower-case (add32), unless a form overrides the callee with "as"
# (e.g. "Addr, Tmp as loadPtr" under Move below).
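#
# For example (a sketch of the convention, not an additional form), the "Tmp, Tmp, Tmp" overload
# of Add32 below is code-generated roughly as a call to MacroAssembler::add32 on those three
# operands.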

Nop

Add32 U:G:32, U:G:32, ZD:G:32
    Imm, Tmp, Tmp
    Tmp, Tmp, Tmp

Add32 U:G:32, UZD:G:32
    Tmp, Tmp
    x86: Imm, Addr
    x86: Imm, Index
    Imm, Tmp
    x86: Addr, Tmp
    x86: Index, Tmp
    x86: Tmp, Addr
    x86: Tmp, Index

x86: Add8 U:G:8, UD:G:8
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: Add16 U:G:16, UD:G:16
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

64: Add64 U:G:64, UD:G:64
    Tmp, Tmp
    x86: Imm, Addr
    x86: Imm, Index
    Imm, Tmp
    x86: Addr, Tmp
    x86: Index, Tmp
    x86: Tmp, Addr
    x86: Tmp, Index

64: Add64 U:G:64, U:G:64, D:G:64
    Imm, Tmp, Tmp
    Tmp, Tmp, Tmp

AddDouble U:F:64, U:F:64, D:F:64
    Tmp, Tmp, Tmp
    x86: Addr, Tmp, Tmp
    x86: Tmp, Addr, Tmp
    x86: Index, Tmp, Tmp

x86: AddDouble U:F:64, UD:F:64
    Tmp, Tmp
    Addr, Tmp

AddFloat U:F:32, U:F:32, D:F:32
    Tmp, Tmp, Tmp
    x86: Addr, Tmp, Tmp
    x86: Tmp, Addr, Tmp
    x86: Index, Tmp, Tmp

x86: AddFloat U:F:32, UD:F:32
    Tmp, Tmp
    Addr, Tmp

Sub32 U:G:32, UZD:G:32
    Tmp, Tmp
    x86: Imm, Addr
    x86: Imm, Index
    Imm, Tmp
    x86: Addr, Tmp
    x86: Index, Tmp
    x86: Tmp, Addr
    x86: Tmp, Index

arm64: Sub32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp
    Tmp, Imm, Tmp

64: Sub64 U:G:64, UD:G:64
    Tmp, Tmp
    x86: Imm, Addr
    x86: Imm, Index
    Imm, Tmp
    x86: Addr, Tmp
    x86: Index, Tmp
    x86: Tmp, Addr
    x86: Tmp, Index

arm64: Sub64 U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp
    Tmp, Imm, Tmp

SubDouble U:F:64, U:F:64, D:F:64
    arm64: Tmp, Tmp, Tmp
    x86: Tmp, Addr, Tmp
    x86: Tmp, Index, Tmp

x86: SubDouble U:F:64, UD:F:64
    Tmp, Tmp
    Addr, Tmp

SubFloat U:F:32, U:F:32, D:F:32
    arm64: Tmp, Tmp, Tmp
    x86: Tmp, Addr, Tmp
    x86: Tmp, Index, Tmp

x86: SubFloat U:F:32, UD:F:32
    Tmp, Tmp
    Addr, Tmp

Neg32 UZD:G:32
    Tmp
    x86: Addr
    x86: Index

64: Neg64 UD:G:64
    Tmp
    x86: Addr
    x86: Index

arm64: NegateDouble U:F:64, D:F:64
    Tmp, Tmp

arm64: NegateFloat U:F:32, D:F:32
    Tmp, Tmp

Mul32 U:G:32, UZD:G:32
    Tmp, Tmp
    x86: Addr, Tmp

Mul32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp
    x86: Addr, Tmp, Tmp
    x86: Tmp, Addr, Tmp
    x86: Imm, Tmp, Tmp

64: Mul64 U:G:64, UD:G:64
    Tmp, Tmp

64: Mul64 U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp

arm64: MultiplyAdd32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp, Tmp

arm64: MultiplyAdd64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp, Tmp

arm64: MultiplyAddSignExtend32 U:G:32, U:G:32, U:G:64, D:G:64
    Tmp, Tmp, Tmp, Tmp

arm64: MultiplyAddZeroExtend32 U:G:32, U:G:32, U:G:64, D:G:64
    Tmp, Tmp, Tmp, Tmp

arm64: MultiplySub32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp, Tmp

arm64: MultiplySub64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp, Tmp

arm64: MultiplySubSignExtend32 U:G:32, U:G:32, U:G:64, D:G:64
    Tmp, Tmp, Tmp, Tmp

arm64: MultiplySubZeroExtend32 U:G:32, U:G:32, U:G:64, D:G:64
    Tmp, Tmp, Tmp, Tmp

arm64: MultiplyNeg32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp

arm64: MultiplyNeg64 U:G:64, U:G:64, ZD:G:64
    Tmp, Tmp, Tmp

arm64: MultiplyNegSignExtend32 U:G:32, U:G:32, D:G:64
    Tmp, Tmp, Tmp

arm64: MultiplyNegZeroExtend32 U:G:32, U:G:32, D:G:64
    Tmp, Tmp, Tmp

arm64: MultiplySignExtend32 U:G:32, U:G:32, D:G:64
    Tmp, Tmp, Tmp

arm64: MultiplyZeroExtend32 U:G:32, U:G:32, D:G:64
    Tmp, Tmp, Tmp

arm64: Div32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp

arm64: UDiv32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp

arm64: Div64 U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp

arm64: UDiv64 U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp

MulDouble U:F:64, U:F:64, D:F:64
    Tmp, Tmp, Tmp
    x86: Addr, Tmp, Tmp
    x86: Tmp, Addr, Tmp
    x86: Index, Tmp, Tmp

x86: MulDouble U:F:64, UD:F:64
    Tmp, Tmp
    Addr, Tmp

MulFloat U:F:32, U:F:32, D:F:32
    Tmp, Tmp, Tmp
    x86: Addr, Tmp, Tmp
    x86: Tmp, Addr, Tmp
    x86: Index, Tmp, Tmp

x86: MulFloat U:F:32, UD:F:32
    Tmp, Tmp
    Addr, Tmp

arm64: DivDouble U:F:64, U:F:64, D:F:64
    Tmp, Tmp, Tmp

x86: DivDouble U:F:64, UD:F:64
    Tmp, Tmp
    Addr, Tmp

arm64: DivFloat U:F:32, U:F:32, D:F:32
    Tmp, Tmp, Tmp

x86: DivFloat U:F:32, UD:F:32
    Tmp, Tmp
    Addr, Tmp

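# In the forms below, a "*" after an operand kind marks an operand with special constraints that
# the code generator enforces; for these x86 opcodes the starred operands are pinned to particular
# registers (eax/edx for the convert and divide opcodes, cl for shift amounts, rax for
# AtomicStrongCAS; see opcode_generator.rb).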
x86: X86ConvertToDoubleWord32 U:G:32, ZD:G:32
    Tmp*, Tmp*

x86_64: X86ConvertToQuadWord64 U:G:64, D:G:64
    Tmp*, Tmp*

x86: X86Div32 UZD:G:32, UZD:G:32, U:G:32
    Tmp*, Tmp*, Tmp

x86: X86UDiv32 UZD:G:32, UZD:G:32, U:G:32
    Tmp*, Tmp*, Tmp

x86_64: X86Div64 UZD:G:64, UZD:G:64, U:G:64
    Tmp*, Tmp*, Tmp

x86_64: X86UDiv64 UZD:G:64, UZD:G:64, U:G:64
    Tmp*, Tmp*, Tmp

# If we add other things like Lea that are UA, we may need to lower
# them on arm64 similarly to how we do for Lea. In lowerStackArgs,
# we lower Lea to add on arm64.
Lea32 UA:G:32, D:G:32
    Addr, Tmp
    x86: Index, Tmp as x86Lea32

Lea64 UA:G:64, D:G:64
    Addr, Tmp
    x86: Index, Tmp as x86Lea64

And32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp
    arm64: BitImm, Tmp, Tmp
    x86: Tmp, Addr, Tmp
    x86: Addr, Tmp, Tmp

And32 U:G:32, UZD:G:32
    Tmp, Tmp
    x86: Imm, Tmp
    x86: Tmp, Addr
    x86: Tmp, Index
    x86: Addr, Tmp
    x86: Index, Tmp
    x86: Imm, Addr
    x86: Imm, Index

64: And64 U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp
    arm64: BitImm64, Tmp, Tmp

x86_64: And64 U:G:64, UD:G:64
    Tmp, Tmp
    Imm, Tmp
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index
    Addr, Tmp
    Index, Tmp

AndDouble U:F:64, U:F:64, D:F:64
    Tmp, Tmp, Tmp

x86: AndDouble U:F:64, UD:F:64
    Tmp, Tmp

AndFloat U:F:32, U:F:32, D:F:32
    Tmp, Tmp, Tmp

x86: AndFloat U:F:32, UD:F:32
    Tmp, Tmp

OrDouble U:F:64, U:F:64, D:F:64
    Tmp, Tmp, Tmp

x86: OrDouble U:F:64, UD:F:64
    Tmp, Tmp

OrFloat U:F:32, U:F:32, D:F:32
    Tmp, Tmp, Tmp

x86: OrFloat U:F:32, UD:F:32
    Tmp, Tmp

x86: XorDouble U:F:64, U:F:64, D:F:64
    Tmp, Tmp, Tmp

x86: XorDouble U:F:64, UD:F:64
    Tmp, Tmp

x86: XorFloat U:F:32, U:F:32, D:F:32
    Tmp, Tmp, Tmp

x86: XorFloat U:F:32, UD:F:32
    Tmp, Tmp

arm64: Lshift32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp
    Tmp, Imm, Tmp

x86: Lshift32 U:G:32, UZD:G:32
    Tmp*, Tmp
    Imm, Tmp

arm64: Lshift64 U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp
    Tmp, Imm, Tmp

x86_64: Lshift64 U:G:64, UD:G:64
    Tmp*, Tmp
    Imm, Tmp

arm64: Rshift32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp
    Tmp, Imm, Tmp

x86: Rshift32 U:G:32, UZD:G:32
    Tmp*, Tmp
    Imm, Tmp

arm64: Rshift64 U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp
    Tmp, Imm, Tmp

x86_64: Rshift64 U:G:64, UD:G:64
    Tmp*, Tmp
    Imm, Tmp

arm64: Urshift32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp
    Tmp, Imm, Tmp

x86: Urshift32 U:G:32, UZD:G:32
    Tmp*, Tmp
    Imm, Tmp

arm64: Urshift64 U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp
    Tmp, Imm, Tmp

x86_64: Urshift64 U:G:64, UD:G:64
    Tmp*, Tmp
    Imm, Tmp

x86_64: RotateRight32 U:G:32, UZD:G:32
    Tmp*, Tmp
    Imm, Tmp

arm64: RotateRight32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp
    Tmp, Imm, Tmp

x86_64: RotateRight64 U:G:64, UD:G:64
    Tmp*, Tmp
    Imm, Tmp

arm64: RotateRight64 U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp
    Tmp, Imm, Tmp

x86_64: RotateLeft32 U:G:32, UZD:G:32
    Tmp*, Tmp
    Imm, Tmp

x86_64: RotateLeft64 U:G:64, UD:G:64
    Tmp*, Tmp
    Imm, Tmp

Or32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp
    arm64: BitImm, Tmp, Tmp
    x86: Tmp, Addr, Tmp
    x86: Addr, Tmp, Tmp

Or32 U:G:32, UZD:G:32
    Tmp, Tmp
    x86: Imm, Tmp
    x86: Tmp, Addr
    x86: Tmp, Index
    x86: Addr, Tmp
    x86: Index, Tmp
    x86: Imm, Addr
    x86: Imm, Index

64: Or64 U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp
    arm64: BitImm64, Tmp, Tmp

64: Or64 U:G:64, UD:G:64
    Tmp, Tmp
    x86: Imm, Tmp
    x86: Imm, Addr
    x86: Imm, Index
    x86: Tmp, Addr
    x86: Tmp, Index
    x86: Addr, Tmp
    x86: Index, Tmp

Xor32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp
    arm64: BitImm, Tmp, Tmp
    x86: Tmp, Addr, Tmp
    x86: Addr, Tmp, Tmp

Xor32 U:G:32, UZD:G:32
    Tmp, Tmp
    x86: Imm, Tmp
    x86: Tmp, Addr
    x86: Tmp, Index
    x86: Addr, Tmp
    x86: Index, Tmp
    x86: Imm, Addr
    x86: Imm, Index

64: Xor64 U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp
    arm64: BitImm64, Tmp, Tmp

64: Xor64 U:G:64, UD:G:64
    Tmp, Tmp
    x86: Tmp, Addr
    x86: Tmp, Index
    x86: Addr, Tmp
    x86: Index, Tmp
    x86: Imm, Addr
    x86: Imm, Index
    x86: Imm, Tmp

arm64: Not32 U:G:32, ZD:G:32
    Tmp, Tmp

x86: Not32 UZD:G:32
    Tmp
    Addr
    Index

arm64: Not64 U:G:64, D:G:64
    Tmp, Tmp

x86_64: Not64 UD:G:64
    Tmp
    Addr
    Index

arm64: AbsDouble U:F:64, D:F:64
    Tmp, Tmp

arm64: AbsFloat U:F:32, D:F:32
    Tmp, Tmp

CeilDouble U:F:64, D:F:64
    Tmp, Tmp
    x86: Addr, Tmp

CeilFloat U:F:32, D:F:32
    Tmp, Tmp
    x86: Addr, Tmp

FloorDouble U:F:64, D:F:64
    Tmp, Tmp
    x86: Addr, Tmp

FloorFloat U:F:32, D:F:32
    Tmp, Tmp
    x86: Addr, Tmp

SqrtDouble U:F:64, D:F:64
    Tmp, Tmp
    x86: Addr, Tmp

SqrtFloat U:F:32, D:F:32
    Tmp, Tmp
    x86: Addr, Tmp

ConvertInt32ToDouble U:G:32, D:F:64
    Tmp, Tmp
    x86: Addr, Tmp

64: ConvertInt64ToDouble U:G:64, D:F:64
    Tmp, Tmp
    x86_64: Addr, Tmp

ConvertInt32ToFloat U:G:32, D:F:32
    Tmp, Tmp
    x86: Addr, Tmp

64: ConvertInt64ToFloat U:G:64, D:F:32
    Tmp, Tmp
    x86_64: Addr, Tmp

CountLeadingZeros32 U:G:32, ZD:G:32
    Tmp, Tmp
    x86: Addr, Tmp

64: CountLeadingZeros64 U:G:64, D:G:64
    Tmp, Tmp
    x86: Addr, Tmp

ConvertDoubleToFloat U:F:64, D:F:32
    Tmp, Tmp
    x86: Addr, Tmp

ConvertFloatToDouble U:F:32, D:F:64
    Tmp, Tmp
    x86: Addr, Tmp

# Note that Move operates over the full register size, which is either 32-bit or 64-bit depending on
# the platform. I'm not entirely sure that this is a good thing; it might be better to just have a
# Move64 instruction. OTOH, our MacroAssemblers already have this notion of "move()" that basically
# means movePtr.
Move U:G:Ptr, D:G:Ptr
    Tmp, Tmp
    Imm, Tmp as signExtend32ToPtr
    BigImm, Tmp
    Addr, Tmp as loadPtr # This means that "Move Addr, Tmp" is code-generated as "load" not "move".
    Index, Tmp as loadPtr
    Tmp, Addr as storePtr
    Tmp, Index as storePtr
    x86: Imm, Addr as storePtr

# This is for moving between spill slots: the extra Tmp is a scratch register, since a
# memory-to-memory move has to go through a register.
Move U:G:Ptr, D:G:Ptr, S:G:Ptr
    Addr, Addr, Tmp

x86: Swap32 UD:G:32, UD:G:32
    Tmp, Tmp
    Tmp, Addr

x86_64: Swap64 UD:G:64, UD:G:64
    Tmp, Tmp
    Tmp, Addr

arm64: MoveWithIncrement64 UD:G:64, D:G:64
    PreIndex, Tmp as load64
    PostIndex, Tmp as load64
    Tmp, PreIndex as store64
    Tmp, PostIndex as store64

Move32 U:G:32, ZD:G:32
    Tmp, Tmp as zeroExtend32ToWord
    Addr, Tmp as load32
    Index, Tmp as load32
    Tmp, Addr as store32
    Tmp, Index as store32
    x86: Imm, Tmp as zeroExtend32ToWord
    x86: Imm, Addr as store32
    x86: Imm, Index as store32

arm64: MoveWithIncrement32 UD:G:32, ZD:G:32
    PreIndex, Tmp as load32
    PostIndex, Tmp as load32
    Tmp, PreIndex as store32
    Tmp, PostIndex as store32

# This is for moving between spill slots.
Move32 U:G:32, ZD:G:32, S:G:32
    Addr, Addr, Tmp

arm64: Store32 U:G:32, ZD:G:32
    ZeroReg, Addr
    ZeroReg, Index

arm64: Store64 U:G:64, D:G:64
    ZeroReg, Addr
    ZeroReg, Index

SignExtend32ToPtr U:G:32, D:G:Ptr
    Tmp, Tmp

ZeroExtend8To32 U:G:8, ZD:G:32
    Tmp, Tmp
    x86: Addr, Tmp as load8
    x86: Index, Tmp as load8

SignExtend8To32 U:G:8, ZD:G:32
    Tmp, Tmp
    x86: Addr, Tmp as load8SignedExtendTo32
    x86: Index, Tmp as load8SignedExtendTo32

ZeroExtend16To32 U:G:16, ZD:G:32
    Tmp, Tmp
    x86: Addr, Tmp as load16
    x86: Index, Tmp as load16

SignExtend16To32 U:G:16, ZD:G:32
    Tmp, Tmp
    x86: Addr, Tmp as load16SignedExtendTo32
    x86: Index, Tmp as load16SignedExtendTo32

MoveFloat U:F:32, D:F:32
    Tmp, Tmp as moveDouble
    Addr, Tmp as loadFloat
    Index, Tmp as loadFloat
    Tmp, Addr as storeFloat
    Tmp, Index as storeFloat

MoveFloat U:F:32, D:F:32, S:F:32
    Addr, Addr, Tmp

MoveDouble U:F:64, D:F:64
    Tmp, Tmp
    Addr, Tmp as loadDouble
    Index, Tmp as loadDouble
    Tmp, Addr as storeDouble
    Tmp, Index as storeDouble

MoveDouble U:F:64, D:F:64, S:F:64
    Addr, Addr, Tmp

MoveZeroToDouble D:F:64
    Tmp

64: Move64ToDouble U:G:64, D:F:64
    Tmp, Tmp
    x86: Addr, Tmp as loadDouble
    Index, Tmp as loadDouble

Move32ToFloat U:G:32, D:F:32
    Tmp, Tmp
    x86: Addr, Tmp as loadFloat
    Index, Tmp as loadFloat

64: MoveDoubleTo64 U:F:64, D:G:64
    Tmp, Tmp
    Addr, Tmp as load64
    Index, Tmp as load64

MoveFloatTo32 U:F:32, D:G:32
    Tmp, Tmp
    Addr, Tmp as load32
    Index, Tmp as load32

Load8 U:G:8, ZD:G:32
    Addr, Tmp
    Index, Tmp

arm: LoadAcq8 U:G:8, ZD:G:32 /effects
    SimpleAddr, Tmp

Store8 U:G:8, D:G:8
    Tmp, Index
    Tmp, Addr
    x86: Imm, Index
    x86: Imm, Addr

arm: StoreRel8 U:G:8, D:G:8 /effects
    Tmp, SimpleAddr

Load8SignedExtendTo32 U:G:8, ZD:G:32
    Addr, Tmp
    Index, Tmp

arm: LoadAcq8SignedExtendTo32 U:G:8, ZD:G:32 /effects
    SimpleAddr, Tmp

Load16 U:G:16, ZD:G:32
    Addr, Tmp
    Index, Tmp

arm: LoadAcq16 U:G:16, ZD:G:32 /effects
    SimpleAddr, Tmp

Load16SignedExtendTo32 U:G:16, ZD:G:32
    Addr, Tmp
    Index, Tmp

arm: LoadAcq16SignedExtendTo32 U:G:16, ZD:G:32 /effects
    SimpleAddr, Tmp

Store16 U:G:16, D:G:16
    Tmp, Index
    Tmp, Addr
    x86: Imm, Index
    x86: Imm, Addr

arm: StoreRel16 U:G:16, D:G:16 /effects
    Tmp, SimpleAddr

arm: LoadAcq32 U:G:32, ZD:G:32 /effects
    SimpleAddr, Tmp

arm: StoreRel32 U:G:32, ZD:G:32 /effects
    Tmp, SimpleAddr

arm64: LoadAcq64 U:G:64, ZD:G:64 /effects
    SimpleAddr, Tmp

arm64: StoreRel64 U:G:64, ZD:G:64 /effects
    Tmp, SimpleAddr

x86: Xchg8 UD:G:8, UD:G:8 /effects
    Tmp, Addr
    Tmp, Index

x86: Xchg16 UD:G:16, UD:G:16 /effects
    Tmp, Addr
    Tmp, Index

x86: Xchg32 UD:G:32, UD:G:32 /effects
    Tmp, Addr
    Tmp, Index

x86_64: Xchg64 UD:G:64, UD:G:64 /effects
    Tmp, Addr
    Tmp, Index

arm64: ExtractUnsignedBitfield32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Imm, Imm, Tmp

arm64: ExtractUnsignedBitfield64 U:G:64, U:G:32, U:G:32, D:G:64
    Tmp, Imm, Imm, Tmp

arm64: InsertUnsignedBitfieldInZero32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Imm, Imm, Tmp

arm64: InsertUnsignedBitfieldInZero64 U:G:64, U:G:32, U:G:32, D:G:64
    Tmp, Imm, Imm, Tmp

arm64: InsertBitField32 U:G:32, U:G:32, U:G:32, UZD:G:32
    Tmp, Imm, Imm, Tmp

arm64: InsertBitField64 U:G:64, U:G:32, U:G:32, UD:G:64
    Tmp, Imm, Imm, Tmp

arm64: ClearBitField32 U:G:32, U:G:32, ZD:G:32
    Imm, Imm, Tmp

arm64: ClearBitField64 U:G:32, U:G:32, D:G:64
    Imm, Imm, Tmp

arm64: ClearBitsWithMask32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp

arm64: ClearBitsWithMask64 U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp

arm64: OrNot32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp

arm64: OrNot64 U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp

arm64: XorNot32 U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Tmp

arm64: XorNot64 U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Tmp

arm64: XorNotLeftShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: XorNotRightShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: XorNotUnsignedRightShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: XorNotLeftShift64 U:G:64, U:G:64, U:G:32, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: XorNotRightShift64 U:G:64, U:G:64, U:G:32, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: XorNotUnsignedRightShift64 U:G:64, U:G:64, U:G:32, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: ExtractInsertBitfieldAtLowEnd32 U:G:32, U:G:32, U:G:32, UZD:G:32
    Tmp, Imm, Imm, Tmp

arm64: ExtractInsertBitfieldAtLowEnd64 U:G:64, U:G:32, U:G:32, UD:G:64
    Tmp, Imm, Imm, Tmp

arm64: InsertSignedBitfieldInZero32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Imm, Imm, Tmp

arm64: InsertSignedBitfieldInZero64 U:G:64, U:G:32, U:G:32, D:G:64
    Tmp, Imm, Imm, Tmp

arm64: ExtractSignedBitfield32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Imm, Imm, Tmp

arm64: ExtractSignedBitfield64 U:G:64, U:G:32, U:G:32, D:G:64
    Tmp, Imm, Imm, Tmp

arm64: ExtractRegister32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: ExtractRegister64 U:G:64, U:G:32, U:G:32, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: AddLeftShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: AddRightShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: AddUnsignedRightShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: AddLeftShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: AddRightShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: AddUnsignedRightShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: SubLeftShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: SubRightShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: SubUnsignedRightShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: SubLeftShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: SubRightShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: SubUnsignedRightShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: AndLeftShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: AndRightShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: AndUnsignedRightShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: AndLeftShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: AndRightShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: AndUnsignedRightShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: XorLeftShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: XorRightShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: XorUnsignedRightShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: XorLeftShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: XorRightShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: XorUnsignedRightShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: OrLeftShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: OrRightShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: OrUnsignedRightShift32 U:G:32, U:G:32, U:G:32, ZD:G:32
    Tmp, Tmp, Imm, Tmp

arm64: OrLeftShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: OrRightShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

arm64: OrUnsignedRightShift64 U:G:64, U:G:64, U:G:64, D:G:64
    Tmp, Tmp, Imm, Tmp

# The first operand is rax.
# FIXME: This formulation means that the boolean result cannot be put in eax, even though all users
# of this would be OK with that.
# https://bugs.webkit.org/show_bug.cgi?id=169254
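# Reading the signature below: condition, expected value (rax, which also receives the old value),
# new value, memory location, boolean result.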
x86: AtomicStrongCAS8 U:G:32, UD:G:8, U:G:8, UD:G:8, ZD:G:8 /effects
    StatusCond, Tmp*, Tmp, Addr, Tmp
    StatusCond, Tmp*, Tmp, Index, Tmp

x86: AtomicStrongCAS16 U:G:32, UD:G:16, U:G:32, UD:G:16, ZD:G:8 /effects
    StatusCond, Tmp*, Tmp, Addr, Tmp
    StatusCond, Tmp*, Tmp, Index, Tmp

x86: AtomicStrongCAS32 U:G:32, UD:G:32, U:G:32, UD:G:32, ZD:G:8 /effects
    StatusCond, Tmp*, Tmp, Addr, Tmp
    StatusCond, Tmp*, Tmp, Index, Tmp

x86_64: AtomicStrongCAS64 U:G:32, UD:G:64, U:G:64, UD:G:64, ZD:G:8 /effects
    StatusCond, Tmp*, Tmp, Addr, Tmp
    StatusCond, Tmp*, Tmp, Index, Tmp

x86 arm64e: AtomicStrongCAS8 UD:G:8, U:G:8, UD:G:8 /effects
    x86: Tmp*, Tmp, Addr
    x86: Tmp*, Tmp, Index
    arm64e: Tmp, Tmp, SimpleAddr

x86 arm64e: AtomicStrongCAS16 UD:G:16, U:G:32, UD:G:16 /effects
    x86: Tmp*, Tmp, Addr
    x86: Tmp*, Tmp, Index
    arm64e: Tmp, Tmp, SimpleAddr

x86 arm64e: AtomicStrongCAS32 UD:G:32, U:G:32, UD:G:32 /effects
    x86: Tmp*, Tmp, Addr
    x86: Tmp*, Tmp, Index
    arm64e: Tmp, Tmp, SimpleAddr

x86_64 arm64e: AtomicStrongCAS64 UD:G:64, U:G:64, UD:G:64 /effects
    x86_64: Tmp*, Tmp, Addr
    x86_64: Tmp*, Tmp, Index
    arm64e: Tmp, Tmp, SimpleAddr

x86: BranchAtomicStrongCAS8 U:G:32, UD:G:8, U:G:8, UD:G:8 /branch /effects
    StatusCond, Tmp*, Tmp, Addr
    StatusCond, Tmp*, Tmp, Index

x86: BranchAtomicStrongCAS16 U:G:32, UD:G:16, U:G:32, UD:G:16 /branch /effects
    StatusCond, Tmp*, Tmp, Addr
    StatusCond, Tmp*, Tmp, Index

x86: BranchAtomicStrongCAS32 U:G:32, UD:G:32, U:G:32, UD:G:32 /branch /effects
    StatusCond, Tmp*, Tmp, Addr
    StatusCond, Tmp*, Tmp, Index

x86_64: BranchAtomicStrongCAS64 U:G:32, UD:G:64, U:G:64, UD:G:64 /branch /effects
    StatusCond, Tmp*, Tmp, Addr
    StatusCond, Tmp*, Tmp, Index

x86: AtomicAdd8 U:G:8, UD:G:8 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicAdd16 U:G:16, UD:G:16 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicAdd32 U:G:32, UD:G:32 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86_64: AtomicAdd64 U:G:64, UD:G:64 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicSub8 U:G:8, UD:G:8 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicSub16 U:G:16, UD:G:16 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicSub32 U:G:32, UD:G:32 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86_64: AtomicSub64 U:G:64, UD:G:64 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicAnd8 U:G:8, UD:G:8 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicAnd16 U:G:16, UD:G:16 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicAnd32 U:G:32, UD:G:32 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86_64: AtomicAnd64 U:G:64, UD:G:64 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicOr8 U:G:8, UD:G:8 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicOr16 U:G:16, UD:G:16 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicOr32 U:G:32, UD:G:32 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86_64: AtomicOr64 U:G:64, UD:G:64 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicXor8 U:G:8, UD:G:8 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicXor16 U:G:16, UD:G:16 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicXor32 U:G:32, UD:G:32 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86_64: AtomicXor64 U:G:64, UD:G:64 /effects
    Imm, Addr
    Imm, Index
    Tmp, Addr
    Tmp, Index

x86: AtomicNeg8 UD:G:8 /effects
    Addr
    Index

x86: AtomicNeg16 UD:G:16 /effects
    Addr
    Index

x86: AtomicNeg32 UD:G:32 /effects
    Addr
    Index

x86_64: AtomicNeg64 UD:G:64 /effects
    Addr
    Index

x86: AtomicNot8 UD:G:8 /effects
    Addr
    Index

x86: AtomicNot16 UD:G:16 /effects
    Addr
    Index

x86: AtomicNot32 UD:G:32 /effects
    Addr
    Index

x86_64: AtomicNot64 UD:G:64 /effects
    Addr
    Index

x86: AtomicXchgAdd8 UD:G:8, UD:G:8 /effects
    Tmp, Addr
    Tmp, Index

x86: AtomicXchgAdd16 UD:G:16, UD:G:16 /effects
    Tmp, Addr
    Tmp, Index

x86: AtomicXchgAdd32 UD:G:32, UD:G:32 /effects
    Tmp, Addr
    Tmp, Index

x86_64: AtomicXchgAdd64 UD:G:64, UD:G:64 /effects
    Tmp, Addr
    Tmp, Index

x86: AtomicXchg8 UD:G:8, UD:G:8 /effects
    Tmp, Addr
    Tmp, Index

x86: AtomicXchg16 UD:G:16, UD:G:16 /effects
    Tmp, Addr
    Tmp, Index

x86: AtomicXchg32 UD:G:32, UD:G:32 /effects
    Tmp, Addr
    Tmp, Index

x86_64: AtomicXchg64 UD:G:64, UD:G:64 /effects
    Tmp, Addr
    Tmp, Index

arm64: LoadLink8 U:G:8, ZD:G:8 /effects
    SimpleAddr, Tmp

arm64: LoadLinkAcq8 U:G:8, ZD:G:8 /effects
    SimpleAddr, Tmp

# Super confusing fact: the status result (the final Tmp) is 0 to mean success, 1 to mean failure,
# matching arm64's store-exclusive status convention.
arm64: StoreCond8 U:G:8, D:G:8, EZD:G:8 /effects
    Tmp, SimpleAddr, Tmp

arm64: StoreCondRel8 U:G:8, D:G:8, EZD:G:8 /effects
    Tmp, SimpleAddr, Tmp
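
# A schematic use (pseudo-Air; exact operand forms elided): an emulated CAS loop built from these
# opcodes retries while the status temporary is non-zero, since 0 means the store succeeded:
#
#     loop:
#         LoadLinkAcq8 %addr, %old
#         Branch8 NotEqual, %old, %expected -> fail
#         StoreCondRel8 %new, %addr, %status
#         BranchTest8 NonZero, %status, %status -> loop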

arm64: LoadLink16 U:G:16, ZD:G:16 /effects
    SimpleAddr, Tmp

arm64: LoadLinkAcq16 U:G:16, ZD:G:16 /effects
    SimpleAddr, Tmp

arm64: StoreCond16 U:G:16, D:G:16, EZD:G:8 /effects
    Tmp, SimpleAddr, Tmp

arm64: StoreCondRel16 U:G:16, D:G:16, EZD:G:8 /effects
    Tmp, SimpleAddr, Tmp

arm64: LoadLink32 U:G:32, ZD:G:32 /effects
    SimpleAddr, Tmp

arm64: LoadLinkAcq32 U:G:32, ZD:G:32 /effects
    SimpleAddr, Tmp

arm64: StoreCond32 U:G:32, D:G:32, EZD:G:8 /effects
    Tmp, SimpleAddr, Tmp

arm64: StoreCondRel32 U:G:32, D:G:32, EZD:G:8 /effects
    Tmp, SimpleAddr, Tmp

arm64: LoadLink64 U:G:64, ZD:G:64 /effects
    SimpleAddr, Tmp

arm64: LoadLinkAcq64 U:G:64, ZD:G:64 /effects
    SimpleAddr, Tmp

arm64: StoreCond64 U:G:64, D:G:64, EZD:G:8 /effects
    Tmp, SimpleAddr, Tmp

arm64: StoreCondRel64 U:G:64, D:G:64, EZD:G:8 /effects
    Tmp, SimpleAddr, Tmp

arm64: Depend32 U:G:32, ZD:G:32
    Tmp, Tmp

arm64: Depend64 U:G:64, ZD:G:64
    Tmp, Tmp

arm64e: AtomicXchgAdd8 U:G:8, UD:G:8, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgAdd16 U:G:16, UD:G:16, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgAdd32 U:G:32, UD:G:32, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgAdd64 U:G:64, UD:G:64, ZD:G:64 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgOr8 U:G:8, UD:G:8, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgOr16 U:G:16, UD:G:16, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgOr32 U:G:32, UD:G:32, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgOr64 U:G:64, UD:G:64, ZD:G:64 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgClear8 U:G:8, UD:G:8, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgClear16 U:G:16, UD:G:16, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgClear32 U:G:32, UD:G:32, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgClear64 U:G:64, UD:G:64, ZD:G:64 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgXor8 U:G:8, UD:G:8, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgXor16 U:G:16, UD:G:16, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgXor32 U:G:32, UD:G:32, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchgXor64 U:G:64, UD:G:64, ZD:G:64 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchg8 U:G:8, UD:G:8, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchg16 U:G:16, UD:G:16, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchg32 U:G:32, UD:G:32, ZD:G:32 /effects
    Tmp, SimpleAddr, Tmp

arm64e: AtomicXchg64 U:G:64, UD:G:64, ZD:G:64 /effects
    Tmp, SimpleAddr, Tmp

Compare32 U:G:32, U:G:32, U:G:32, ZD:G:32
    RelCond, Tmp, Tmp, Tmp
    RelCond, Tmp, Imm, Tmp

64: Compare64 U:G:32, U:G:64, U:G:64, ZD:G:32
    RelCond, Tmp, Tmp, Tmp
    x86: RelCond, Tmp, Imm, Tmp

Test32 U:G:32, U:G:32, U:G:32, ZD:G:32
    x86: ResCond, Addr, Imm, Tmp
    ResCond, Tmp, Tmp, Tmp
    ResCond, Tmp, BitImm, Tmp

64: Test64 U:G:32, U:G:64, U:G:64, ZD:G:32
    x86: ResCond, Tmp, Imm, Tmp
    ResCond, Tmp, Tmp, Tmp

CompareDouble U:G:32, U:F:64, U:F:64, ZD:G:32
    DoubleCond, Tmp, Tmp, Tmp

CompareFloat U:G:32, U:F:32, U:F:32, ZD:G:32
    DoubleCond, Tmp, Tmp, Tmp

# Note that branches have some logic in AirOptimizeBlockOrder.cpp. If you add new branches, please
# make sure you opt them into the block order optimizations.

Branch8 U:G:32, U:G:8, U:G:8 /branch
    x86: RelCond, Addr, Imm
    x86: RelCond, Index, Imm

Branch32 U:G:32, U:G:32, U:G:32 /branch
    x86: RelCond, Addr, Imm
    RelCond, Tmp, Tmp
    RelCond, Tmp, Imm
    x86: RelCond, Tmp, Addr
    x86: RelCond, Addr, Tmp
    x86: RelCond, Index, Imm

64: Branch64 U:G:32, U:G:64, U:G:64 /branch
    RelCond, Tmp, Tmp
    RelCond, Tmp, Imm
    x86: RelCond, Tmp, Addr
    x86: RelCond, Addr, Tmp
    x86: RelCond, Addr, Imm
    x86: RelCond, Index, Tmp

BranchTest8 U:G:32, U:G:8, U:G:8 /branch
    x86: ResCond, Addr, BitImm
    x86: ResCond, Index, BitImm

BranchTest32 U:G:32, U:G:32, U:G:32 /branch
    ResCond, Tmp, Tmp
    ResCond, Tmp, BitImm
    x86: ResCond, Addr, BitImm
    x86: ResCond, Index, BitImm

# Warning: forms that take an immediate will sign-extend their immediate. You probably want
# BranchTest32 in most cases where you use an immediate.
64: BranchTest64 U:G:32, U:G:64, U:G:64 /branch
    ResCond, Tmp, Tmp
    arm64: ResCond, Tmp, BitImm64
    x86: ResCond, Tmp, BitImm
    x86: ResCond, Addr, BitImm
    x86: ResCond, Addr, Tmp
    x86: ResCond, Index, BitImm

x86_64: BranchTestBit64 U:G:32, U:G:64, U:G:8 /branch
    ResCond, Tmp, Imm
    ResCond, Addr, Imm
    ResCond, Tmp, Tmp

x86: BranchTestBit32 U:G:32, U:G:32, U:G:8 /branch
    ResCond, Tmp, Imm
    ResCond, Addr, Imm
    ResCond, Tmp, Tmp

BranchDouble U:G:32, U:F:64, U:F:64 /branch
    DoubleCond, Tmp, Tmp

BranchFloat U:G:32, U:F:32, U:F:32 /branch
    DoubleCond, Tmp, Tmp

BranchAdd32 U:G:32, U:G:32, U:G:32, ZD:G:32 /branch
    ResCond, Tmp, Tmp, Tmp
    x86: ResCond, Tmp, Addr, Tmp
    x86: ResCond, Addr, Tmp, Tmp

BranchAdd32 U:G:32, U:G:32, UZD:G:32 /branch
    ResCond, Tmp, Tmp
    ResCond, Imm, Tmp
    x86: ResCond, Imm, Addr
    x86: ResCond, Tmp, Addr
    x86: ResCond, Addr, Tmp

64: BranchAdd64 U:G:32, U:G:64, U:G:64, ZD:G:64 /branch
    ResCond, Tmp, Tmp, Tmp
    x86: ResCond, Tmp, Addr, Tmp
    x86: ResCond, Addr, Tmp, Tmp

64: BranchAdd64 U:G:32, U:G:64, UD:G:64 /branch
    ResCond, Imm, Tmp
    ResCond, Tmp, Tmp
    x86: ResCond, Addr, Tmp

x86: BranchMul32 U:G:32, U:G:32, UZD:G:32 /branch
    ResCond, Tmp, Tmp
    ResCond, Addr, Tmp

x86: BranchMul32 U:G:32, U:G:32, U:G:32, ZD:G:32 /branch
    ResCond, Tmp, Imm, Tmp

arm64: BranchMul32 U:G:32, U:G:32, U:G:32, S:G:32, S:G:32, ZD:G:32 /branch
    ResCond, Tmp, Tmp, Tmp, Tmp, Tmp

x86_64: BranchMul64 U:G:32, U:G:64, UZD:G:64 /branch
    ResCond, Tmp, Tmp

arm64: BranchMul64 U:G:32, U:G:64, U:G:64, S:G:64, S:G:64, ZD:G:64 /branch
    ResCond, Tmp, Tmp, Tmp, Tmp, Tmp

BranchSub32 U:G:32, U:G:32, UZD:G:32 /branch
    ResCond, Tmp, Tmp
    ResCond, Imm, Tmp
    x86: ResCond, Imm, Addr
    x86: ResCond, Tmp, Addr
    x86: ResCond, Addr, Tmp

64: BranchSub64 U:G:32, U:G:64, UD:G:64 /branch
    ResCond, Imm, Tmp
    ResCond, Tmp, Tmp

BranchNeg32 U:G:32, UZD:G:32 /branch
    ResCond, Tmp

64: BranchNeg64 U:G:32, UZD:G:64 /branch
    ResCond, Tmp

MoveConditionally32 U:G:32, U:G:32, U:G:32, U:G:Ptr, UD:G:Ptr
    RelCond, Tmp, Tmp, Tmp, Tmp

MoveConditionally32 U:G:32, U:G:32, U:G:32, U:G:Ptr, U:G:Ptr, D:G:Ptr
    RelCond, Tmp, Tmp, Tmp, Tmp, Tmp
    RelCond, Tmp, Imm, Tmp, Tmp, Tmp

64: MoveConditionally64 U:G:32, U:G:64, U:G:64, U:G:Ptr, UD:G:Ptr
    RelCond, Tmp, Tmp, Tmp, Tmp

64: MoveConditionally64 U:G:32, U:G:64, U:G:64, U:G:Ptr, U:G:Ptr, D:G:Ptr
    RelCond, Tmp, Tmp, Tmp, Tmp, Tmp
    RelCond, Tmp, Imm, Tmp, Tmp, Tmp

MoveConditionallyTest32 U:G:32, U:G:32, U:G:32, U:G:Ptr, UD:G:Ptr
    ResCond, Tmp, Tmp, Tmp, Tmp
    x86: ResCond, Tmp, Imm, Tmp, Tmp

MoveConditionallyTest32 U:G:32, U:G:32, U:G:32, U:G:Ptr, U:G:Ptr, D:G:Ptr
    ResCond, Tmp, Tmp, Tmp, Tmp, Tmp
    ResCond, Tmp, BitImm, Tmp, Tmp, Tmp

64: MoveConditionallyTest64 U:G:32, U:G:64, U:G:64, U:G:Ptr, UD:G:Ptr
    ResCond, Tmp, Tmp, Tmp, Tmp
    x86: ResCond, Tmp, Imm, Tmp, Tmp

64: MoveConditionallyTest64 U:G:32, U:G:64, U:G:64, U:G:Ptr, U:G:Ptr, D:G:Ptr
    ResCond, Tmp, Tmp, Tmp, Tmp, Tmp
    x86_64: ResCond, Tmp, Imm, Tmp, Tmp, Tmp

MoveConditionallyDouble U:G:32, U:F:64, U:F:64, U:G:Ptr, U:G:Ptr, D:G:Ptr
    DoubleCond, Tmp, Tmp, Tmp, Tmp, Tmp

MoveConditionallyDouble U:G:32, U:F:64, U:F:64, U:G:Ptr, UD:G:Ptr
    DoubleCond, Tmp, Tmp, Tmp, Tmp

MoveConditionallyFloat U:G:32, U:F:32, U:F:32, U:G:Ptr, U:G:Ptr, D:G:Ptr
    DoubleCond, Tmp, Tmp, Tmp, Tmp, Tmp

MoveConditionallyFloat U:G:32, U:F:32, U:F:32, U:G:Ptr, UD:G:Ptr
    DoubleCond, Tmp, Tmp, Tmp, Tmp

MoveDoubleConditionally32 U:G:32, U:G:32, U:G:32, U:F:64, U:F:64, D:F:64
    RelCond, Tmp, Tmp, Tmp, Tmp, Tmp
    RelCond, Tmp, Imm, Tmp, Tmp, Tmp
    x86: RelCond, Addr, Imm, Tmp, Tmp, Tmp
    x86: RelCond, Tmp, Addr, Tmp, Tmp, Tmp
    x86: RelCond, Addr, Tmp, Tmp, Tmp, Tmp
    x86: RelCond, Index, Imm, Tmp, Tmp, Tmp

64: MoveDoubleConditionally64 U:G:32, U:G:64, U:G:64, U:F:64, U:F:64, D:F:64
    RelCond, Tmp, Tmp, Tmp, Tmp, Tmp
    RelCond, Tmp, Imm, Tmp, Tmp, Tmp
    x86_64: RelCond, Tmp, Addr, Tmp, Tmp, Tmp
    x86_64: RelCond, Addr, Tmp, Tmp, Tmp, Tmp
    x86_64: RelCond, Addr, Imm, Tmp, Tmp, Tmp
    x86_64: RelCond, Index, Tmp, Tmp, Tmp, Tmp

MoveDoubleConditionallyTest32 U:G:32, U:G:32, U:G:32, U:F:64, U:F:64, D:F:64
    ResCond, Tmp, Tmp, Tmp, Tmp, Tmp
    ResCond, Tmp, BitImm, Tmp, Tmp, Tmp
    x86: ResCond, Addr, Imm, Tmp, Tmp, Tmp
    x86: ResCond, Index, Imm, Tmp, Tmp, Tmp

# Warning: forms that take an immediate will sign-extend their immediate. You probably want
# MoveDoubleConditionallyTest32 in most cases where you use an immediate.
64: MoveDoubleConditionallyTest64 U:G:32, U:G:64, U:G:64, U:F:64, U:F:64, D:F:64
    ResCond, Tmp, Tmp, Tmp, Tmp, Tmp
    x86_64: ResCond, Tmp, Imm, Tmp, Tmp, Tmp
    x86_64: ResCond, Addr, Imm, Tmp, Tmp, Tmp
    x86_64: ResCond, Addr, Tmp, Tmp, Tmp, Tmp
    x86_64: ResCond, Index, Imm, Tmp, Tmp, Tmp

MoveDoubleConditionallyDouble U:G:32, U:F:64, U:F:64, U:F:64, U:F:64, D:F:64
    DoubleCond, Tmp, Tmp, Tmp, Tmp, Tmp

MoveDoubleConditionallyFloat U:G:32, U:F:32, U:F:32, U:F:64, U:F:64, D:F:64
    DoubleCond, Tmp, Tmp, Tmp, Tmp, Tmp

MemoryFence /effects
StoreFence /effects
LoadFence /effects

Jump /branch

RetVoid /return

Ret32 U:G:32 /return
    Tmp

64: Ret64 U:G:64 /return
    Tmp

RetFloat U:F:32 /return
    Tmp

RetDouble U:F:64 /return
    Tmp

Oops /terminal

# This is a terminal but we express it as a Custom because we don't want it to have a code
# generator.
custom EntrySwitch

# A Shuffle is a multi-source, multi-destination move. All of the moves happen as if
# simultaneously, so each destination receives the old value of its source. The moves are specified
# as triplets of src, dst, and width. For example, you can request a swap this way:
#     Shuffle %tmp1, %tmp2, 64, %tmp2, %tmp1, 64
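# The same simultaneity rule means that a chain like the following (a sketch with the same
# semantics) copies the old value of %tmp2 into %tmp3, not the value just moved from %tmp1:
#     Shuffle %tmp1, %tmp2, 64, %tmp2, %tmp3, 64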
custom Shuffle

# Air allows for exotic behavior. A Patch's behavior is determined entirely by the Special operand,
# which must be the first operand.
custom Patch

# Instructions used for lowering C calls. These don't make it to Air generation. They get lowered to
# something else first. The origin Value must be a CCallValue.
custom CCall
custom ColdCCall

# This is a special wasm opcode that branches to a trap handler. It uses the generator located in
# Air::Code to produce the side-exit code.
custom WasmBoundsCheck