/*
 * Copyright (c) 2013, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package org.graalvm.compiler.core.aarch64;

import static org.graalvm.compiler.lir.LIRValueUtil.asJavaConstant;
import static org.graalvm.compiler.lir.LIRValueUtil.isJavaConstant;

import java.util.function.Function;

import org.graalvm.compiler.asm.aarch64.AArch64Address.AddressingMode;
import org.graalvm.compiler.asm.aarch64.AArch64Assembler.ConditionFlag;
import org.graalvm.compiler.asm.aarch64.AArch64MacroAssembler;
import org.graalvm.compiler.core.common.LIRKind;
import org.graalvm.compiler.core.common.calc.Condition;
import org.graalvm.compiler.core.common.spi.LIRKindTool;
import org.graalvm.compiler.debug.GraalError;
import org.graalvm.compiler.lir.LIRFrameState;
import org.graalvm.compiler.lir.LIRValueUtil;
import org.graalvm.compiler.lir.LabelRef;
import org.graalvm.compiler.lir.StandardOp;
import org.graalvm.compiler.lir.SwitchStrategy;
import org.graalvm.compiler.lir.Variable;
import org.graalvm.compiler.lir.aarch64.AArch64AddressValue;
import org.graalvm.compiler.lir.aarch64.AArch64ArithmeticOp;
import org.graalvm.compiler.lir.aarch64.AArch64ArrayEqualsOp;
import org.graalvm.compiler.lir.aarch64.AArch64ByteSwapOp;
import org.graalvm.compiler.lir.aarch64.AArch64Compare;
import org.graalvm.compiler.lir.aarch64.AArch64ControlFlow;
import org.graalvm.compiler.lir.aarch64.AArch64ControlFlow.BranchOp;
import org.graalvm.compiler.lir.aarch64.AArch64ControlFlow.CondMoveOp;
import org.graalvm.compiler.lir.aarch64.AArch64ControlFlow.StrategySwitchOp;
import org.graalvm.compiler.lir.aarch64.AArch64ControlFlow.TableSwitchOp;
import org.graalvm.compiler.lir.aarch64.AArch64Move;
import org.graalvm.compiler.lir.aarch64.AArch64Move.CompareAndSwapOp;
import org.graalvm.compiler.lir.aarch64.AArch64Move.MembarOp;
import org.graalvm.compiler.lir.aarch64.AArch64PauseOp;
import org.graalvm.compiler.lir.gen.LIRGenerationResult;
import org.graalvm.compiler.lir.gen.LIRGenerator;
import org.graalvm.compiler.phases.util.Providers;

import jdk.vm.ci.aarch64.AArch64Kind;
import jdk.vm.ci.code.CallingConvention;
import jdk.vm.ci.code.RegisterValue;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.JavaKind;
import jdk.vm.ci.meta.PlatformKind;
import jdk.vm.ci.meta.PrimitiveConstant;
import jdk.vm.ci.meta.Value;
import jdk.vm.ci.meta.ValueKind;

public abstract class AArch64LIRGenerator extends LIRGenerator {

    public AArch64LIRGenerator(LIRKindTool lirKindTool, AArch64ArithmeticLIRGenerator arithmeticLIRGen, MoveFactory moveFactory, Providers providers, LIRGenerationResult lirGenRes) {
        super(lirKindTool, arithmeticLIRGen, moveFactory, providers, lirGenRes);
    }

    /**
     * Checks whether the supplied constant can be used without loading it into a register for store
     * operations, i.e., on the right hand side of a memory access.
     *
     * @param c The constant to check.
     * @return True if the constant can be used directly, false if the constant needs to be in a
     *         register.
     */
    protected static final boolean canStoreConstant(JavaConstant c) {
        // Our own code never calls this since we can't make a definite statement about whether or
        // not we can inline a constant without knowing what kind of operation we execute. Let's be
        // optimistic here and fix up mistakes later.
        return true;
    }

    /**
     * AArch64 cannot use anything smaller than a word in any instruction other than load and store.
     */
    @Override
    public <K extends ValueKind<K>> K toRegisterKind(K kind) {
        switch ((AArch64Kind) kind.getPlatformKind()) {
            case BYTE:
            case WORD:
                return kind.changeType(AArch64Kind.DWORD);
            default:
                return kind;
        }
    }

    @Override
    public void emitNullCheck(Value address, LIRFrameState state) {
        append(new AArch64Move.NullCheckOp(asAddressValue(address), state));
    }

    @Override
    public Variable emitAddress(AllocatableValue stackslot) {
        Variable result = newVariable(LIRKind.value(target().arch.getWordKind()));
        append(new AArch64Move.StackLoadAddressOp(result, stackslot));
        return result;
    }

    public AArch64AddressValue asAddressValue(Value address) {
        if (address instanceof AArch64AddressValue) {
            return (AArch64AddressValue) address;
        } else {
            return new AArch64AddressValue(address.getValueKind(), asAllocatable(address), Value.ILLEGAL, 0, 1, AddressingMode.BASE_REGISTER_ONLY);
        }
    }

    @Override
    public Variable emitLogicCompareAndSwap(Value address, Value expectedValue, Value newValue, Value trueValue, Value falseValue) {
        Variable prevValue = newVariable(expectedValue.getValueKind());
        Variable scratch = newVariable(LIRKind.value(AArch64Kind.DWORD));
        append(new CompareAndSwapOp(prevValue, loadReg(expectedValue), loadReg(newValue), asAllocatable(address), scratch));
        assert trueValue.getValueKind().equals(falseValue.getValueKind());
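        // The conditional move below relies on CompareAndSwapOp leaving the condition flags set
        // according to whether the exchange succeeded (EQ on success).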
        Variable result = newVariable(trueValue.getValueKind());
        append(new CondMoveOp(result, ConditionFlag.EQ, asAllocatable(trueValue), asAllocatable(falseValue)));
        return result;
    }

    @Override
    public Variable emitValueCompareAndSwap(Value address, Value expectedValue, Value newValue) {
        Variable result = newVariable(newValue.getValueKind());
        Variable scratch = newVariable(LIRKind.value(AArch64Kind.WORD));
        append(new CompareAndSwapOp(result, loadNonCompareConst(expectedValue), loadReg(newValue), asAllocatable(address), scratch));
        return result;
    }

    @Override
    public void emitMembar(int barriers) {
        int necessaryBarriers = target().arch.requiredBarriers(barriers);
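        // Emit a barrier only on multiprocessor targets and only if the architecture actually
        // requires any of the requested barriers.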
        if (target().isMP && necessaryBarriers != 0) {
            append(new MembarOp(necessaryBarriers));
        }
    }

    @Override
    public void emitJump(LabelRef label) {
        assert label != null;
        append(new StandardOp.JumpOp(label));
    }

    @Override
    public void emitOverflowCheckBranch(LabelRef overflow, LabelRef noOverflow, LIRKind cmpKind, double overflowProbability) {
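        // VS (overflow flag set) selects the overflow successor.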
        append(new AArch64ControlFlow.BranchOp(ConditionFlag.VS, overflow, noOverflow, overflowProbability));
    }

    /**
     * Branches to trueDestination if (left & right) == 0, otherwise branches to falseDestination.
     *
     * @param left Integer kind. Non null.
     * @param right Integer kind. Non null.
     * @param trueDestination destination if (left & right) == 0. Non null.
     * @param falseDestination destination if (left & right) != 0. Non null.
     * @param trueSuccessorProbability historic probability that the comparison is true
     */
    @Override
    public void emitIntegerTestBranch(Value left, Value right, LabelRef trueDestination, LabelRef falseDestination, double trueSuccessorProbability) {
        assert ((AArch64Kind) left.getPlatformKind()).isInteger() && left.getPlatformKind() == right.getPlatformKind();
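        // ANDS sets the condition flags as a side effect, so EQ means (left & right) == 0.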
        ((AArch64ArithmeticLIRGenerator) getArithmetic()).emitBinary(LIRKind.combine(left, right), AArch64ArithmeticOp.ANDS, true, left, right);
        append(new AArch64ControlFlow.BranchOp(ConditionFlag.EQ, trueDestination, falseDestination, trueSuccessorProbability));
    }

    /**
     * Conditionally moves trueValue into a new variable if the condition (taking unorderedIsTrue
     * into account) holds, else falseValue.
     *
     * @param left Arbitrary value. Has to have the same type as right. Non null.
     * @param right Arbitrary value. Has to have the same type as left. Non null.
     * @param cond condition that decides whether to move trueValue or falseValue into result. Non
     *            null.
     * @param unorderedIsTrue defines whether floating-point comparisons consider unordered true or
     *            not. Ignored for integer comparisons.
     * @param trueValue arbitrary value of the same type as falseValue. Non null.
     * @param falseValue arbitrary value of the same type as trueValue. Non null.
     * @return value containing trueValue if the condition (taking unorderedIsTrue into account)
     *         holds, else falseValue. Non null.
     */
    @Override
    public Variable emitConditionalMove(PlatformKind cmpKind, Value left, Value right, Condition cond, boolean unorderedIsTrue, Value trueValue, Value falseValue) {
        boolean mirrored = emitCompare(cmpKind, left, right, cond, unorderedIsTrue);
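        // If emitCompare swapped the operands, mirror the condition and flip unorderedIsTrue so
        // that the chosen condition flag still tests the original relation.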
        Condition finalCondition = mirrored ? cond.mirror() : cond;
        boolean finalUnorderedIsTrue = mirrored ? !unorderedIsTrue : unorderedIsTrue;
        ConditionFlag cmpCondition = toConditionFlag(((AArch64Kind) cmpKind).isInteger(), finalCondition, finalUnorderedIsTrue);
        Variable result = newVariable(trueValue.getValueKind());
        append(new CondMoveOp(result, cmpCondition, loadReg(trueValue), loadReg(falseValue)));
        return result;
    }

    @Override
    public void emitCompareBranch(PlatformKind cmpKind, Value left, Value right, Condition cond, boolean unorderedIsTrue, LabelRef trueDestination, LabelRef falseDestination,
                    double trueDestinationProbability) {
        boolean mirrored = emitCompare(cmpKind, left, right, cond, unorderedIsTrue);
        Condition finalCondition = mirrored ? cond.mirror() : cond;
        boolean finalUnorderedIsTrue = mirrored ? !unorderedIsTrue : unorderedIsTrue;
        ConditionFlag cmpCondition = toConditionFlag(((AArch64Kind) cmpKind).isInteger(), finalCondition, finalUnorderedIsTrue);
        append(new BranchOp(cmpCondition, trueDestination, falseDestination, trueDestinationProbability));
    }

    private static ConditionFlag toConditionFlag(boolean isInt, Condition cond, boolean unorderedIsTrue) {
        return isInt ? toIntConditionFlag(cond) : toFloatConditionFlag(cond, unorderedIsTrue);
    }

    /**
     * Takes a Condition and an unorderedIsTrue flag and returns the correct AArch64-specific
     * ConditionFlag. Note: This is only correct if the emitCompare code for floats has correctly
     * handled the cases 'EQ && unorderedIsTrue' and 'NE && !unorderedIsTrue'!
     */
    private static ConditionFlag toFloatConditionFlag(Condition cond, boolean unorderedIsTrue) {
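        // On AArch64, an unordered floating-point compare sets NZCV to 0b0011, so for each
        // condition we pick the flag that evaluates to unorderedIsTrue in the unordered case
        // (e.g. LO instead of LT when unordered must yield false).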
        switch (cond) {
            case LT:
                return unorderedIsTrue ? ConditionFlag.LT : ConditionFlag.LO;
            case LE:
                return unorderedIsTrue ? ConditionFlag.LE : ConditionFlag.LS;
            case GE:
                return unorderedIsTrue ? ConditionFlag.PL : ConditionFlag.GE;
            case GT:
                return unorderedIsTrue ? ConditionFlag.HI : ConditionFlag.GT;
            case EQ:
                return ConditionFlag.EQ;
            case NE:
                return ConditionFlag.NE;
            default:
                throw GraalError.shouldNotReachHere();
        }
    }

    /**
     * Takes a Condition and returns the correct AArch64-specific ConditionFlag.
     */
    private static ConditionFlag toIntConditionFlag(Condition cond) {
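        // Condition.AE/BE/AT/BT are unsigned comparisons; they map onto the AArch64 unsigned
        // condition codes HS (higher or same), LS (lower or same), HI (higher) and LO (lower).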
        switch (cond) {
            case EQ:
                return ConditionFlag.EQ;
            case NE:
                return ConditionFlag.NE;
            case LT:
                return ConditionFlag.LT;
            case LE:
                return ConditionFlag.LE;
            case GT:
                return ConditionFlag.GT;
            case GE:
                return ConditionFlag.GE;
            case AE:
                return ConditionFlag.HS;
            case BE:
                return ConditionFlag.LS;
            case AT:
                return ConditionFlag.HI;
            case BT:
                return ConditionFlag.LO;
            default:
                throw GraalError.shouldNotReachHere();
        }
    }

    /**
     * This method emits the compare instruction, and may reorder the operands. It returns true if
     * it did so.
     *
     * @param a the left operand of the comparison. Has to have the same type as b. Non null.
     * @param b the right operand of the comparison. Has to have the same type as a. Non null.
     * @return true if mirrored (i.e. "b cmp a" instead of "a cmp b" was done).
     */
    protected boolean emitCompare(PlatformKind cmpKind, Value a, Value b, Condition condition, boolean unorderedIsTrue) {
        Value left;
        Value right;
        boolean mirrored;
        AArch64Kind kind = (AArch64Kind) cmpKind;
        if (kind.isInteger()) {
            Value aExt = a;
            Value bExt = b;

            int compareBytes = cmpKind.getSizeInBytes();
            // AArch64 compares 32 or 64 bits: sign extend a and b as required.
            if (compareBytes < a.getPlatformKind().getSizeInBytes()) {
                aExt = arithmeticLIRGen.emitSignExtend(a, compareBytes * 8, 64);
            }
            if (compareBytes < b.getPlatformKind().getSizeInBytes()) {
                bExt = arithmeticLIRGen.emitSignExtend(b, compareBytes * 8, 64);
            }

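            // Prefer to load a variable operand on the left so that a constant operand ends up on
            // the right, where it may be usable as an immediate; record the swap so callers can
            // mirror the condition.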
            if (LIRValueUtil.isVariable(bExt)) {
                left = load(bExt);
                right = loadNonConst(aExt);
                mirrored = true;
            } else {
                left = load(aExt);
                right = loadNonConst(bExt);
                mirrored = false;
            }
            append(new AArch64Compare.CompareOp(left, loadNonCompareConst(right)));
        } else if (kind.isSIMD()) {
            if (AArch64Compare.FloatCompareOp.isFloatCmpConstant(a, condition, unorderedIsTrue)) {
                left = load(b);
                right = a;
                mirrored = true;
            } else if (AArch64Compare.FloatCompareOp.isFloatCmpConstant(b, condition, unorderedIsTrue)) {
                left = load(a);
                right = b;
                mirrored = false;
            } else {
                left = load(a);
                right = loadReg(b);
                mirrored = false;
            }
            append(new AArch64Compare.FloatCompareOp(left, asAllocatable(right), condition, unorderedIsTrue));
        } else {
            throw GraalError.shouldNotReachHere();
        }
        return mirrored;
    }

    /**
     * If value is a constant that cannot be used directly with a gpCompare instruction, loads it
     * into a register and returns the register, otherwise returns the constant unchanged.
     */
    protected Value loadNonCompareConst(Value value) {
        if (!isCompareConstant(value)) {
            return loadReg(value);
        }
        return value;
    }

    /**
     * Checks whether value can be used directly with a gpCompare instruction. This is <b>not</b>
     * the same as {@link AArch64ArithmeticLIRGenerator#isArithmeticConstant(JavaConstant)}, because
     * 0.0 is a valid compare constant for floats, while there are no arithmetic constants for
     * floats.
     *
     * @param value any type. Non null.
     * @return true if value can be used directly in a comparison instruction, false otherwise.
     */
    public boolean isCompareConstant(Value value) {
        if (isJavaConstant(value)) {
            JavaConstant constant = asJavaConstant(value);
            if (constant instanceof PrimitiveConstant) {
                final long longValue = constant.asLong();
                long maskedValue;
                switch (constant.getJavaKind()) {
                    case Boolean:
                    case Byte:
                        maskedValue = longValue & 0xFF;
                        break;
                    case Char:
                    case Short:
                        maskedValue = longValue & 0xFFFF;
                        break;
                    case Int:
                        maskedValue = longValue & 0xFFFF_FFFFL;
                        break;
                    case Long:
                        maskedValue = longValue;
                        break;
                    default:
                        throw GraalError.shouldNotReachHere();
                }
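                // The masked (zero-extended) value must fit AArch64's arithmetic immediate
                // encoding, essentially a 12-bit unsigned value optionally shifted left by 12 bits.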
                return AArch64MacroAssembler.isArithmeticImmediate(maskedValue);
            } else {
                return constant.isDefaultForKind();
            }
        }
        return false;
    }

    /**
     * Moves trueValue into result if (left & right) == 0, else falseValue.
     *
     * @param left Integer kind. Non null.
     * @param right Integer kind. Non null.
     * @param trueValue Integer kind. Non null.
     * @param falseValue Integer kind. Non null.
     * @return virtual register containing trueValue if (left & right) == 0, else falseValue.
     */
    @Override
    public Variable emitIntegerTestMove(Value left, Value right, Value trueValue, Value falseValue) {
        assert ((AArch64Kind) left.getPlatformKind()).isInteger() && ((AArch64Kind) right.getPlatformKind()).isInteger();
        assert ((AArch64Kind) trueValue.getPlatformKind()).isInteger() && ((AArch64Kind) falseValue.getPlatformKind()).isInteger();
        ((AArch64ArithmeticLIRGenerator) getArithmetic()).emitBinary(left.getValueKind(), AArch64ArithmeticOp.ANDS, true, left, right);
        Variable result = newVariable(trueValue.getValueKind());
        append(new CondMoveOp(result, ConditionFlag.EQ, load(trueValue), load(falseValue)));
        return result;
    }

    @Override
    public void emitStrategySwitch(SwitchStrategy strategy, Variable key, LabelRef[] keyTargets, LabelRef defaultTarget) {
        append(createStrategySwitchOp(strategy, keyTargets, defaultTarget, key, newVariable(key.getValueKind()), AArch64LIRGenerator::toIntConditionFlag));
    }

    protected StrategySwitchOp createStrategySwitchOp(SwitchStrategy strategy, LabelRef[] keyTargets, LabelRef defaultTarget, Variable key, AllocatableValue scratchValue,
                    Function<Condition, ConditionFlag> converter) {
        return new StrategySwitchOp(strategy, keyTargets, defaultTarget, key, scratchValue, converter);
    }

    @Override
    protected void emitTableSwitch(int lowKey, LabelRef defaultTarget, LabelRef[] targets, Value key) {
        append(new TableSwitchOp(lowKey, defaultTarget, targets, key, newVariable(LIRKind.value(target().arch.getWordKind())), newVariable(key.getValueKind())));
    }

    @Override
    public Variable emitByteSwap(Value input) {
        Variable result = newVariable(LIRKind.combine(input));
        append(new AArch64ByteSwapOp(result, input));
        return result;
    }

    @Override
    public Variable emitArrayEquals(JavaKind kind, Value array1, Value array2, Value length) {
        Variable result = newVariable(LIRKind.value(AArch64Kind.DWORD));
        append(new AArch64ArrayEqualsOp(this, kind, result, array1, array2, asAllocatable(length)));
        return result;
    }

    @Override
    protected JavaConstant zapValueForKind(PlatformKind kind) {
        long dead = 0xDEADDEADDEADDEADL;
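        // Narrow the recognizable 0xDEAD... pattern to the width of the kind; for floating-point
        // kinds the same bit pattern is reinterpreted as a float or double.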
        switch ((AArch64Kind) kind) {
            case BYTE:
                return JavaConstant.forByte((byte) dead);
            case WORD:
                return JavaConstant.forShort((short) dead);
            case DWORD:
                return JavaConstant.forInt((int) dead);
            case QWORD:
                return JavaConstant.forLong(dead);
            case SINGLE:
                return JavaConstant.forFloat(Float.intBitsToFloat((int) dead));
            case DOUBLE:
                return JavaConstant.forDouble(Double.longBitsToDouble(dead));
            default:
                throw GraalError.shouldNotReachHere();
        }
    }

    /**
     * Loads value into a virtual register. Contrary to {@link #load(Value)} this handles
     * RegisterValues (i.e. values corresponding to fixed physical registers) correctly, by not
     * creating an unnecessary move into a virtual register.
     *
     * This avoids generating code like
     *
     * <pre>
     *     mov x0, x19   # x19 is the fixed thread register
     *     ldr x0, [x0]
     * </pre>
     *
     * instead of simply
     *
     * <pre>
     *     ldr x0, [x19]
     * </pre>
     */
    protected AllocatableValue loadReg(Value val) {
        if (!(val instanceof Variable || val instanceof RegisterValue)) {
            return emitMove(val);
        }
        return (AllocatableValue) val;
    }

    @Override
    public void emitPause() {
        append(new AArch64PauseOp());
    }

    public abstract void emitCCall(long address, CallingConvention nativeCallingConvention, Value[] args);
}