/*
 * Copyright (C) 2013 Apple Inc.
 * Copyright (C) 2009 University of Szeged
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF SZEGED ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL UNIVERSITY OF SZEGED OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */ 27 28#include "config.h" 29 30#if ENABLE(ASSEMBLER) && CPU(ARM_TRADITIONAL) 31 32#include "MacroAssemblerARM.h" 33 34#if USE(MASM_PROBE) 35#include <wtf/StdLibExtras.h> 36#endif 37 38#if OS(LINUX) 39#include <sys/types.h> 40#include <sys/stat.h> 41#include <fcntl.h> 42#include <unistd.h> 43#include <elf.h> 44#include <asm/hwcap.h> 45#endif 46 47namespace JSC { 48 49static bool isVFPPresent() 50{ 51#if OS(LINUX) 52 int fd = open("/proc/self/auxv", O_RDONLY); 53 if (fd > 0) { 54 Elf32_auxv_t aux; 55 while (read(fd, &aux, sizeof(Elf32_auxv_t))) { 56 if (aux.a_type == AT_HWCAP) { 57 close(fd); 58 return aux.a_un.a_val & HWCAP_VFP; 59 } 60 } 61 close(fd); 62 } 63#endif // OS(LINUX) 64 65#if (COMPILER(GCC) && defined(__VFP_FP__)) 66 return true; 67#else 68 return false; 69#endif 70} 71 72const bool MacroAssemblerARM::s_isVFPPresent = isVFPPresent(); 73 74#if CPU(ARMV5_OR_LOWER) 75/* On ARMv5 and below, natural alignment is required. */ 76void MacroAssemblerARM::load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest) 77{ 78 ARMWord op2; 79 80 ASSERT(address.scale >= 0 && address.scale <= 3); 81 op2 = m_assembler.lsl(address.index, static_cast<int>(address.scale)); 82 83 if (address.offset >= 0 && address.offset + 0x2 <= 0xff) { 84 m_assembler.add(ARMRegisters::S0, address.base, op2); 85 m_assembler.halfDtrUp(ARMAssembler::LoadUint16, dest, ARMRegisters::S0, ARMAssembler::getOp2Half(address.offset)); 86 m_assembler.halfDtrUp(ARMAssembler::LoadUint16, ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Half(address.offset + 0x2)); 87 } else if (address.offset < 0 && address.offset >= -0xff) { 88 m_assembler.add(ARMRegisters::S0, address.base, op2); 89 m_assembler.halfDtrDown(ARMAssembler::LoadUint16, dest, ARMRegisters::S0, ARMAssembler::getOp2Half(-address.offset)); 90 m_assembler.halfDtrDown(ARMAssembler::LoadUint16, ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Half(-address.offset - 0x2)); 91 } else { 92 m_assembler.moveImm(address.offset, 
ARMRegisters::S0); 93 m_assembler.add(ARMRegisters::S0, ARMRegisters::S0, op2); 94 m_assembler.halfDtrUpRegister(ARMAssembler::LoadUint16, dest, address.base, ARMRegisters::S0); 95 m_assembler.add(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::Op2Immediate | 0x2); 96 m_assembler.halfDtrUpRegister(ARMAssembler::LoadUint16, ARMRegisters::S0, address.base, ARMRegisters::S0); 97 } 98 m_assembler.orr(dest, dest, m_assembler.lsl(ARMRegisters::S0, 16)); 99} 100#endif // CPU(ARMV5_OR_LOWER) 101 102#if USE(MASM_PROBE) 103 104void MacroAssemblerARM::ProbeContext::dumpCPURegisters(const char* indentation) 105{ 106 #define DUMP_GPREGISTER(_type, _regName) { \ 107 int32_t value = reinterpret_cast<int32_t>(cpu._regName); \ 108 dataLogF("%s %5s: 0x%08x %d\n", indentation, #_regName, value, value) ; \ 109 } 110 FOR_EACH_CPU_GPREGISTER(DUMP_GPREGISTER) 111 FOR_EACH_CPU_SPECIAL_REGISTER(DUMP_GPREGISTER) 112 #undef DUMP_GPREGISTER 113 114 #define DUMP_FPREGISTER(_type, _regName) { \ 115 uint32_t* u = reinterpret_cast<uint32_t*>(&cpu._regName); \ 116 double* d = reinterpret_cast<double*>(&cpu._regName); \ 117 dataLogF("%s %5s: 0x %08x %08x %12g\n", \ 118 indentation, #_regName, u[1], u[0], d[0]); \ 119 } 120 FOR_EACH_CPU_FPREGISTER(DUMP_FPREGISTER) 121 #undef DUMP_FPREGISTER 122} 123 124void MacroAssemblerARM::ProbeContext::dump(const char* indentation) 125{ 126 if (!indentation) 127 indentation = ""; 128 129 dataLogF("%sProbeContext %p {\n", indentation, this); 130 dataLogF("%s probeFunction: %p\n", indentation, probeFunction); 131 dataLogF("%s arg1: %p %llu\n", indentation, arg1, reinterpret_cast<int64_t>(arg1)); 132 dataLogF("%s arg2: %p %llu\n", indentation, arg2, reinterpret_cast<int64_t>(arg2)); 133 dataLogF("%s cpu: {\n", indentation); 134 135 dumpCPURegisters(indentation); 136 137 dataLogF("%s }\n", indentation); 138 dataLogF("%s}\n", indentation); 139} 140 141 142extern "C" void ctiMasmProbeTrampoline(); 143 144// For details on "What code is emitted for the probe?" 
and "What values are in 145// the saved registers?", see comment for MacroAssemblerX86::probe() in 146// MacroAssemblerX86_64.h. 147 148void MacroAssemblerARM::probe(MacroAssemblerARM::ProbeFunction function, void* arg1, void* arg2) 149{ 150 push(RegisterID::sp); 151 push(RegisterID::lr); 152 push(RegisterID::ip); 153 push(RegisterID::S0); 154 // The following uses RegisterID::S0. So, they must come after we push S0 above. 155 push(trustedImm32FromPtr(arg2)); 156 push(trustedImm32FromPtr(arg1)); 157 push(trustedImm32FromPtr(function)); 158 159 move(trustedImm32FromPtr(ctiMasmProbeTrampoline), RegisterID::S0); 160 m_assembler.blx(RegisterID::S0); 161 162} 163#endif // USE(MASM_PROBE) 164 165} // namespace JSC 166 167#endif // ENABLE(ASSEMBLER) && CPU(ARM_TRADITIONAL) 168