path: root/JavaScriptCore/assembler/MacroAssemblerARM.cpp
author     Steve Block <steveblock@google.com>   2009-10-08 17:19:54 +0100
committer  Steve Block <steveblock@google.com>   2009-10-20 00:41:58 +0100
commit     231d4e3152a9c27a73b6ac7badbe6be673aa3ddf (patch)
tree       a6c7e2d6cd7bfa7011cc39abbb436142d7a4a7c8 /JavaScriptCore/assembler/MacroAssemblerARM.cpp
parent     e196732677050bd463301566a68a643b6d14b907 (diff)
download   external_webkit-231d4e3152a9c27a73b6ac7badbe6be673aa3ddf.zip
           external_webkit-231d4e3152a9c27a73b6ac7badbe6be673aa3ddf.tar.gz
           external_webkit-231d4e3152a9c27a73b6ac7badbe6be673aa3ddf.tar.bz2
Merge webkit.org at R49305 : Automatic merge by git.
Change-Id: I8968561bc1bfd72b8923b7118d3728579c6dbcc7
Diffstat (limited to 'JavaScriptCore/assembler/MacroAssemblerARM.cpp')
-rw-r--r--  JavaScriptCore/assembler/MacroAssemblerARM.cpp  |  94
1 file changed, 94 insertions(+), 0 deletions(-)
diff --git a/JavaScriptCore/assembler/MacroAssemblerARM.cpp b/JavaScriptCore/assembler/MacroAssemblerARM.cpp
new file mode 100644
index 0000000..d726ecd
--- /dev/null
+++ b/JavaScriptCore/assembler/MacroAssemblerARM.cpp
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2009 University of Szeged
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF SZEGED ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL UNIVERSITY OF SZEGED OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+
+#if ENABLE(ASSEMBLER) && PLATFORM(ARM_TRADITIONAL)
+
+#include "MacroAssemblerARM.h"
+
+#if PLATFORM(LINUX)
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <elf.h>
+#include <asm/hwcap.h>
+#endif
+
+namespace JSC {
+
+static bool isVFPPresent()
+{
+#if PLATFORM(LINUX)
+ int fd = open("/proc/self/auxv", O_RDONLY);
+ if (fd >= 0) {
+ Elf32_auxv_t aux;
+ while (read(fd, &aux, sizeof(Elf32_auxv_t))) {
+ if (aux.a_type == AT_HWCAP) {
+ close(fd);
+ return aux.a_un.a_val & HWCAP_VFP;
+ }
+ }
+ close(fd);
+ }
+#endif
+
+ return false;
+}
+
+const bool MacroAssemblerARM::s_isVFPPresent = isVFPPresent();
+
+#if defined(ARM_REQUIRE_NATURAL_ALIGNMENT) && ARM_REQUIRE_NATURAL_ALIGNMENT
+void MacroAssemblerARM::load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest)
+{
+ ARMWord op2;
+
+ ASSERT(address.scale >= 0 && address.scale <= 3);
+ op2 = m_assembler.lsl(address.index, static_cast<int>(address.scale));
+
+ if (address.offset >= 0 && address.offset + 0x2 <= 0xff) {
+ m_assembler.add_r(ARMRegisters::S0, address.base, op2);
+ m_assembler.ldrh_u(dest, ARMRegisters::S0, ARMAssembler::getOp2Byte(address.offset));
+ m_assembler.ldrh_u(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Byte(address.offset + 0x2));
+ } else if (address.offset < 0 && address.offset >= -0xff) {
+ m_assembler.add_r(ARMRegisters::S0, address.base, op2);
+ m_assembler.ldrh_d(dest, ARMRegisters::S0, ARMAssembler::getOp2Byte(-address.offset));
+ m_assembler.ldrh_d(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Byte(-address.offset - 0x2));
+ } else {
+ m_assembler.ldr_un_imm(ARMRegisters::S0, address.offset);
+ m_assembler.add_r(ARMRegisters::S0, ARMRegisters::S0, op2);
+ m_assembler.ldrh_r(dest, address.base, ARMRegisters::S0);
+ m_assembler.add_r(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::OP2_IMM | 0x2);
+ m_assembler.ldrh_r(ARMRegisters::S0, address.base, ARMRegisters::S0);
+ }
+ m_assembler.orr_r(dest, dest, m_assembler.lsl(ARMRegisters::S0, 16));
+}
+#endif
+
+} // namespace JSC
+
+#endif // ENABLE(ASSEMBLER) && PLATFORM(ARM_TRADITIONAL)
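
Note on the VFP probe: isVFPPresent() reads the process's ELF auxiliary vector (/proc/self/auxv) and tests the kernel-reported AT_HWCAP bits for HWCAP_VFP, so the JIT can decide at runtime whether VFP floating-point hardware is available. Below is a minimal standalone C++ sketch of the same probe wrapped in a small test harness; it is not part of the patch, only builds on an ARM Linux target (HWCAP_VFP and <asm/hwcap.h> are ARM-specific), and the hasVFP()/main() names are illustrative only.

// Standalone sketch (assumed names, ARM Linux only): same AT_HWCAP scan as isVFPPresent().
#include <cstdio>
#include <fcntl.h>
#include <unistd.h>
#include <elf.h>
#include <asm/hwcap.h>

static bool hasVFP()
{
    int fd = open("/proc/self/auxv", O_RDONLY);
    if (fd < 0)
        return false;

    Elf32_auxv_t aux;
    bool result = false;
    // Walk the auxiliary vector entry by entry until the hardware-capability record is found.
    while (read(fd, &aux, sizeof(aux)) == static_cast<ssize_t>(sizeof(aux))) {
        if (aux.a_type == AT_HWCAP) {
            result = aux.a_un.a_val & HWCAP_VFP;
            break;
        }
    }
    close(fd);
    return result;
}

int main()
{
    std::printf("VFP present: %s\n", hasVFP() ? "yes" : "no");
    return 0;
}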
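
Note on the unaligned load path: when ARM_REQUIRE_NATURAL_ALIGNMENT is set, load32WithUnalignedHalfWords() splits an unaligned 32-bit load into two halfword (ldrh) loads at offset and offset + 2, then merges them with orr dest, dest, S0 lsl #16. The following is a hedged C++ sketch of that combine step, assuming little-endian data (the usual configuration for this port); the helper name is illustrative, not part of the patch.

// Illustrative sketch (assumed name): merge two 16-bit loads the way the emitted code does.
#include <cstdint>
#include <cstring>

static uint32_t load32ViaHalfWords(const uint8_t* p)
{
    uint16_t lo, hi;
    std::memcpy(&lo, p, sizeof(lo));      // first ldrh: bits 0..15
    std::memcpy(&hi, p + 2, sizeof(hi));  // second ldrh: bits 16..31
    // Equivalent of orr_r(dest, dest, lsl(S0, 16)).
    return static_cast<uint32_t>(lo) | (static_cast<uint32_t>(hi) << 16);
}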