Diffstat (limited to 'arch/arm/kernel')
-rw-r--r--  arch/arm/kernel/Makefile | 13
-rw-r--r--  arch/arm/kernel/arch_timer.c | 14
-rw-r--r--  arch/arm/kernel/armksyms.c | 7
-rw-r--r--  arch/arm/kernel/asm-offsets.c | 1
-rw-r--r--  arch/arm/kernel/atags_parse.c | 5
-rw-r--r--  arch/arm/kernel/bios32.c | 58
-rw-r--r--  arch/arm/kernel/calls.S | 3
-rw-r--r--  arch/arm/kernel/crash_dump.c | 2
-rw-r--r--  arch/arm/kernel/devtree.c | 128
-rw-r--r--  arch/arm/kernel/entry-armv.S | 27
-rw-r--r--  arch/arm/kernel/entry-common.S | 16
-rw-r--r--  arch/arm/kernel/entry-header.S | 27
-rw-r--r--  arch/arm/kernel/entry-v7m.S | 2
-rw-r--r--  arch/arm/kernel/etm.c | 6
-rw-r--r--  arch/arm/kernel/ftrace.c | 17
-rw-r--r--  arch/arm/kernel/head-common.S | 15
-rw-r--r--  arch/arm/kernel/head-nommu.S | 4
-rw-r--r--  arch/arm/kernel/head.S | 126
-rw-r--r--  arch/arm/kernel/hibernate.c | 107
-rw-r--r--  arch/arm/kernel/hw_breakpoint.c | 25
-rw-r--r--  arch/arm/kernel/io.c | 35
-rw-r--r--  arch/arm/kernel/irq.c | 12
-rw-r--r--  arch/arm/kernel/isa.c | 6
-rw-r--r--  arch/arm/kernel/iwmmxt.S | 29
-rw-r--r--  arch/arm/kernel/kgdb.c | 4
-rw-r--r--  arch/arm/kernel/kprobes-arm.c | 806
-rw-r--r--  arch/arm/kernel/kprobes-common.c | 473
-rw-r--r--  arch/arm/kernel/kprobes-test-arm.c | 634
-rw-r--r--  arch/arm/kernel/kprobes-test-thumb.c | 447
-rw-r--r--  arch/arm/kernel/kprobes-test.c | 35
-rw-r--r--  arch/arm/kernel/kprobes-test.h | 2
-rw-r--r--  arch/arm/kernel/kprobes-thumb.c | 1165
-rw-r--r--  arch/arm/kernel/kprobes.c | 42
-rw-r--r--  arch/arm/kernel/kprobes.h | 400
-rw-r--r--  arch/arm/kernel/machine_kexec.c | 24
-rw-r--r--  arch/arm/kernel/module.c | 59
-rw-r--r--  arch/arm/kernel/perf_event.c | 36
-rw-r--r--  arch/arm/kernel/perf_event_cpu.c | 126
-rw-r--r--  arch/arm/kernel/perf_event_v7.c | 729
-rw-r--r--  arch/arm/kernel/perf_regs.c | 30
-rw-r--r--  arch/arm/kernel/pj4-cp0.c | 46
-rw-r--r--  arch/arm/kernel/probes-arm.c | 734
-rw-r--r--  arch/arm/kernel/probes-arm.h | 73
-rw-r--r--  arch/arm/kernel/probes-thumb.c | 882
-rw-r--r--  arch/arm/kernel/probes-thumb.h | 97
-rw-r--r--  arch/arm/kernel/probes.c | 456
-rw-r--r--  arch/arm/kernel/probes.h | 407
-rw-r--r--  arch/arm/kernel/process.c | 35
-rw-r--r--  arch/arm/kernel/psci.c | 196
-rw-r--r--  arch/arm/kernel/psci_smp.c | 34
-rw-r--r--  arch/arm/kernel/ptrace.c | 7
-rw-r--r--  arch/arm/kernel/relocate_kernel.S | 8
-rw-r--r--  arch/arm/kernel/setup.c | 128
-rw-r--r--  arch/arm/kernel/signal.c | 42
-rw-r--r--  arch/arm/kernel/sigreturn_codes.S | 102
-rw-r--r--  arch/arm/kernel/sleep.S | 32
-rw-r--r--  arch/arm/kernel/smp.c | 51
-rw-r--r--  arch/arm/kernel/smp_scu.c | 14
-rw-r--r--  arch/arm/kernel/smp_tlb.c | 36
-rw-r--r--  arch/arm/kernel/smp_twd.c | 26
-rw-r--r--  arch/arm/kernel/stacktrace.c | 62
-rw-r--r--  arch/arm/kernel/suspend.c | 8
-rw-r--r--  arch/arm/kernel/sys_oabi-compat.c | 3
-rw-r--r--  arch/arm/kernel/tcm.c | 4
-rw-r--r--  arch/arm/kernel/time.c | 29
-rw-r--r--  arch/arm/kernel/topology.c | 98
-rw-r--r--  arch/arm/kernel/traps.c | 47
-rw-r--r--  arch/arm/kernel/unwind.c | 139
-rw-r--r--  arch/arm/kernel/uprobes-arm.c | 234
-rw-r--r--  arch/arm/kernel/uprobes.c | 230
-rw-r--r--  arch/arm/kernel/uprobes.h | 35
71 files changed, 6277 insertions(+), 3715 deletions(-)
diff --git a/arch/arm/kernel/Makefile b/arch/arm/kernel/Makefile
index 5140df5f23a..38ddd9f83d0 100644
--- a/arch/arm/kernel/Makefile
+++ b/arch/arm/kernel/Makefile
@@ -17,7 +17,8 @@ CFLAGS_REMOVE_return_address.o = -pg
obj-y := elf.o entry-common.o irq.o opcodes.o \
process.o ptrace.o return_address.o \
- setup.o signal.o stacktrace.o sys_arm.o time.o traps.o
+ setup.o signal.o sigreturn_codes.o \
+ stacktrace.o sys_arm.o time.o traps.o
obj-$(CONFIG_ATAGS) += atags_parse.o
obj-$(CONFIG_ATAGS_PROC) += atags_proc.o
@@ -38,6 +39,7 @@ obj-$(CONFIG_ARTHUR) += arthur.o
obj-$(CONFIG_ISA_DMA) += dma-isa.o
obj-$(CONFIG_PCI) += bios32.o isa.o
obj-$(CONFIG_ARM_CPU_SUSPEND) += sleep.o suspend.o
+obj-$(CONFIG_HIBERNATION) += hibernate.o
obj-$(CONFIG_SMP) += smp.o
ifdef CONFIG_MMU
obj-$(CONFIG_SMP) += smp_tlb.o
@@ -49,11 +51,12 @@ obj-$(CONFIG_DYNAMIC_FTRACE) += ftrace.o insn.o
obj-$(CONFIG_FUNCTION_GRAPH_TRACER) += ftrace.o insn.o
obj-$(CONFIG_JUMP_LABEL) += jump_label.o insn.o patch.o
obj-$(CONFIG_KEXEC) += machine_kexec.o relocate_kernel.o
-obj-$(CONFIG_KPROBES) += kprobes.o kprobes-common.o patch.o
+obj-$(CONFIG_UPROBES) += probes.o probes-arm.o uprobes.o uprobes-arm.o
+obj-$(CONFIG_KPROBES) += probes.o kprobes.o kprobes-common.o patch.o
ifdef CONFIG_THUMB2_KERNEL
-obj-$(CONFIG_KPROBES) += kprobes-thumb.o
+obj-$(CONFIG_KPROBES) += kprobes-thumb.o probes-thumb.o
else
-obj-$(CONFIG_KPROBES) += kprobes-arm.o
+obj-$(CONFIG_KPROBES) += kprobes-arm.o probes-arm.o
endif
obj-$(CONFIG_ARM_KPROBES_TEST) += test-kprobes.o
test-kprobes-objs := kprobes-test.o
@@ -77,7 +80,9 @@ obj-$(CONFIG_CPU_XSCALE) += xscale-cp0.o
obj-$(CONFIG_CPU_XSC3) += xscale-cp0.o
obj-$(CONFIG_CPU_MOHAWK) += xscale-cp0.o
obj-$(CONFIG_CPU_PJ4) += pj4-cp0.o
+obj-$(CONFIG_CPU_PJ4B) += pj4-cp0.o
obj-$(CONFIG_IWMMXT) += iwmmxt.o
+obj-$(CONFIG_PERF_EVENTS) += perf_regs.o
obj-$(CONFIG_HW_PERF_EVENTS) += perf_event.o perf_event_cpu.o
AFLAGS_iwmmxt.o := -Wa,-mcpu=iwmmxt
obj-$(CONFIG_ARM_CPU_TOPOLOGY) += topology.o
diff --git a/arch/arm/kernel/arch_timer.c b/arch/arm/kernel/arch_timer.c
index 221f07b11cc..1791f12c180 100644
--- a/arch/arm/kernel/arch_timer.c
+++ b/arch/arm/kernel/arch_timer.c
@@ -11,7 +11,6 @@
#include <linux/init.h>
#include <linux/types.h>
#include <linux/errno.h>
-#include <linux/sched_clock.h>
#include <asm/delay.h>
@@ -22,13 +21,6 @@ static unsigned long arch_timer_read_counter_long(void)
return arch_timer_read_counter();
}
-static u32 sched_clock_mult __read_mostly;
-
-static unsigned long long notrace arch_timer_sched_clock(void)
-{
- return arch_timer_read_counter() * sched_clock_mult;
-}
-
static struct delay_timer arch_delay_timer;
static void __init arch_timer_delay_timer_register(void)
@@ -48,11 +40,5 @@ int __init arch_timer_arch_init(void)
arch_timer_delay_timer_register();
- /* Cache the sched_clock multiplier to save a divide in the hot path. */
- sched_clock_mult = NSEC_PER_SEC / arch_timer_rate;
- sched_clock_func = arch_timer_sched_clock;
- pr_info("sched_clock: ARM arch timer >56 bits at %ukHz, resolution %uns\n",
- arch_timer_rate / 1000, sched_clock_mult);
-
return 0;
}
diff --git a/arch/arm/kernel/armksyms.c b/arch/arm/kernel/armksyms.c
index 60d3b738d42..f7b450f97e6 100644
--- a/arch/arm/kernel/armksyms.c
+++ b/arch/arm/kernel/armksyms.c
@@ -35,6 +35,8 @@ extern void __ucmpdi2(void);
extern void __udivsi3(void);
extern void __umodsi3(void);
extern void __do_div64(void);
+extern void __bswapsi2(void);
+extern void __bswapdi2(void);
extern void __aeabi_idiv(void);
extern void __aeabi_idivmod(void);
@@ -114,6 +116,8 @@ EXPORT_SYMBOL(__ucmpdi2);
EXPORT_SYMBOL(__udivsi3);
EXPORT_SYMBOL(__umodsi3);
EXPORT_SYMBOL(__do_div64);
+EXPORT_SYMBOL(__bswapsi2);
+EXPORT_SYMBOL(__bswapdi2);
#ifdef CONFIG_AEABI
EXPORT_SYMBOL(__aeabi_idiv);
@@ -154,5 +158,6 @@ EXPORT_SYMBOL(__gnu_mcount_nc);
#endif
#ifdef CONFIG_ARM_PATCH_PHYS_VIRT
-EXPORT_SYMBOL(__pv_phys_offset);
+EXPORT_SYMBOL(__pv_phys_pfn_offset);
+EXPORT_SYMBOL(__pv_offset);
#endif
diff --git a/arch/arm/kernel/asm-offsets.c b/arch/arm/kernel/asm-offsets.c
index ded041711be..85598b5d1ef 100644
--- a/arch/arm/kernel/asm-offsets.c
+++ b/arch/arm/kernel/asm-offsets.c
@@ -174,6 +174,7 @@ int main(void)
DEFINE(VCPU_FIQ_REGS, offsetof(struct kvm_vcpu, arch.regs.fiq_regs));
DEFINE(VCPU_PC, offsetof(struct kvm_vcpu, arch.regs.usr_regs.ARM_pc));
DEFINE(VCPU_CPSR, offsetof(struct kvm_vcpu, arch.regs.usr_regs.ARM_cpsr));
+ DEFINE(VCPU_HCR, offsetof(struct kvm_vcpu, arch.hcr));
DEFINE(VCPU_IRQ_LINES, offsetof(struct kvm_vcpu, arch.irq_lines));
DEFINE(VCPU_HSR, offsetof(struct kvm_vcpu, arch.fault.hsr));
DEFINE(VCPU_HxFAR, offsetof(struct kvm_vcpu, arch.fault.hxfar));
diff --git a/arch/arm/kernel/atags_parse.c b/arch/arm/kernel/atags_parse.c
index 8c14de8180c..7807ef58a2a 100644
--- a/arch/arm/kernel/atags_parse.c
+++ b/arch/arm/kernel/atags_parse.c
@@ -22,6 +22,7 @@
#include <linux/fs.h>
#include <linux/root_dev.h>
#include <linux/screen_info.h>
+#include <linux/memblock.h>
#include <asm/setup.h>
#include <asm/system_info.h>
@@ -222,10 +223,10 @@ setup_machine_tags(phys_addr_t __atags_pointer, unsigned int machine_nr)
}
if (mdesc->fixup)
- mdesc->fixup(tags, &from, &meminfo);
+ mdesc->fixup(tags, &from);
if (tags->hdr.tag == ATAG_CORE) {
- if (meminfo.nr_banks != 0)
+ if (memblock_phys_mem_size())
squash_mem_tags(tags);
save_atags(tags);
parse_tags(tags);
diff --git a/arch/arm/kernel/bios32.c b/arch/arm/kernel/bios32.c
index 317da88ae65..17a26c17f7f 100644
--- a/arch/arm/kernel/bios32.c
+++ b/arch/arm/kernel/bios32.c
@@ -19,7 +19,7 @@
static int debug_pci;
/*
- * We can't use pci_find_device() here since we are
+ * We can't use pci_get_device() here since we are
* called from interrupt context.
*/
static void pcibios_bus_report_status(struct pci_bus *bus, u_int status_mask, int warn)
@@ -57,13 +57,10 @@ static void pcibios_bus_report_status(struct pci_bus *bus, u_int status_mask, int warn)
void pcibios_report_status(u_int status_mask, int warn)
{
- struct list_head *l;
-
- list_for_each(l, &pci_root_buses) {
- struct pci_bus *bus = pci_bus_b(l);
+ struct pci_bus *bus;
+ list_for_each_entry(bus, &pci_root_buses, node)
pcibios_bus_report_status(bus, status_mask, warn);
- }
}
/*
@@ -548,6 +545,18 @@ void pci_common_init_dev(struct device *parent, struct hw_pci *hw)
*/
pci_bus_add_devices(bus);
}
+
+ list_for_each_entry(sys, &head, node) {
+ struct pci_bus *bus = sys->bus;
+
+ /* Configure PCI Express settings */
+ if (bus && !pci_has_flag(PCI_PROBE_ONLY)) {
+ struct pci_bus *child;
+
+ list_for_each_entry(child, &bus->children, node)
+ pcie_bus_configure_settings(child);
+ }
+ }
}
#ifndef CONFIG_PCI_HOST_ITE8152
@@ -608,41 +617,10 @@ resource_size_t pcibios_align_resource(void *data, const struct resource *res,
*/
int pcibios_enable_device(struct pci_dev *dev, int mask)
{
- u16 cmd, old_cmd;
- int idx;
- struct resource *r;
+ if (pci_has_flag(PCI_PROBE_ONLY))
+ return 0;
- pci_read_config_word(dev, PCI_COMMAND, &cmd);
- old_cmd = cmd;
- for (idx = 0; idx < 6; idx++) {
- /* Only set up the requested stuff */
- if (!(mask & (1 << idx)))
- continue;
-
- r = dev->resource + idx;
- if (!r->start && r->end) {
- printk(KERN_ERR "PCI: Device %s not available because"
- " of resource collisions\n", pci_name(dev));
- return -EINVAL;
- }
- if (r->flags & IORESOURCE_IO)
- cmd |= PCI_COMMAND_IO;
- if (r->flags & IORESOURCE_MEM)
- cmd |= PCI_COMMAND_MEMORY;
- }
-
- /*
- * Bridges (eg, cardbus bridges) need to be fully enabled
- */
- if ((dev->class >> 16) == PCI_BASE_CLASS_BRIDGE)
- cmd |= PCI_COMMAND_IO | PCI_COMMAND_MEMORY;
-
- if (cmd != old_cmd) {
- printk("PCI: enabling device %s (%04x -> %04x)\n",
- pci_name(dev), old_cmd, cmd);
- pci_write_config_word(dev, PCI_COMMAND, cmd);
- }
- return 0;
+ return pci_enable_resources(dev, mask);
}
int pci_mmap_page_range(struct pci_dev *dev, struct vm_area_struct *vma,
diff --git a/arch/arm/kernel/calls.S b/arch/arm/kernel/calls.S
index c6ca7e37677..8f51bdcdacb 100644
--- a/arch/arm/kernel/calls.S
+++ b/arch/arm/kernel/calls.S
@@ -389,6 +389,9 @@
CALL(sys_process_vm_writev)
CALL(sys_kcmp)
CALL(sys_finit_module)
+/* 380 */ CALL(sys_sched_setattr)
+ CALL(sys_sched_getattr)
+ CALL(sys_renameat2)
#ifndef syscalls_counted
.equ syscalls_padding, ((NR_syscalls + 3) & ~3) - NR_syscalls
#define syscalls_counted
diff --git a/arch/arm/kernel/crash_dump.c b/arch/arm/kernel/crash_dump.c
index 90c50d4b43f..5d1286d5115 100644
--- a/arch/arm/kernel/crash_dump.c
+++ b/arch/arm/kernel/crash_dump.c
@@ -39,7 +39,7 @@ ssize_t copy_oldmem_page(unsigned long pfn, char *buf,
if (!csize)
return 0;
- vaddr = ioremap(pfn << PAGE_SHIFT, PAGE_SIZE);
+ vaddr = ioremap(__pfn_to_phys(pfn), PAGE_SIZE);
if (!vaddr)
return -ENOMEM;
diff --git a/arch/arm/kernel/devtree.c b/arch/arm/kernel/devtree.c
index f35906b3d8c..11c54de9f8c 100644
--- a/arch/arm/kernel/devtree.c
+++ b/arch/arm/kernel/devtree.c
@@ -18,6 +18,7 @@
#include <linux/of_fdt.h>
#include <linux/of_irq.h>
#include <linux/of_platform.h>
+#include <linux/smp.h>
#include <asm/cputype.h>
#include <asm/setup.h>
@@ -26,42 +27,37 @@
#include <asm/mach/arch.h>
#include <asm/mach-types.h>
-void __init early_init_dt_add_memory_arch(u64 base, u64 size)
-{
- arm_add_memory(base, size);
-}
-void * __init early_init_dt_alloc_memory_arch(u64 size, u64 align)
-{
- return alloc_bootmem_align(size, align);
-}
+#ifdef CONFIG_SMP
+extern struct of_cpu_method __cpu_method_of_table[];
-void __init arm_dt_memblock_reserve(void)
+static const struct of_cpu_method __cpu_method_of_table_sentinel
+ __used __section(__cpu_method_of_table_end);
+
+
+static int __init set_smp_ops_by_method(struct device_node *node)
{
- u64 *reserve_map, base, size;
+ const char *method;
+ struct of_cpu_method *m = __cpu_method_of_table;
- if (!initial_boot_params)
- return;
+ if (of_property_read_string(node, "enable-method", &method))
+ return 0;
- /* Reserve the dtb region */
- memblock_reserve(virt_to_phys(initial_boot_params),
- be32_to_cpu(initial_boot_params->totalsize));
+ for (; m->method; m++)
+ if (!strcmp(m->method, method)) {
+ smp_set_ops(m->ops);
+ return 1;
+ }
- /*
- * Process the reserve map. This will probably overlap the initrd
- * and dtb locations which are already reserved, but overlaping
- * doesn't hurt anything
- */
- reserve_map = ((void*)initial_boot_params) +
- be32_to_cpu(initial_boot_params->off_mem_rsvmap);
- while (1) {
- base = be64_to_cpup(reserve_map++);
- size = be64_to_cpup(reserve_map++);
- if (!size)
- break;
- memblock_reserve(base, size);
- }
+ return 0;
}
+#else
+static inline int set_smp_ops_by_method(struct device_node *node)
+{
+ return 1;
+}
+#endif
+
/*
* arm_dt_init_cpu_maps - Function retrieves cpu nodes from the device tree
@@ -79,6 +75,7 @@ void __init arm_dt_init_cpu_maps(void)
* read as 0.
*/
struct device_node *cpu, *cpus;
+ int found_method = 0;
u32 i, j, cpuidx = 1;
u32 mpidr = is_smp() ? read_cpuid_mpidr() & MPIDR_HWID_BITMASK : 0;
@@ -150,8 +147,18 @@ void __init arm_dt_init_cpu_maps(void)
}
tmp_map[i] = hwid;
+
+ if (!found_method)
+ found_method = set_smp_ops_by_method(cpu);
}
+ /*
+ * Fallback to an enable-method in the cpus node if nothing found in
+ * a cpu node.
+ */
+ if (!found_method)
+ set_smp_ops_by_method(cpus);
+
if (!bootcpu_valid) {
pr_warn("DT missing boot CPU MPIDR[23:0], fall back to default cpu_logical_map\n");
return;
@@ -171,7 +178,20 @@ void __init arm_dt_init_cpu_maps(void)
bool arch_match_cpu_phys_id(int cpu, u64 phys_id)
{
- return (phys_id & MPIDR_HWID_BITMASK) == cpu_logical_map(cpu);
+ return phys_id == cpu_logical_map(cpu);
+}
+
+static const void * __init arch_get_next_mach(const char *const **match)
+{
+ static const struct machine_desc *mdesc = __arch_info_begin;
+ const struct machine_desc *m = mdesc;
+
+ if (m >= __arch_info_end)
+ return NULL;
+
+ mdesc++;
+ *match = m->dt_compat;
+ return m;
}
/**
@@ -183,11 +203,7 @@ bool arch_match_cpu_phys_id(int cpu, u64 phys_id)
*/
const struct machine_desc * __init setup_machine_fdt(unsigned int dt_phys)
{
- struct boot_param_header *devtree;
const struct machine_desc *mdesc, *mdesc_best = NULL;
- unsigned int score, mdesc_score = ~1;
- unsigned long dt_root;
- const char *model;
#ifdef CONFIG_ARCH_MULTIPLATFORM
DT_MACHINE_START(GENERIC_DT, "Generic DT based system")
@@ -196,32 +212,20 @@ const struct machine_desc * __init setup_machine_fdt(unsigned int dt_phys)
mdesc_best = &__mach_desc_GENERIC_DT;
#endif
- if (!dt_phys)
+ if (!dt_phys || !early_init_dt_verify(phys_to_virt(dt_phys)))
return NULL;
- devtree = phys_to_virt(dt_phys);
+ mdesc = of_flat_dt_match_machine(mdesc_best, arch_get_next_mach);
- /* check device tree validity */
- if (be32_to_cpu(devtree->magic) != OF_DT_HEADER)
- return NULL;
-
- /* Search the mdescs for the 'best' compatible value match */
- initial_boot_params = devtree;
- dt_root = of_get_flat_dt_root();
- for_each_machine_desc(mdesc) {
- score = of_flat_dt_match(dt_root, mdesc->dt_compat);
- if (score > 0 && score < mdesc_score) {
- mdesc_best = mdesc;
- mdesc_score = score;
- }
- }
- if (!mdesc_best) {
+ if (!mdesc) {
const char *prop;
- long size;
+ int size;
+ unsigned long dt_root;
early_print("\nError: unrecognized/unsupported "
"device tree compatible list:\n[ ");
+ dt_root = of_get_flat_dt_root();
prop = of_get_flat_dt_prop(dt_root, "compatible", &size);
while (size > 0) {
early_print("'%s' ", prop);
@@ -233,22 +237,14 @@ const struct machine_desc * __init setup_machine_fdt(unsigned int dt_phys)
dump_machine_table(); /* does not return */
}
- model = of_get_flat_dt_prop(dt_root, "model", NULL);
- if (!model)
- model = of_get_flat_dt_prop(dt_root, "compatible", NULL);
- if (!model)
- model = "<unknown>";
- pr_info("Machine: %s, model: %s\n", mdesc_best->name, model);
+ /* We really don't want to do this, but sometimes firmware provides buggy data */
+ if (mdesc->dt_fixup)
+ mdesc->dt_fixup();
- /* Retrieve various information from the /chosen node */
- of_scan_flat_dt(early_init_dt_scan_chosen, boot_command_line);
- /* Initialize {size,address}-cells info */
- of_scan_flat_dt(early_init_dt_scan_root, NULL);
- /* Setup memory, calling early_init_dt_add_memory_arch */
- of_scan_flat_dt(early_init_dt_scan_memory, NULL);
+ early_init_dt_scan_nodes();
/* Change machine number to match the mdesc we're using */
- __machine_arch_type = mdesc_best->nr;
+ __machine_arch_type = mdesc->nr;
- return mdesc_best;
+ return mdesc;
}
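
The set_smp_ops_by_method() helper added above walks __cpu_method_of_table and installs the SMP operations whose entry matches the device tree "enable-method" string. As a rough sketch of how such an entry normally gets into that table (hypothetical platform and method names, not part of this patch), a platform would register it with the CPU_METHOD_OF_DECLARE() helper from <asm/smp.h>:

	/* Sketch only: "vendor,foo-smp" and foo_smp_ops are made-up names. */
	#include <asm/smp.h>

	extern struct smp_operations foo_smp_ops;	/* platform SMP ops */

	/*
	 * Emits an of_cpu_method entry into __cpu_method_of_table so that
	 * set_smp_ops_by_method() picks it up when a cpu or cpus node
	 * carries enable-method = "vendor,foo-smp".
	 */
	CPU_METHOD_OF_DECLARE(foo_smp, "vendor,foo-smp", &foo_smp_ops);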
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index 9cbe70c8b0e..52a949a8077 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -192,6 +192,7 @@ __dabt_svc:
svc_entry
mov r2, sp
dabt_helper
+ THUMB( ldr r5, [sp, #S_PSR] ) @ potentially updated CPSR
svc_exit r5 @ return from exception
UNWIND(.fnend )
ENDPROC(__dabt_svc)
@@ -343,7 +344,7 @@ ENDPROC(__pabt_svc)
@
@ Enable the alignment trap while in kernel mode
@
- alignment_trap r0
+ alignment_trap r0, .LCcralign
@
@ Clear FP to mark the first stack frame
@@ -412,13 +413,17 @@ __und_usr:
@
adr r9, BSYM(ret_from_exception)
+ @ IRQs must be enabled before attempting to read the instruction from
+ @ user space since that could cause a page/translation fault if the
+ @ page table was modified by another CPU.
+ enable_irq
+
tst r3, #PSR_T_BIT @ Thumb mode?
bne __und_usr_thumb
sub r4, r2, #4 @ ARM instr at LR - 4
1: ldrt r0, [r4]
-#ifdef CONFIG_CPU_ENDIAN_BE8
- rev r0, r0 @ little endian instruction
-#endif
+ ARM_BE8(rev r0, r0) @ little endian instruction
+
@ r0 = 32-bit ARM instruction which caused the exception
@ r2 = PC value for the following instruction (:= regs->ARM_pc)
@ r4 = PC value for the faulting instruction
@@ -451,9 +456,11 @@ __und_usr_thumb:
.arch armv6t2
#endif
2: ldrht r5, [r4]
+ARM_BE8(rev16 r5, r5) @ little endian instruction
cmp r5, #0xe800 @ 32bit instruction if xx != 0
blo __und_usr_fault_16 @ 16bit undefined instruction
3: ldrht r0, [r2]
+ARM_BE8(rev16 r0, r0) @ little endian instruction
add r2, r2, #2 @ r2 is PC + 2, make it PC + 4
str r2, [sp, #S_PC] @ it's a 2x16bit instr, update
orr r0, r0, r5, lsl #16
@@ -482,7 +489,8 @@ ENDPROC(__und_usr)
*/
.pushsection .fixup, "ax"
.align 2
-4: mov pc, r9
+4: str r4, [sp, #S_PC] @ retry current instruction
+ mov pc, r9
.popsection
.pushsection __ex_table,"a"
.long 1b, 4b
@@ -515,7 +523,7 @@ ENDPROC(__und_usr)
* r9 = normal "successful" return address
* r10 = this threads thread_info structure
* lr = unrecognised instruction return address
- * IRQs disabled, FIQs enabled.
+ * IRQs enabled, FIQs enabled.
*/
@
@ Fall-through from Thumb-2 __und_usr
@@ -622,7 +630,6 @@ call_fpe:
#endif
do_fpe:
- enable_irq
ldr r4, .LCfp
add r10, r10, #TI_FPSTATE @ r10 = workspace
ldr pc, [r4] @ Call FP module USR entry point
@@ -650,8 +657,7 @@ __und_usr_fault_32:
b 1f
__und_usr_fault_16:
mov r1, #2
-1: enable_irq
- mov r0, sp
+1: mov r0, sp
adr lr, BSYM(ret_from_exception)
b __und_fault
ENDPROC(__und_usr_fault_32)
@@ -1141,11 +1147,8 @@ __vectors_start:
.data
.globl cr_alignment
- .globl cr_no_alignment
cr_alignment:
.space 4
-cr_no_alignment:
- .space 4
#ifdef CONFIG_MULTI_IRQ_HANDLER
.globl handle_arch_irq
diff --git a/arch/arm/kernel/entry-common.S b/arch/arm/kernel/entry-common.S
index 74ad15d1a06..7139d4a7dea 100644
--- a/arch/arm/kernel/entry-common.S
+++ b/arch/arm/kernel/entry-common.S
@@ -365,13 +365,7 @@ ENTRY(vector_swi)
str r0, [sp, #S_OLD_R0] @ Save OLD_R0
#endif
zero_fp
-
-#ifdef CONFIG_ALIGNMENT_TRAP
- ldr ip, __cr_alignment
- ldr ip, [ip]
- mcr p15, 0, ip, c1, c0 @ update control register
-#endif
-
+ alignment_trap ip, __cr_alignment
enable_irq
ct_user_exit
get_thread_info tsk
@@ -393,9 +387,7 @@ ENTRY(vector_swi)
#else
USER( ldr r10, [lr, #-4] ) @ get SWI instruction
#endif
-#ifdef CONFIG_CPU_ENDIAN_BE8
- rev r10, r10 @ little endian instruction
-#endif
+ ARM_BE8(rev r10, r10) @ little endian instruction
#elif defined(CONFIG_AEABI)
@@ -442,10 +434,10 @@ local_restart:
ldrcc pc, [tbl, scno, lsl #2] @ call sys_* routine
add r1, sp, #S_OFF
- cmp scno, #(__ARM_NR_BASE - __NR_SYSCALL_BASE)
+2: cmp scno, #(__ARM_NR_BASE - __NR_SYSCALL_BASE)
eor r0, scno, #__NR_SYSCALL_BASE @ put OS number back
bcs arm_syscall
-2: mov why, #0 @ no longer a real syscall
+ mov why, #0 @ no longer a real syscall
b sys_ni_syscall @ not private func
#if defined(CONFIG_OABI_COMPAT) || !defined(CONFIG_AEABI)
diff --git a/arch/arm/kernel/entry-header.S b/arch/arm/kernel/entry-header.S
index de23a9beed1..5d702f8900b 100644
--- a/arch/arm/kernel/entry-header.S
+++ b/arch/arm/kernel/entry-header.S
@@ -37,9 +37,9 @@
#endif
.endm
- .macro alignment_trap, rtemp
+ .macro alignment_trap, rtemp, label
#ifdef CONFIG_ALIGNMENT_TRAP
- ldr \rtemp, .LCcralign
+ ldr \rtemp, \label
ldr \rtemp, [\rtemp]
mcr p15, 0, \rtemp, c1, c0
#endif
@@ -132,6 +132,10 @@
orrne r5, V7M_xPSR_FRAMEPTRALIGN
biceq r5, V7M_xPSR_FRAMEPTRALIGN
+ @ ensure bit 0 is cleared in the PC, otherwise behaviour is
+ @ unpredictable
+ bic r4, #1
+
@ write basic exception frame
stmdb r2!, {r1, r3-r5}
ldmia sp, {r1, r3-r5}
@@ -236,11 +240,6 @@
movs pc, lr @ return & move spsr_svc into cpsr
.endm
- .macro get_thread_info, rd
- mov \rd, sp, lsr #13
- mov \rd, \rd, lsl #13
- .endm
-
@
@ 32-bit wide "mov pc, reg"
@
@@ -306,12 +305,6 @@
.endm
#endif /* ifdef CONFIG_CPU_V7M / else */
- .macro get_thread_info, rd
- mov \rd, sp
- lsr \rd, \rd, #13
- mov \rd, \rd, lsl #13
- .endm
-
@
@ 32-bit wide "mov pc, reg"
@
@@ -329,10 +322,10 @@
#ifdef CONFIG_CONTEXT_TRACKING
.if \save
stmdb sp!, {r0-r3, ip, lr}
- bl user_exit
+ bl context_tracking_user_exit
ldmia sp!, {r0-r3, ip, lr}
.else
- bl user_exit
+ bl context_tracking_user_exit
.endif
#endif
.endm
@@ -341,10 +334,10 @@
#ifdef CONFIG_CONTEXT_TRACKING
.if \save
stmdb sp!, {r0-r3, ip, lr}
- bl user_enter
+ bl context_tracking_user_enter
ldmia sp!, {r0-r3, ip, lr}
.else
- bl user_enter
+ bl context_tracking_user_enter
.endif
#endif
.endm
diff --git a/arch/arm/kernel/entry-v7m.S b/arch/arm/kernel/entry-v7m.S
index 52b26432c9a..2260f185582 100644
--- a/arch/arm/kernel/entry-v7m.S
+++ b/arch/arm/kernel/entry-v7m.S
@@ -14,8 +14,6 @@
#include <asm/thread_notify.h>
#include <asm/v7m.h>
-#include <mach/entry-macro.S>
-
#include "entry-header.S"
#ifdef CONFIG_TRACE_IRQFLAGS
diff --git a/arch/arm/kernel/etm.c b/arch/arm/kernel/etm.c
index 8ff0ecdc637..131a6ab5f35 100644
--- a/arch/arm/kernel/etm.c
+++ b/arch/arm/kernel/etm.c
@@ -385,7 +385,6 @@ out:
return ret;
out_unmap:
- amba_set_drvdata(dev, NULL);
iounmap(t->etb_regs);
out_release:
@@ -398,8 +397,6 @@ static int etb_remove(struct amba_device *dev)
{
struct tracectx *t = amba_get_drvdata(dev);
- amba_set_drvdata(dev, NULL);
-
iounmap(t->etb_regs);
t->etb_regs = NULL;
@@ -588,7 +585,6 @@ out:
return ret;
out_unmap:
- amba_set_drvdata(dev, NULL);
iounmap(t->etm_regs);
out_release:
@@ -601,8 +597,6 @@ static int etm_remove(struct amba_device *dev)
{
struct tracectx *t = amba_get_drvdata(dev);
- amba_set_drvdata(dev, NULL);
-
iounmap(t->etm_regs);
t->etm_regs = NULL;
diff --git a/arch/arm/kernel/ftrace.c b/arch/arm/kernel/ftrace.c
index 34e56647dce..af9a8a927a4 100644
--- a/arch/arm/kernel/ftrace.c
+++ b/arch/arm/kernel/ftrace.c
@@ -14,6 +14,7 @@
#include <linux/ftrace.h>
#include <linux/uaccess.h>
+#include <linux/module.h>
#include <asm/cacheflush.h>
#include <asm/opcodes.h>
@@ -63,6 +64,18 @@ static unsigned long adjust_address(struct dyn_ftrace *rec, unsigned long addr)
}
#endif
+int ftrace_arch_code_modify_prepare(void)
+{
+ set_all_modules_text_rw();
+ return 0;
+}
+
+int ftrace_arch_code_modify_post_process(void)
+{
+ set_all_modules_text_ro();
+ return 0;
+}
+
static unsigned long ftrace_call_replace(unsigned long pc, unsigned long addr)
{
return arm_gen_branch_link(pc, addr);
@@ -156,10 +169,8 @@ int ftrace_make_nop(struct module *mod,
return ret;
}
-int __init ftrace_dyn_arch_init(void *data)
+int __init ftrace_dyn_arch_init(void)
{
- *(unsigned long *)data = 0;
-
return 0;
}
#endif /* CONFIG_DYNAMIC_FTRACE */
diff --git a/arch/arm/kernel/head-common.S b/arch/arm/kernel/head-common.S
index 47cd974e57e..572a38335c9 100644
--- a/arch/arm/kernel/head-common.S
+++ b/arch/arm/kernel/head-common.S
@@ -99,8 +99,7 @@ __mmap_switched:
str r1, [r5] @ Save machine type
str r2, [r6] @ Save atags pointer
cmp r7, #0
- bicne r4, r0, #CR_A @ Clear 'A' bit
- stmneia r7, {r0, r4} @ Save control register values
+ strne r0, [r7] @ Save control register values
b start_kernel
ENDPROC(__mmap_switched)
@@ -177,6 +176,18 @@ __lookup_processor_type_data:
.long __proc_info_end
.size __lookup_processor_type_data, . - __lookup_processor_type_data
+__error_lpae:
+#ifdef CONFIG_DEBUG_LL
+ adr r0, str_lpae
+ bl printascii
+ b __error
+str_lpae: .asciz "\nError: Kernel with LPAE support, but CPU does not support LPAE.\n"
+#else
+ b __error
+#endif
+ .align
+ENDPROC(__error_lpae)
+
__error_p:
#ifdef CONFIG_DEBUG_LL
adr r0, str_p1
diff --git a/arch/arm/kernel/head-nommu.S b/arch/arm/kernel/head-nommu.S
index 14235ba64a9..716249cc2ee 100644
--- a/arch/arm/kernel/head-nommu.S
+++ b/arch/arm/kernel/head-nommu.S
@@ -68,7 +68,7 @@ ENTRY(stext)
#ifdef CONFIG_ARM_MPU
/* Calculate the size of a region covering just the kernel */
- ldr r5, =PHYS_OFFSET @ Region start: PHYS_OFFSET
+ ldr r5, =PLAT_PHYS_OFFSET @ Region start: PHYS_OFFSET
ldr r6, =(_end) @ Cover whole kernel
sub r6, r6, r5 @ Minimum size of region to map
clz r6, r6 @ Region size must be 2^N...
@@ -213,7 +213,7 @@ ENTRY(__setup_mpu)
set_region_nr r0, #MPU_RAM_REGION
isb
/* Full access from PL0, PL1, shared for CONFIG_SMP, cacheable */
- ldr r0, =PHYS_OFFSET @ RAM starts at PHYS_OFFSET
+ ldr r0, =PLAT_PHYS_OFFSET @ RAM starts at PHYS_OFFSET
ldr r5,=(MPU_AP_PL1RW_PL0RW | MPU_RGN_NORMAL)
setup_region r0, r5, r6, MPU_DATA_SIDE @ PHYS_OFFSET, shared, enabled
diff --git a/arch/arm/kernel/head.S b/arch/arm/kernel/head.S
index 2c7cc1e0347..2c35f0ff2fd 100644
--- a/arch/arm/kernel/head.S
+++ b/arch/arm/kernel/head.S
@@ -52,7 +52,8 @@
.equ swapper_pg_dir, KERNEL_RAM_VADDR - PG_DIR_SIZE
.macro pgtbl, rd, phys
- add \rd, \phys, #TEXT_OFFSET - PG_DIR_SIZE
+ add \rd, \phys, #TEXT_OFFSET
+ sub \rd, \rd, #PG_DIR_SIZE
.endm
/*
@@ -77,6 +78,7 @@
__HEAD
ENTRY(stext)
+ ARM_BE8(setend be ) @ ensure we are in BE8 mode
THUMB( adr r9, BSYM(1f) ) @ Kernel is always entered in ARM.
THUMB( bx r9 ) @ If this is a Thumb-2 kernel,
@@ -100,7 +102,7 @@ ENTRY(stext)
and r3, r3, #0xf @ extract VMSA support
cmp r3, #5 @ long-descriptor translation table format?
THUMB( it lo ) @ force fixup-able long branch encoding
- blo __error_p @ only classic page table format
+ blo __error_lpae @ only classic page table format
#endif
#ifndef CONFIG_XIP_KERNEL
@@ -109,7 +111,7 @@ ENTRY(stext)
sub r4, r3, r4 @ (PHYS_OFFSET - PAGE_OFFSET)
add r8, r8, r4 @ PHYS_OFFSET
#else
- ldr r8, =PHYS_OFFSET @ always constant in this case
+ ldr r8, =PLAT_PHYS_OFFSET @ always constant in this case
#endif
/*
@@ -352,6 +354,9 @@ ENTRY(secondary_startup)
* the processor type - there is no need to check the machine type
* as it has already been validated by the primary processor.
*/
+
+ ARM_BE8(setend be) @ ensure we are in BE8 mode
+
#ifdef CONFIG_ARM_VIRT_EXT
bl __hyp_stub_install_secondary
#endif
@@ -470,7 +475,7 @@ ENDPROC(__turn_mmu_on)
#ifdef CONFIG_SMP_ON_UP
- __INIT
+ __HEAD
__fixup_smp:
and r3, r9, #0x000f0000 @ architecture version
teq r3, #0x000f0000 @ CPU ID supported?
@@ -487,7 +492,27 @@ __fixup_smp:
mrc p15, 0, r0, c0, c0, 5 @ read MPIDR
and r0, r0, #0xc0000000 @ multiprocessing extensions and
teq r0, #0x80000000 @ not part of a uniprocessor system?
- moveq pc, lr @ yes, assume SMP
+ bne __fixup_smp_on_up @ no, assume UP
+
+ @ Core indicates it is SMP. Check for Aegis SOC where a single
+ @ Cortex-A9 CPU is present but SMP operations fault.
+ mov r4, #0x41000000
+ orr r4, r4, #0x0000c000
+ orr r4, r4, #0x00000090
+ teq r3, r4 @ Check for ARM Cortex-A9
+ movne pc, lr @ Not ARM Cortex-A9,
+
+ @ If a future SoC *does* use 0x0 as the PERIPH_BASE, then the
+ @ below address check will need to be #ifdef'd or equivalent
+ @ for the Aegis platform.
+ mrc p15, 4, r0, c15, c0 @ get SCU base address
+ teq r0, #0x0 @ '0' on actual UP A9 hardware
+ beq __fixup_smp_on_up @ So its an A9 UP
+ ldr r0, [r0, #4] @ read SCU Config
+ARM_BE8(rev r0, r0) @ byteswap if big endian
+ and r0, r0, #0x3 @ number of CPUs
+ teq r0, #0x0 @ is 1?
+ movne pc, lr
__fixup_smp_on_up:
adr r0, 1f
@@ -536,6 +561,14 @@ ENTRY(fixup_smp)
ldmfd sp!, {r4 - r6, pc}
ENDPROC(fixup_smp)
+#ifdef __ARMEB__
+#define LOW_OFFSET 0x4
+#define HIGH_OFFSET 0x0
+#else
+#define LOW_OFFSET 0x0
+#define HIGH_OFFSET 0x4
+#endif
+
#ifdef CONFIG_ARM_PATCH_PHYS_VIRT
/* __fixup_pv_table - patch the stub instructions with the delta between
@@ -546,17 +579,21 @@ ENDPROC(fixup_smp)
__HEAD
__fixup_pv_table:
adr r0, 1f
- ldmia r0, {r3-r5, r7}
- sub r3, r0, r3 @ PHYS_OFFSET - PAGE_OFFSET
+ ldmia r0, {r3-r7}
+ mvn ip, #0
+ subs r3, r0, r3 @ PHYS_OFFSET - PAGE_OFFSET
add r4, r4, r3 @ adjust table start address
add r5, r5, r3 @ adjust table end address
- add r7, r7, r3 @ adjust __pv_phys_offset address
- str r8, [r7] @ save computed PHYS_OFFSET to __pv_phys_offset
+ add r6, r6, r3 @ adjust __pv_phys_pfn_offset address
+ add r7, r7, r3 @ adjust __pv_offset address
+ mov r0, r8, lsr #12 @ convert to PFN
+ str r0, [r6] @ save computed PHYS_OFFSET to __pv_phys_pfn_offset
+ strcc ip, [r7, #HIGH_OFFSET] @ save to __pv_offset high bits
mov r6, r3, lsr #24 @ constant for add/sub instructions
teq r3, r6, lsl #24 @ must be 16MiB aligned
THUMB( it ne @ cross section branch )
bne __error
- str r6, [r7, #4] @ save to __pv_offset
+ str r3, [r7, #LOW_OFFSET] @ save to __pv_offset low bits
b __fixup_a_pv_table
ENDPROC(__fixup_pv_table)
@@ -564,11 +601,20 @@ ENDPROC(__fixup_pv_table)
1: .long .
.long __pv_table_begin
.long __pv_table_end
-2: .long __pv_phys_offset
+2: .long __pv_phys_pfn_offset
+ .long __pv_offset
.text
__fixup_a_pv_table:
+ adr r0, 3f
+ ldr r6, [r0]
+ add r6, r6, r3
+ ldr r0, [r6, #HIGH_OFFSET] @ pv_offset high word
+ ldr r6, [r6, #LOW_OFFSET] @ pv_offset low word
+ mov r6, r6, lsr #24
+ cmn r0, #1
#ifdef CONFIG_THUMB2_KERNEL
+ moveq r0, #0x200000 @ set bit 21, mov to mvn instruction
lsls r6, #24
beq 2f
clz r7, r6
@@ -582,18 +628,46 @@ __fixup_a_pv_table:
b 2f
1: add r7, r3
ldrh ip, [r7, #2]
- and ip, 0x8f00
- orr ip, r6 @ mask in offset bits 31-24
+ARM_BE8(rev16 ip, ip)
+ tst ip, #0x4000
+ and ip, #0x8f00
+ orrne ip, r6 @ mask in offset bits 31-24
+ orreq ip, r0 @ mask in offset bits 7-0
+ARM_BE8(rev16 ip, ip)
strh ip, [r7, #2]
+ bne 2f
+ ldrh ip, [r7]
+ARM_BE8(rev16 ip, ip)
+ bic ip, #0x20
+ orr ip, ip, r0, lsr #16
+ARM_BE8(rev16 ip, ip)
+ strh ip, [r7]
2: cmp r4, r5
ldrcc r7, [r4], #4 @ use branch for delay slot
bcc 1b
bx lr
#else
+#ifdef CONFIG_CPU_ENDIAN_BE8
+ moveq r0, #0x00004000 @ set bit 22, mov to mvn instruction
+#else
+ moveq r0, #0x400000 @ set bit 22, mov to mvn instruction
+#endif
b 2f
1: ldr ip, [r7, r3]
+#ifdef CONFIG_CPU_ENDIAN_BE8
+ @ in BE8, we load data in BE, but instructions still in LE
+ bic ip, ip, #0xff000000
+ tst ip, #0x000f0000 @ check the rotation field
+ orrne ip, ip, r6, lsl #24 @ mask in offset bits 31-24
+ biceq ip, ip, #0x00004000 @ clear bit 22
+ orreq ip, ip, r0 @ mask in offset bits 7-0
+#else
bic ip, ip, #0x000000ff
- orr ip, ip, r6 @ mask in offset bits 31-24
+ tst ip, #0xf00 @ check the rotation field
+ orrne ip, ip, r6 @ mask in offset bits 31-24
+ biceq ip, ip, #0x400000 @ clear bit 22
+ orreq ip, ip, r0 @ mask in offset bits 7-0
+#endif
str ip, [r7, r3]
2: cmp r4, r5
ldrcc r7, [r4], #4 @ use branch for delay slot
@@ -602,28 +676,30 @@ __fixup_a_pv_table:
#endif
ENDPROC(__fixup_a_pv_table)
+ .align
+3: .long __pv_offset
+
ENTRY(fixup_pv_table)
stmfd sp!, {r4 - r7, lr}
- ldr r2, 2f @ get address of __pv_phys_offset
mov r3, #0 @ no offset
mov r4, r0 @ r0 = table start
add r5, r0, r1 @ r1 = table size
- ldr r6, [r2, #4] @ get __pv_offset
bl __fixup_a_pv_table
ldmfd sp!, {r4 - r7, pc}
ENDPROC(fixup_pv_table)
- .align
-2: .long __pv_phys_offset
-
.data
- .globl __pv_phys_offset
- .type __pv_phys_offset, %object
-__pv_phys_offset:
- .long 0
- .size __pv_phys_offset, . - __pv_phys_offset
+ .globl __pv_phys_pfn_offset
+ .type __pv_phys_pfn_offset, %object
+__pv_phys_pfn_offset:
+ .word 0
+ .size __pv_phys_pfn_offset, . -__pv_phys_pfn_offset
+
+ .globl __pv_offset
+ .type __pv_offset, %object
__pv_offset:
- .long 0
+ .quad 0
+ .size __pv_offset, . -__pv_offset
#endif
#include "head-common.S"
diff --git a/arch/arm/kernel/hibernate.c b/arch/arm/kernel/hibernate.c
new file mode 100644
index 00000000000..bb8b7964864
--- /dev/null
+++ b/arch/arm/kernel/hibernate.c
@@ -0,0 +1,107 @@
+/*
+ * Hibernation support specific for ARM
+ *
+ * Derived from work on ARM hibernation support by:
+ *
+ * Ubuntu project, hibernation support for mach-dove
+ * Copyright (C) 2010 Nokia Corporation (Hiroshi Doyu)
+ * Copyright (C) 2010 Texas Instruments, Inc. (Teerth Reddy et al.)
+ * https://lkml.org/lkml/2010/6/18/4
+ * https://lists.linux-foundation.org/pipermail/linux-pm/2010-June/027422.html
+ * https://patchwork.kernel.org/patch/96442/
+ *
+ * Copyright (C) 2006 Rafael J. Wysocki <rjw@sisk.pl>
+ *
+ * License terms: GNU General Public License (GPL) version 2
+ */
+
+#include <linux/mm.h>
+#include <linux/suspend.h>
+#include <asm/system_misc.h>
+#include <asm/idmap.h>
+#include <asm/suspend.h>
+#include <asm/memory.h>
+
+extern const void __nosave_begin, __nosave_end;
+
+int pfn_is_nosave(unsigned long pfn)
+{
+ unsigned long nosave_begin_pfn = virt_to_pfn(&__nosave_begin);
+ unsigned long nosave_end_pfn = virt_to_pfn(&__nosave_end - 1);
+
+ return (pfn >= nosave_begin_pfn) && (pfn <= nosave_end_pfn);
+}
+
+void notrace save_processor_state(void)
+{
+ WARN_ON(num_online_cpus() != 1);
+ local_fiq_disable();
+}
+
+void notrace restore_processor_state(void)
+{
+ local_fiq_enable();
+}
+
+/*
+ * Snapshot kernel memory and reset the system.
+ *
+ * swsusp_save() is executed in the suspend finisher so that the CPU
+ * context pointer and memory are part of the saved image, which is
+ * required by the resume kernel image to restart execution from
+ * swsusp_arch_suspend().
+ *
+ * soft_restart is not technically needed, but is used to get success
+ * returned from cpu_suspend.
+ *
+ * When soft reboot completes, the hibernation snapshot is written out.
+ */
+static int notrace arch_save_image(unsigned long unused)
+{
+ int ret;
+
+ ret = swsusp_save();
+ if (ret == 0)
+ soft_restart(virt_to_phys(cpu_resume));
+ return ret;
+}
+
+/*
+ * Save the current CPU state before suspend / poweroff.
+ */
+int notrace swsusp_arch_suspend(void)
+{
+ return cpu_suspend(0, arch_save_image);
+}
+
+/*
+ * Restore page contents for physical pages that were in use during loading
+ * hibernation image. Switch to idmap_pgd so the physical page tables
+ * are overwritten with the same contents.
+ */
+static void notrace arch_restore_image(void *unused)
+{
+ struct pbe *pbe;
+
+ cpu_switch_mm(idmap_pgd, &init_mm);
+ for (pbe = restore_pblist; pbe; pbe = pbe->next)
+ copy_page(pbe->orig_address, pbe->address);
+
+ soft_restart(virt_to_phys(cpu_resume));
+}
+
+static u64 resume_stack[PAGE_SIZE/2/sizeof(u64)] __nosavedata;
+
+/*
+ * Resume from the hibernation image.
+ * Due to the kernel heap / data restore, stack contents change underneath
+ * and that would make function calls impossible; switch to a temporary
+ * stack within the nosave region to avoid that problem.
+ */
+int swsusp_arch_resume(void)
+{
+ extern void call_with_stack(void (*fn)(void *), void *arg, void *sp);
+ call_with_stack(arch_restore_image, 0,
+ resume_stack + ARRAY_SIZE(resume_stack));
+ return 0;
+}
diff --git a/arch/arm/kernel/hw_breakpoint.c b/arch/arm/kernel/hw_breakpoint.c
index 7b95de60135..4d963fb66e3 100644
--- a/arch/arm/kernel/hw_breakpoint.c
+++ b/arch/arm/kernel/hw_breakpoint.c
@@ -167,7 +167,7 @@ static int debug_arch_supported(void)
/* Can we determine the watchpoint access type from the fsr? */
static int debug_exception_updates_fsr(void)
{
- return 0;
+ return get_debug_arch() >= ARM_DEBUG_ARCH_V8;
}
/* Determine number of WRP registers available. */
@@ -257,6 +257,7 @@ static int enable_monitor_mode(void)
break;
case ARM_DEBUG_ARCH_V7_ECP14:
case ARM_DEBUG_ARCH_V7_1:
+ case ARM_DEBUG_ARCH_V8:
ARM_DBG_WRITE(c0, c2, 2, (dscr | ARM_DSCR_MDBGEN));
isb();
break;
@@ -344,13 +345,13 @@ int arch_install_hw_breakpoint(struct perf_event *bp)
/* Breakpoint */
ctrl_base = ARM_BASE_BCR;
val_base = ARM_BASE_BVR;
- slots = (struct perf_event **)__get_cpu_var(bp_on_reg);
+ slots = this_cpu_ptr(bp_on_reg);
max_slots = core_num_brps;
} else {
/* Watchpoint */
ctrl_base = ARM_BASE_WCR;
val_base = ARM_BASE_WVR;
- slots = (struct perf_event **)__get_cpu_var(wp_on_reg);
+ slots = this_cpu_ptr(wp_on_reg);
max_slots = core_num_wrps;
}
@@ -396,12 +397,12 @@ void arch_uninstall_hw_breakpoint(struct perf_event *bp)
if (info->ctrl.type == ARM_BREAKPOINT_EXECUTE) {
/* Breakpoint */
base = ARM_BASE_BCR;
- slots = (struct perf_event **)__get_cpu_var(bp_on_reg);
+ slots = this_cpu_ptr(bp_on_reg);
max_slots = core_num_brps;
} else {
/* Watchpoint */
base = ARM_BASE_WCR;
- slots = (struct perf_event **)__get_cpu_var(wp_on_reg);
+ slots = this_cpu_ptr(wp_on_reg);
max_slots = core_num_wrps;
}
@@ -697,7 +698,7 @@ static void watchpoint_handler(unsigned long addr, unsigned int fsr,
struct arch_hw_breakpoint *info;
struct arch_hw_breakpoint_ctrl ctrl;
- slots = (struct perf_event **)__get_cpu_var(wp_on_reg);
+ slots = this_cpu_ptr(wp_on_reg);
for (i = 0; i < core_num_wrps; ++i) {
rcu_read_lock();
@@ -768,7 +769,7 @@ static void watchpoint_single_step_handler(unsigned long pc)
struct perf_event *wp, **slots;
struct arch_hw_breakpoint *info;
- slots = (struct perf_event **)__get_cpu_var(wp_on_reg);
+ slots = this_cpu_ptr(wp_on_reg);
for (i = 0; i < core_num_wrps; ++i) {
rcu_read_lock();
@@ -802,7 +803,7 @@ static void breakpoint_handler(unsigned long unknown, struct pt_regs *regs)
struct arch_hw_breakpoint *info;
struct arch_hw_breakpoint_ctrl ctrl;
- slots = (struct perf_event **)__get_cpu_var(bp_on_reg);
+ slots = this_cpu_ptr(bp_on_reg);
/* The exception entry code places the amended lr in the PC. */
addr = regs->ARM_pc;
@@ -1072,6 +1073,8 @@ static int __init arch_hw_breakpoint_init(void)
core_num_brps = get_num_brps();
core_num_wrps = get_num_wrps();
+ cpu_notifier_register_begin();
+
/*
* We need to tread carefully here because DBGSWENABLE may be
* driven low on this core and there isn't an architected way to
@@ -1088,6 +1091,7 @@ static int __init arch_hw_breakpoint_init(void)
if (!cpumask_empty(&debug_err_mask)) {
core_num_brps = 0;
core_num_wrps = 0;
+ cpu_notifier_register_done();
return 0;
}
@@ -1107,7 +1111,10 @@ static int __init arch_hw_breakpoint_init(void)
TRAP_HWBKPT, "breakpoint debug exception");
/* Register hotplug and PM notifiers. */
- register_cpu_notifier(&dbg_reset_nb);
+ __register_cpu_notifier(&dbg_reset_nb);
+
+ cpu_notifier_register_done();
+
pm_init();
return 0;
}
diff --git a/arch/arm/kernel/io.c b/arch/arm/kernel/io.c
index dcd5b4d8614..9203cf88333 100644
--- a/arch/arm/kernel/io.c
+++ b/arch/arm/kernel/io.c
@@ -1,6 +1,41 @@
#include <linux/export.h>
#include <linux/types.h>
#include <linux/io.h>
+#include <linux/spinlock.h>
+
+static DEFINE_RAW_SPINLOCK(__io_lock);
+
+/*
+ * Generic atomic MMIO modify.
+ *
+ * Allows thread-safe access to registers shared by unrelated subsystems.
+ * The access is protected by a single MMIO-wide lock.
+ */
+void atomic_io_modify_relaxed(void __iomem *reg, u32 mask, u32 set)
+{
+ unsigned long flags;
+ u32 value;
+
+ raw_spin_lock_irqsave(&__io_lock, flags);
+ value = readl_relaxed(reg) & ~mask;
+ value |= (set & mask);
+ writel_relaxed(value, reg);
+ raw_spin_unlock_irqrestore(&__io_lock, flags);
+}
+EXPORT_SYMBOL(atomic_io_modify_relaxed);
+
+void atomic_io_modify(void __iomem *reg, u32 mask, u32 set)
+{
+ unsigned long flags;
+ u32 value;
+
+ raw_spin_lock_irqsave(&__io_lock, flags);
+ value = readl_relaxed(reg) & ~mask;
+ value |= (set & mask);
+ writel(value, reg);
+ raw_spin_unlock_irqrestore(&__io_lock, flags);
+}
+EXPORT_SYMBOL(atomic_io_modify);
/*
* Copy data from IO memory space to "real" memory space.
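
As a usage illustration for the atomic_io_modify() helpers introduced above (the register pointer and bit value below are hypothetical, not taken from this patch), a driver doing a read-modify-write on a register shared with another subsystem could call:

	#include <linux/io.h>

	#define FOO_CTRL_FEATURE_EN	(1u << 3)	/* made-up bit */

	static void foo_enable_feature(void __iomem *ctrl_reg)
	{
		/*
		 * Set the bit under the single MMIO-wide lock; concurrent
		 * callers of atomic_io_modify()/atomic_io_modify_relaxed()
		 * cannot race this update.
		 */
		atomic_io_modify(ctrl_reg, FOO_CTRL_FEATURE_EN,
				 FOO_CTRL_FEATURE_EN);
	}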
diff --git a/arch/arm/kernel/irq.c b/arch/arm/kernel/irq.c
index 9723d17b8f3..2c425760451 100644
--- a/arch/arm/kernel/irq.c
+++ b/arch/arm/kernel/irq.c
@@ -37,6 +37,7 @@
#include <linux/proc_fs.h>
#include <linux/export.h>
+#include <asm/hardware/cache-l2x0.h>
#include <asm/exception.h>
#include <asm/mach/arch.h>
#include <asm/mach/irq.h>
@@ -115,10 +116,21 @@ EXPORT_SYMBOL_GPL(set_irq_flags);
void __init init_IRQ(void)
{
+ int ret;
+
if (IS_ENABLED(CONFIG_OF) && !machine_desc->init_irq)
irqchip_init();
else
machine_desc->init_irq();
+
+ if (IS_ENABLED(CONFIG_OF) && IS_ENABLED(CONFIG_CACHE_L2X0) &&
+ (machine_desc->l2c_aux_mask || machine_desc->l2c_aux_val)) {
+ outer_cache.write_sec = machine_desc->l2c_write_sec;
+ ret = l2x0_of_init(machine_desc->l2c_aux_val,
+ machine_desc->l2c_aux_mask);
+ if (ret)
+ pr_err("L2C: failed to init: %d\n", ret);
+ }
}
#ifdef CONFIG_MULTI_IRQ_HANDLER
diff --git a/arch/arm/kernel/isa.c b/arch/arm/kernel/isa.c
index 34648591073..9d1cf715689 100644
--- a/arch/arm/kernel/isa.c
+++ b/arch/arm/kernel/isa.c
@@ -20,7 +20,7 @@
static unsigned int isa_membase, isa_portbase, isa_portshift;
-static ctl_table ctl_isa_vars[4] = {
+static struct ctl_table ctl_isa_vars[4] = {
{
.procname = "membase",
.data = &isa_membase,
@@ -44,7 +44,7 @@ static ctl_table ctl_isa_vars[4] = {
static struct ctl_table_header *isa_sysctl_header;
-static ctl_table ctl_isa[2] = {
+static struct ctl_table ctl_isa[2] = {
{
.procname = "isa",
.mode = 0555,
@@ -52,7 +52,7 @@ static ctl_table ctl_isa[2] = {
}, {}
};
-static ctl_table ctl_bus[2] = {
+static struct ctl_table ctl_bus[2] = {
{
.procname = "bus",
.mode = 0555,
diff --git a/arch/arm/kernel/iwmmxt.S b/arch/arm/kernel/iwmmxt.S
index a08783823b3..2b32978ae90 100644
--- a/arch/arm/kernel/iwmmxt.S
+++ b/arch/arm/kernel/iwmmxt.S
@@ -18,13 +18,18 @@
#include <asm/ptrace.h>
#include <asm/thread_info.h>
#include <asm/asm-offsets.h>
+#include <asm/assembler.h>
-#if defined(CONFIG_CPU_PJ4)
+#if defined(CONFIG_CPU_PJ4) || defined(CONFIG_CPU_PJ4B)
#define PJ4(code...) code
#define XSC(code...)
-#else
+#elif defined(CONFIG_CPU_MOHAWK) || \
+ defined(CONFIG_CPU_XSC3) || \
+ defined(CONFIG_CPU_XSCALE)
#define PJ4(code...)
#define XSC(code...) code
+#else
+#error "Unsupported iWMMXt architecture"
#endif
#define MMX_WR0 (0x00)
@@ -61,17 +66,18 @@
* r9 = ret_from_exception
* lr = undefined instr exit
*
- * called from prefetch exception handler with interrupts disabled
+ * called from prefetch exception handler with interrupts enabled
*/
ENTRY(iwmmxt_task_enable)
+ inc_preempt_count r10, r3
XSC(mrc p15, 0, r2, c15, c1, 0)
PJ4(mrc p15, 0, r2, c1, c0, 2)
@ CP0 and CP1 accessible?
XSC(tst r2, #0x3)
PJ4(tst r2, #0xf)
- movne pc, lr @ if so no business here
+ bne 4f @ if so no business here
@ enable access to CP0 and CP1
XSC(orr r2, r2, #0x3)
XSC(mcr p15, 0, r2, c15, c1, 0)
@@ -88,13 +94,19 @@ ENTRY(iwmmxt_task_enable)
mrc p15, 0, r2, c2, c0, 0
mov r2, r2 @ cpwait
+ bl concan_save
- teq r1, #0 @ test for last ownership
- mov lr, r9 @ normal exit from exception
- beq concan_load @ no owner, skip save
+#ifdef CONFIG_PREEMPT_COUNT
+ get_thread_info r10
+#endif
+4: dec_preempt_count r10, r3
+ mov pc, r9 @ normal exit from exception
concan_save:
+ teq r1, #0 @ test for last ownership
+ beq concan_load @ no owner, skip save
+
tmrc r2, wCon
@ CUP? wCx
@@ -132,7 +144,7 @@ concan_dump:
wstrd wR15, [r1, #MMX_WR15]
2: teq r0, #0 @ anything to load?
- moveq pc, lr
+ moveq pc, lr @ if not, return
concan_load:
@@ -166,6 +178,7 @@ concan_load:
teq r1, #0
mov r2, #0
moveq pc, lr
+
tmcr wCon, r2
mov pc, lr
diff --git a/arch/arm/kernel/kgdb.c b/arch/arm/kernel/kgdb.c
index 778c2f7024f..a74b53c1b7d 100644
--- a/arch/arm/kernel/kgdb.c
+++ b/arch/arm/kernel/kgdb.c
@@ -160,12 +160,16 @@ static int kgdb_compiled_brk_fn(struct pt_regs *regs, unsigned int instr)
static struct undef_hook kgdb_brkpt_hook = {
.instr_mask = 0xffffffff,
.instr_val = KGDB_BREAKINST,
+ .cpsr_mask = MODE_MASK,
+ .cpsr_val = SVC_MODE,
.fn = kgdb_brk_fn
};
static struct undef_hook kgdb_compiled_brkpt_hook = {
.instr_mask = 0xffffffff,
.instr_val = KGDB_COMPILED_BREAK,
+ .cpsr_mask = MODE_MASK,
+ .cpsr_val = SVC_MODE,
.fn = kgdb_compiled_brk_fn
};
diff --git a/arch/arm/kernel/kprobes-arm.c b/arch/arm/kernel/kprobes-arm.c
index 8a30c89da70..ac300c60d65 100644
--- a/arch/arm/kernel/kprobes-arm.c
+++ b/arch/arm/kernel/kprobes-arm.c
@@ -60,13 +60,10 @@
#include <linux/kernel.h>
#include <linux/kprobes.h>
-#include <linux/module.h>
+#include <linux/ptrace.h>
#include "kprobes.h"
-
-#define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit)))))
-
-#define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25)
+#include "probes-arm.h"
#if __LINUX_ARM_ARCH__ >= 6
#define BLX(reg) "blx "reg" \n\t"
@@ -75,92 +72,11 @@
"mov pc, "reg" \n\t"
#endif
-/*
- * To avoid the complications of mimicing single-stepping on a
- * processor without a Next-PC or a single-step mode, and to
- * avoid having to deal with the side-effects of boosting, we
- * simulate or emulate (almost) all ARM instructions.
- *
- * "Simulation" is where the instruction's behavior is duplicated in
- * C code. "Emulation" is where the original instruction is rewritten
- * and executed, often by altering its registers.
- *
- * By having all behavior of the kprobe'd instruction completed before
- * returning from the kprobe_handler(), all locks (scheduler and
- * interrupt) can safely be released. There is no need for secondary
- * breakpoints, no race with MP or preemptable kernels, nor having to
- * clean up resources counts at a later time impacting overall system
- * performance. By rewriting the instruction, only the minimum registers
- * need to be loaded and saved back optimizing performance.
- *
- * Calling the insnslot_*_rwflags version of a function doesn't hurt
- * anything even when the CPSR flags aren't updated by the
- * instruction. It's just a little slower in return for saving
- * a little space by not having a duplicate function that doesn't
- * update the flags. (The same optimization can be said for
- * instructions that do or don't perform register writeback)
- * Also, instructions can either read the flags, only write the
- * flags, or read and write the flags. To save combinations
- * rather than for sheer performance, flag functions just assume
- * read and write of flags.
- */
-
-static void __kprobes simulate_bbl(struct kprobe *p, struct pt_regs *regs)
-{
- kprobe_opcode_t insn = p->opcode;
- long iaddr = (long)p->addr;
- int disp = branch_displacement(insn);
-
- if (insn & (1 << 24))
- regs->ARM_lr = iaddr + 4;
-
- regs->ARM_pc = iaddr + 8 + disp;
-}
-
-static void __kprobes simulate_blx1(struct kprobe *p, struct pt_regs *regs)
-{
- kprobe_opcode_t insn = p->opcode;
- long iaddr = (long)p->addr;
- int disp = branch_displacement(insn);
-
- regs->ARM_lr = iaddr + 4;
- regs->ARM_pc = iaddr + 8 + disp + ((insn >> 23) & 0x2);
- regs->ARM_cpsr |= PSR_T_BIT;
-}
-
-static void __kprobes simulate_blx2bx(struct kprobe *p, struct pt_regs *regs)
-{
- kprobe_opcode_t insn = p->opcode;
- int rm = insn & 0xf;
- long rmv = regs->uregs[rm];
-
- if (insn & (1 << 5))
- regs->ARM_lr = (long)p->addr + 4;
-
- regs->ARM_pc = rmv & ~0x1;
- regs->ARM_cpsr &= ~PSR_T_BIT;
- if (rmv & 0x1)
- regs->ARM_cpsr |= PSR_T_BIT;
-}
-
-static void __kprobes simulate_mrs(struct kprobe *p, struct pt_regs *regs)
-{
- kprobe_opcode_t insn = p->opcode;
- int rd = (insn >> 12) & 0xf;
- unsigned long mask = 0xf8ff03df; /* Mask out execution state */
- regs->uregs[rd] = regs->ARM_cpsr & mask;
-}
-
-static void __kprobes simulate_mov_ipsp(struct kprobe *p, struct pt_regs *regs)
-{
- regs->uregs[12] = regs->uregs[13];
-}
-
static void __kprobes
-emulate_ldrdstrd(struct kprobe *p, struct pt_regs *regs)
+emulate_ldrdstrd(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long pc = (unsigned long)p->addr + 8;
+ unsigned long pc = regs->ARM_pc + 4;
int rt = (insn >> 12) & 0xf;
int rn = (insn >> 16) & 0xf;
int rm = insn & 0xf;
@@ -175,7 +91,7 @@ emulate_ldrdstrd(struct kprobe *p, struct pt_regs *regs)
BLX("%[fn]")
: "=r" (rtv), "=r" (rt2v), "=r" (rnv)
: "0" (rtv), "1" (rt2v), "2" (rnv), "r" (rmv),
- [fn] "r" (p->ainsn.insn_fn)
+ [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
@@ -186,10 +102,10 @@ emulate_ldrdstrd(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-emulate_ldr(struct kprobe *p, struct pt_regs *regs)
+emulate_ldr(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long pc = (unsigned long)p->addr + 8;
+ unsigned long pc = regs->ARM_pc + 4;
int rt = (insn >> 12) & 0xf;
int rn = (insn >> 16) & 0xf;
int rm = insn & 0xf;
@@ -202,7 +118,7 @@ emulate_ldr(struct kprobe *p, struct pt_regs *regs)
__asm__ __volatile__ (
BLX("%[fn]")
: "=r" (rtv), "=r" (rnv)
- : "1" (rnv), "r" (rmv), [fn] "r" (p->ainsn.insn_fn)
+ : "1" (rnv), "r" (rmv), [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
@@ -216,11 +132,11 @@ emulate_ldr(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-emulate_str(struct kprobe *p, struct pt_regs *regs)
+emulate_str(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long rtpc = (unsigned long)p->addr + str_pc_offset;
- unsigned long rnpc = (unsigned long)p->addr + 8;
+ unsigned long rtpc = regs->ARM_pc - 4 + str_pc_offset;
+ unsigned long rnpc = regs->ARM_pc + 4;
int rt = (insn >> 12) & 0xf;
int rn = (insn >> 16) & 0xf;
int rm = insn & 0xf;
@@ -234,7 +150,7 @@ emulate_str(struct kprobe *p, struct pt_regs *regs)
__asm__ __volatile__ (
BLX("%[fn]")
: "=r" (rnv)
- : "r" (rtv), "0" (rnv), "r" (rmv), [fn] "r" (p->ainsn.insn_fn)
+ : "r" (rtv), "0" (rnv), "r" (rmv), [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
@@ -243,10 +159,10 @@ emulate_str(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-emulate_rd12rn16rm0rs8_rwflags(struct kprobe *p, struct pt_regs *regs)
+emulate_rd12rn16rm0rs8_rwflags(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long pc = (unsigned long)p->addr + 8;
+ unsigned long pc = regs->ARM_pc + 4;
int rd = (insn >> 12) & 0xf;
int rn = (insn >> 16) & 0xf;
int rm = insn & 0xf;
@@ -266,7 +182,7 @@ emulate_rd12rn16rm0rs8_rwflags(struct kprobe *p, struct pt_regs *regs)
"mrs %[cpsr], cpsr \n\t"
: "=r" (rdv), [cpsr] "=r" (cpsr)
: "0" (rdv), "r" (rnv), "r" (rmv), "r" (rsv),
- "1" (cpsr), [fn] "r" (p->ainsn.insn_fn)
+ "1" (cpsr), [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
@@ -278,9 +194,9 @@ emulate_rd12rn16rm0rs8_rwflags(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-emulate_rd12rn16rm0_rwflags_nopc(struct kprobe *p, struct pt_regs *regs)
+emulate_rd12rn16rm0_rwflags_nopc(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
int rd = (insn >> 12) & 0xf;
int rn = (insn >> 16) & 0xf;
int rm = insn & 0xf;
@@ -296,7 +212,7 @@ emulate_rd12rn16rm0_rwflags_nopc(struct kprobe *p, struct pt_regs *regs)
"mrs %[cpsr], cpsr \n\t"
: "=r" (rdv), [cpsr] "=r" (cpsr)
: "0" (rdv), "r" (rnv), "r" (rmv),
- "1" (cpsr), [fn] "r" (p->ainsn.insn_fn)
+ "1" (cpsr), [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
@@ -305,9 +221,10 @@ emulate_rd12rn16rm0_rwflags_nopc(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-emulate_rd16rn12rm0rs8_rwflags_nopc(struct kprobe *p, struct pt_regs *regs)
+emulate_rd16rn12rm0rs8_rwflags_nopc(probes_opcode_t insn,
+ struct arch_probes_insn *asi,
+ struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
int rd = (insn >> 16) & 0xf;
int rn = (insn >> 12) & 0xf;
int rm = insn & 0xf;
@@ -325,7 +242,7 @@ emulate_rd16rn12rm0rs8_rwflags_nopc(struct kprobe *p, struct pt_regs *regs)
"mrs %[cpsr], cpsr \n\t"
: "=r" (rdv), [cpsr] "=r" (cpsr)
: "0" (rdv), "r" (rnv), "r" (rmv), "r" (rsv),
- "1" (cpsr), [fn] "r" (p->ainsn.insn_fn)
+ "1" (cpsr), [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
@@ -334,9 +251,9 @@ emulate_rd16rn12rm0rs8_rwflags_nopc(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-emulate_rd12rm0_noflags_nopc(struct kprobe *p, struct pt_regs *regs)
+emulate_rd12rm0_noflags_nopc(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
int rd = (insn >> 12) & 0xf;
int rm = insn & 0xf;
@@ -346,7 +263,7 @@ emulate_rd12rm0_noflags_nopc(struct kprobe *p, struct pt_regs *regs)
__asm__ __volatile__ (
BLX("%[fn]")
: "=r" (rdv)
- : "0" (rdv), "r" (rmv), [fn] "r" (p->ainsn.insn_fn)
+ : "0" (rdv), "r" (rmv), [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
@@ -354,9 +271,10 @@ emulate_rd12rm0_noflags_nopc(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-emulate_rdlo12rdhi16rn0rm8_rwflags_nopc(struct kprobe *p, struct pt_regs *regs)
+emulate_rdlo12rdhi16rn0rm8_rwflags_nopc(probes_opcode_t insn,
+ struct arch_probes_insn *asi,
+ struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
int rdlo = (insn >> 12) & 0xf;
int rdhi = (insn >> 16) & 0xf;
int rn = insn & 0xf;
@@ -374,7 +292,7 @@ emulate_rdlo12rdhi16rn0rm8_rwflags_nopc(struct kprobe *p, struct pt_regs *regs)
"mrs %[cpsr], cpsr \n\t"
: "=r" (rdlov), "=r" (rdhiv), [cpsr] "=r" (cpsr)
: "0" (rdlov), "1" (rdhiv), "r" (rnv), "r" (rmv),
- "2" (cpsr), [fn] "r" (p->ainsn.insn_fn)
+ "2" (cpsr), [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
@@ -383,623 +301,43 @@ emulate_rdlo12rdhi16rn0rm8_rwflags_nopc(struct kprobe *p, struct pt_regs *regs)
regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK);
}
-/*
- * For the instruction masking and comparisons in all the "space_*"
- * functions below, Do _not_ rearrange the order of tests unless
- * you're very, very sure of what you are doing. For the sake of
- * efficiency, the masks for some tests sometimes assume other test
- * have been done prior to them so the number of patterns to test
- * for an instruction set can be as broad as possible to reduce the
- * number of tests needed.
- */
-
-static const union decode_item arm_1111_table[] = {
- /* Unconditional instructions */
-
- /* memory hint 1111 0100 x001 xxxx xxxx xxxx xxxx xxxx */
- /* PLDI (immediate) 1111 0100 x101 xxxx xxxx xxxx xxxx xxxx */
- /* PLDW (immediate) 1111 0101 x001 xxxx xxxx xxxx xxxx xxxx */
- /* PLD (immediate) 1111 0101 x101 xxxx xxxx xxxx xxxx xxxx */
- DECODE_SIMULATE (0xfe300000, 0xf4100000, kprobe_simulate_nop),
-
- /* memory hint 1111 0110 x001 xxxx xxxx xxxx xxx0 xxxx */
- /* PLDI (register) 1111 0110 x101 xxxx xxxx xxxx xxx0 xxxx */
- /* PLDW (register) 1111 0111 x001 xxxx xxxx xxxx xxx0 xxxx */
- /* PLD (register) 1111 0111 x101 xxxx xxxx xxxx xxx0 xxxx */
- DECODE_SIMULATE (0xfe300010, 0xf6100000, kprobe_simulate_nop),
-
- /* BLX (immediate) 1111 101x xxxx xxxx xxxx xxxx xxxx xxxx */
- DECODE_SIMULATE (0xfe000000, 0xfa000000, simulate_blx1),
-
- /* CPS 1111 0001 0000 xxx0 xxxx xxxx xx0x xxxx */
- /* SETEND 1111 0001 0000 0001 xxxx xxxx 0000 xxxx */
- /* SRS 1111 100x x1x0 xxxx xxxx xxxx xxxx xxxx */
- /* RFE 1111 100x x0x1 xxxx xxxx xxxx xxxx xxxx */
-
- /* Coprocessor instructions... */
- /* MCRR2 1111 1100 0100 xxxx xxxx xxxx xxxx xxxx */
- /* MRRC2 1111 1100 0101 xxxx xxxx xxxx xxxx xxxx */
- /* LDC2 1111 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
- /* STC2 1111 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
- /* CDP2 1111 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
- /* MCR2 1111 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
- /* MRC2 1111 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */
-
- /* Other unallocated instructions... */
- DECODE_END
-};
-
-static const union decode_item arm_cccc_0001_0xx0____0xxx_table[] = {
- /* Miscellaneous instructions */
-
- /* MRS cpsr cccc 0001 0000 xxxx xxxx xxxx 0000 xxxx */
- DECODE_SIMULATEX(0x0ff000f0, 0x01000000, simulate_mrs,
- REGS(0, NOPC, 0, 0, 0)),
-
- /* BX cccc 0001 0010 xxxx xxxx xxxx 0001 xxxx */
- DECODE_SIMULATE (0x0ff000f0, 0x01200010, simulate_blx2bx),
-
- /* BLX (register) cccc 0001 0010 xxxx xxxx xxxx 0011 xxxx */
- DECODE_SIMULATEX(0x0ff000f0, 0x01200030, simulate_blx2bx,
- REGS(0, 0, 0, 0, NOPC)),
-
- /* CLZ cccc 0001 0110 xxxx xxxx xxxx 0001 xxxx */
- DECODE_EMULATEX (0x0ff000f0, 0x01600010, emulate_rd12rm0_noflags_nopc,
- REGS(0, NOPC, 0, 0, NOPC)),
-
- /* QADD cccc 0001 0000 xxxx xxxx xxxx 0101 xxxx */
- /* QSUB cccc 0001 0010 xxxx xxxx xxxx 0101 xxxx */
- /* QDADD cccc 0001 0100 xxxx xxxx xxxx 0101 xxxx */
- /* QDSUB cccc 0001 0110 xxxx xxxx xxxx 0101 xxxx */
- DECODE_EMULATEX (0x0f9000f0, 0x01000050, emulate_rd12rn16rm0_rwflags_nopc,
- REGS(NOPC, NOPC, 0, 0, NOPC)),
-
- /* BXJ cccc 0001 0010 xxxx xxxx xxxx 0010 xxxx */
- /* MSR cccc 0001 0x10 xxxx xxxx xxxx 0000 xxxx */
- /* MRS spsr cccc 0001 0100 xxxx xxxx xxxx 0000 xxxx */
- /* BKPT 1110 0001 0010 xxxx xxxx xxxx 0111 xxxx */
- /* SMC cccc 0001 0110 xxxx xxxx xxxx 0111 xxxx */
- /* And unallocated instructions... */
- DECODE_END
-};
-
-static const union decode_item arm_cccc_0001_0xx0____1xx0_table[] = {
- /* Halfword multiply and multiply-accumulate */
-
- /* SMLALxy cccc 0001 0100 xxxx xxxx xxxx 1xx0 xxxx */
- DECODE_EMULATEX (0x0ff00090, 0x01400080, emulate_rdlo12rdhi16rn0rm8_rwflags_nopc,
- REGS(NOPC, NOPC, NOPC, 0, NOPC)),
-
- /* SMULWy cccc 0001 0010 xxxx xxxx xxxx 1x10 xxxx */
- DECODE_OR (0x0ff000b0, 0x012000a0),
- /* SMULxy cccc 0001 0110 xxxx xxxx xxxx 1xx0 xxxx */
- DECODE_EMULATEX (0x0ff00090, 0x01600080, emulate_rd16rn12rm0rs8_rwflags_nopc,
- REGS(NOPC, 0, NOPC, 0, NOPC)),
-
- /* SMLAxy cccc 0001 0000 xxxx xxxx xxxx 1xx0 xxxx */
- DECODE_OR (0x0ff00090, 0x01000080),
- /* SMLAWy cccc 0001 0010 xxxx xxxx xxxx 1x00 xxxx */
- DECODE_EMULATEX (0x0ff000b0, 0x01200080, emulate_rd16rn12rm0rs8_rwflags_nopc,
- REGS(NOPC, NOPC, NOPC, 0, NOPC)),
-
- DECODE_END
+const union decode_action kprobes_arm_actions[NUM_PROBES_ARM_ACTIONS] = {
+ [PROBES_EMULATE_NONE] = {.handler = probes_emulate_none},
+ [PROBES_SIMULATE_NOP] = {.handler = probes_simulate_nop},
+ [PROBES_PRELOAD_IMM] = {.handler = probes_simulate_nop},
+ [PROBES_PRELOAD_REG] = {.handler = probes_simulate_nop},
+ [PROBES_BRANCH_IMM] = {.handler = simulate_blx1},
+ [PROBES_MRS] = {.handler = simulate_mrs},
+ [PROBES_BRANCH_REG] = {.handler = simulate_blx2bx},
+ [PROBES_CLZ] = {.handler = emulate_rd12rm0_noflags_nopc},
+ [PROBES_SATURATING_ARITHMETIC] = {
+ .handler = emulate_rd12rn16rm0_rwflags_nopc},
+ [PROBES_MUL1] = {.handler = emulate_rdlo12rdhi16rn0rm8_rwflags_nopc},
+ [PROBES_MUL2] = {.handler = emulate_rd16rn12rm0rs8_rwflags_nopc},
+ [PROBES_SWP] = {.handler = emulate_rd12rn16rm0_rwflags_nopc},
+ [PROBES_LDRSTRD] = {.handler = emulate_ldrdstrd},
+ [PROBES_LOAD_EXTRA] = {.handler = emulate_ldr},
+ [PROBES_LOAD] = {.handler = emulate_ldr},
+ [PROBES_STORE_EXTRA] = {.handler = emulate_str},
+ [PROBES_STORE] = {.handler = emulate_str},
+ [PROBES_MOV_IP_SP] = {.handler = simulate_mov_ipsp},
+ [PROBES_DATA_PROCESSING_REG] = {
+ .handler = emulate_rd12rn16rm0rs8_rwflags},
+ [PROBES_DATA_PROCESSING_IMM] = {
+ .handler = emulate_rd12rn16rm0rs8_rwflags},
+ [PROBES_MOV_HALFWORD] = {.handler = emulate_rd12rm0_noflags_nopc},
+ [PROBES_SEV] = {.handler = probes_emulate_none},
+ [PROBES_WFE] = {.handler = probes_simulate_nop},
+ [PROBES_SATURATE] = {.handler = emulate_rd12rn16rm0_rwflags_nopc},
+ [PROBES_REV] = {.handler = emulate_rd12rm0_noflags_nopc},
+ [PROBES_MMI] = {.handler = emulate_rd12rn16rm0_rwflags_nopc},
+ [PROBES_PACK] = {.handler = emulate_rd12rn16rm0_rwflags_nopc},
+ [PROBES_EXTEND] = {.handler = emulate_rd12rm0_noflags_nopc},
+ [PROBES_EXTEND_ADD] = {.handler = emulate_rd12rn16rm0_rwflags_nopc},
+ [PROBES_MUL_ADD_LONG] = {
+ .handler = emulate_rdlo12rdhi16rn0rm8_rwflags_nopc},
+ [PROBES_MUL_ADD] = {.handler = emulate_rd16rn12rm0rs8_rwflags_nopc},
+ [PROBES_BITFIELD] = {.handler = emulate_rd12rm0_noflags_nopc},
+ [PROBES_BRANCH] = {.handler = simulate_bbl},
+ [PROBES_LDMSTM] = {.decoder = kprobe_decode_ldmstm}
};
-
-static const union decode_item arm_cccc_0000_____1001_table[] = {
- /* Multiply and multiply-accumulate */
-
- /* MUL cccc 0000 0000 xxxx xxxx xxxx 1001 xxxx */
- /* MULS cccc 0000 0001 xxxx xxxx xxxx 1001 xxxx */
- DECODE_EMULATEX (0x0fe000f0, 0x00000090, emulate_rd16rn12rm0rs8_rwflags_nopc,
- REGS(NOPC, 0, NOPC, 0, NOPC)),
-
- /* MLA cccc 0000 0010 xxxx xxxx xxxx 1001 xxxx */
- /* MLAS cccc 0000 0011 xxxx xxxx xxxx 1001 xxxx */
- DECODE_OR (0x0fe000f0, 0x00200090),
- /* MLS cccc 0000 0110 xxxx xxxx xxxx 1001 xxxx */
- DECODE_EMULATEX (0x0ff000f0, 0x00600090, emulate_rd16rn12rm0rs8_rwflags_nopc,
- REGS(NOPC, NOPC, NOPC, 0, NOPC)),
-
- /* UMAAL cccc 0000 0100 xxxx xxxx xxxx 1001 xxxx */
- DECODE_OR (0x0ff000f0, 0x00400090),
- /* UMULL cccc 0000 1000 xxxx xxxx xxxx 1001 xxxx */
- /* UMULLS cccc 0000 1001 xxxx xxxx xxxx 1001 xxxx */
- /* UMLAL cccc 0000 1010 xxxx xxxx xxxx 1001 xxxx */
- /* UMLALS cccc 0000 1011 xxxx xxxx xxxx 1001 xxxx */
- /* SMULL cccc 0000 1100 xxxx xxxx xxxx 1001 xxxx */
- /* SMULLS cccc 0000 1101 xxxx xxxx xxxx 1001 xxxx */
- /* SMLAL cccc 0000 1110 xxxx xxxx xxxx 1001 xxxx */
- /* SMLALS cccc 0000 1111 xxxx xxxx xxxx 1001 xxxx */
- DECODE_EMULATEX (0x0f8000f0, 0x00800090, emulate_rdlo12rdhi16rn0rm8_rwflags_nopc,
- REGS(NOPC, NOPC, NOPC, 0, NOPC)),
-
- DECODE_END
-};
-
-static const union decode_item arm_cccc_0001_____1001_table[] = {
- /* Synchronization primitives */
-
-#if __LINUX_ARM_ARCH__ < 6
- /* Deprecated on ARMv6 and may be UNDEFINED on v7 */
- /* SMP/SWPB cccc 0001 0x00 xxxx xxxx xxxx 1001 xxxx */
- DECODE_EMULATEX (0x0fb000f0, 0x01000090, emulate_rd12rn16rm0_rwflags_nopc,
- REGS(NOPC, NOPC, 0, 0, NOPC)),
-#endif
- /* LDREX/STREX{,D,B,H} cccc 0001 1xxx xxxx xxxx xxxx 1001 xxxx */
- /* And unallocated instructions... */
- DECODE_END
-};
-
-static const union decode_item arm_cccc_000x_____1xx1_table[] = {
- /* Extra load/store instructions */
-
- /* STRHT cccc 0000 xx10 xxxx xxxx xxxx 1011 xxxx */
- /* ??? cccc 0000 xx10 xxxx xxxx xxxx 11x1 xxxx */
- /* LDRHT cccc 0000 xx11 xxxx xxxx xxxx 1011 xxxx */
- /* LDRSBT cccc 0000 xx11 xxxx xxxx xxxx 1101 xxxx */
- /* LDRSHT cccc 0000 xx11 xxxx xxxx xxxx 1111 xxxx */
- DECODE_REJECT (0x0f200090, 0x00200090),
-
- /* LDRD/STRD lr,pc,{... cccc 000x x0x0 xxxx 111x xxxx 1101 xxxx */
- DECODE_REJECT (0x0e10e0d0, 0x0000e0d0),
-
- /* LDRD (register) cccc 000x x0x0 xxxx xxxx xxxx 1101 xxxx */
- /* STRD (register) cccc 000x x0x0 xxxx xxxx xxxx 1111 xxxx */
- DECODE_EMULATEX (0x0e5000d0, 0x000000d0, emulate_ldrdstrd,
- REGS(NOPCWB, NOPCX, 0, 0, NOPC)),
-
- /* LDRD (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1101 xxxx */
- /* STRD (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1111 xxxx */
- DECODE_EMULATEX (0x0e5000d0, 0x004000d0, emulate_ldrdstrd,
- REGS(NOPCWB, NOPCX, 0, 0, 0)),
-
- /* STRH (register) cccc 000x x0x0 xxxx xxxx xxxx 1011 xxxx */
- DECODE_EMULATEX (0x0e5000f0, 0x000000b0, emulate_str,
- REGS(NOPCWB, NOPC, 0, 0, NOPC)),
-
- /* LDRH (register) cccc 000x x0x1 xxxx xxxx xxxx 1011 xxxx */
- /* LDRSB (register) cccc 000x x0x1 xxxx xxxx xxxx 1101 xxxx */
- /* LDRSH (register) cccc 000x x0x1 xxxx xxxx xxxx 1111 xxxx */
- DECODE_EMULATEX (0x0e500090, 0x00100090, emulate_ldr,
- REGS(NOPCWB, NOPC, 0, 0, NOPC)),
-
- /* STRH (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1011 xxxx */
- DECODE_EMULATEX (0x0e5000f0, 0x004000b0, emulate_str,
- REGS(NOPCWB, NOPC, 0, 0, 0)),
-
- /* LDRH (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1011 xxxx */
- /* LDRSB (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1101 xxxx */
- /* LDRSH (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1111 xxxx */
- DECODE_EMULATEX (0x0e500090, 0x00500090, emulate_ldr,
- REGS(NOPCWB, NOPC, 0, 0, 0)),
-
- DECODE_END
-};
-
-static const union decode_item arm_cccc_000x_table[] = {
- /* Data-processing (register) */
-
- /* <op>S PC, ... cccc 000x xxx1 xxxx 1111 xxxx xxxx xxxx */
- DECODE_REJECT (0x0e10f000, 0x0010f000),
-
- /* MOV IP, SP 1110 0001 1010 0000 1100 0000 0000 1101 */
- DECODE_SIMULATE (0xffffffff, 0xe1a0c00d, simulate_mov_ipsp),
-
- /* TST (register) cccc 0001 0001 xxxx xxxx xxxx xxx0 xxxx */
- /* TEQ (register) cccc 0001 0011 xxxx xxxx xxxx xxx0 xxxx */
- /* CMP (register) cccc 0001 0101 xxxx xxxx xxxx xxx0 xxxx */
- /* CMN (register) cccc 0001 0111 xxxx xxxx xxxx xxx0 xxxx */
- DECODE_EMULATEX (0x0f900010, 0x01100000, emulate_rd12rn16rm0rs8_rwflags,
- REGS(ANY, 0, 0, 0, ANY)),
-
- /* MOV (register) cccc 0001 101x xxxx xxxx xxxx xxx0 xxxx */
- /* MVN (register) cccc 0001 111x xxxx xxxx xxxx xxx0 xxxx */
- DECODE_EMULATEX (0x0fa00010, 0x01a00000, emulate_rd12rn16rm0rs8_rwflags,
- REGS(0, ANY, 0, 0, ANY)),
-
- /* AND (register) cccc 0000 000x xxxx xxxx xxxx xxx0 xxxx */
- /* EOR (register) cccc 0000 001x xxxx xxxx xxxx xxx0 xxxx */
- /* SUB (register) cccc 0000 010x xxxx xxxx xxxx xxx0 xxxx */
- /* RSB (register) cccc 0000 011x xxxx xxxx xxxx xxx0 xxxx */
- /* ADD (register) cccc 0000 100x xxxx xxxx xxxx xxx0 xxxx */
- /* ADC (register) cccc 0000 101x xxxx xxxx xxxx xxx0 xxxx */
- /* SBC (register) cccc 0000 110x xxxx xxxx xxxx xxx0 xxxx */
- /* RSC (register) cccc 0000 111x xxxx xxxx xxxx xxx0 xxxx */
- /* ORR (register) cccc 0001 100x xxxx xxxx xxxx xxx0 xxxx */
- /* BIC (register) cccc 0001 110x xxxx xxxx xxxx xxx0 xxxx */
- DECODE_EMULATEX (0x0e000010, 0x00000000, emulate_rd12rn16rm0rs8_rwflags,
- REGS(ANY, ANY, 0, 0, ANY)),
-
- /* TST (reg-shift reg) cccc 0001 0001 xxxx xxxx xxxx 0xx1 xxxx */
- /* TEQ (reg-shift reg) cccc 0001 0011 xxxx xxxx xxxx 0xx1 xxxx */
- /* CMP (reg-shift reg) cccc 0001 0101 xxxx xxxx xxxx 0xx1 xxxx */
- /* CMN (reg-shift reg) cccc 0001 0111 xxxx xxxx xxxx 0xx1 xxxx */
- DECODE_EMULATEX (0x0f900090, 0x01100010, emulate_rd12rn16rm0rs8_rwflags,
- REGS(ANY, 0, NOPC, 0, ANY)),
-
- /* MOV (reg-shift reg) cccc 0001 101x xxxx xxxx xxxx 0xx1 xxxx */
- /* MVN (reg-shift reg) cccc 0001 111x xxxx xxxx xxxx 0xx1 xxxx */
- DECODE_EMULATEX (0x0fa00090, 0x01a00010, emulate_rd12rn16rm0rs8_rwflags,
- REGS(0, ANY, NOPC, 0, ANY)),
-
- /* AND (reg-shift reg) cccc 0000 000x xxxx xxxx xxxx 0xx1 xxxx */
- /* EOR (reg-shift reg) cccc 0000 001x xxxx xxxx xxxx 0xx1 xxxx */
- /* SUB (reg-shift reg) cccc 0000 010x xxxx xxxx xxxx 0xx1 xxxx */
- /* RSB (reg-shift reg) cccc 0000 011x xxxx xxxx xxxx 0xx1 xxxx */
- /* ADD (reg-shift reg) cccc 0000 100x xxxx xxxx xxxx 0xx1 xxxx */
- /* ADC (reg-shift reg) cccc 0000 101x xxxx xxxx xxxx 0xx1 xxxx */
- /* SBC (reg-shift reg) cccc 0000 110x xxxx xxxx xxxx 0xx1 xxxx */
- /* RSC (reg-shift reg) cccc 0000 111x xxxx xxxx xxxx 0xx1 xxxx */
- /* ORR (reg-shift reg) cccc 0001 100x xxxx xxxx xxxx 0xx1 xxxx */
- /* BIC (reg-shift reg) cccc 0001 110x xxxx xxxx xxxx 0xx1 xxxx */
- DECODE_EMULATEX (0x0e000090, 0x00000010, emulate_rd12rn16rm0rs8_rwflags,
- REGS(ANY, ANY, NOPC, 0, ANY)),
-
- DECODE_END
-};
-
-static const union decode_item arm_cccc_001x_table[] = {
- /* Data-processing (immediate) */
-
- /* MOVW cccc 0011 0000 xxxx xxxx xxxx xxxx xxxx */
- /* MOVT cccc 0011 0100 xxxx xxxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0x0fb00000, 0x03000000, emulate_rd12rm0_noflags_nopc,
- REGS(0, NOPC, 0, 0, 0)),
-
- /* YIELD cccc 0011 0010 0000 xxxx xxxx 0000 0001 */
- DECODE_OR (0x0fff00ff, 0x03200001),
- /* SEV cccc 0011 0010 0000 xxxx xxxx 0000 0100 */
- DECODE_EMULATE (0x0fff00ff, 0x03200004, kprobe_emulate_none),
- /* NOP cccc 0011 0010 0000 xxxx xxxx 0000 0000 */
- /* WFE cccc 0011 0010 0000 xxxx xxxx 0000 0010 */
- /* WFI cccc 0011 0010 0000 xxxx xxxx 0000 0011 */
- DECODE_SIMULATE (0x0fff00fc, 0x03200000, kprobe_simulate_nop),
- /* DBG cccc 0011 0010 0000 xxxx xxxx ffff xxxx */
- /* unallocated hints cccc 0011 0010 0000 xxxx xxxx xxxx xxxx */
- /* MSR (immediate) cccc 0011 0x10 xxxx xxxx xxxx xxxx xxxx */
- DECODE_REJECT (0x0fb00000, 0x03200000),
-
- /* <op>S PC, ... cccc 001x xxx1 xxxx 1111 xxxx xxxx xxxx */
- DECODE_REJECT (0x0e10f000, 0x0210f000),
-
- /* TST (immediate) cccc 0011 0001 xxxx xxxx xxxx xxxx xxxx */
- /* TEQ (immediate) cccc 0011 0011 xxxx xxxx xxxx xxxx xxxx */
- /* CMP (immediate) cccc 0011 0101 xxxx xxxx xxxx xxxx xxxx */
- /* CMN (immediate) cccc 0011 0111 xxxx xxxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0x0f900000, 0x03100000, emulate_rd12rn16rm0rs8_rwflags,
- REGS(ANY, 0, 0, 0, 0)),
-
- /* MOV (immediate) cccc 0011 101x xxxx xxxx xxxx xxxx xxxx */
- /* MVN (immediate) cccc 0011 111x xxxx xxxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0x0fa00000, 0x03a00000, emulate_rd12rn16rm0rs8_rwflags,
- REGS(0, ANY, 0, 0, 0)),
-
- /* AND (immediate) cccc 0010 000x xxxx xxxx xxxx xxxx xxxx */
- /* EOR (immediate) cccc 0010 001x xxxx xxxx xxxx xxxx xxxx */
- /* SUB (immediate) cccc 0010 010x xxxx xxxx xxxx xxxx xxxx */
- /* RSB (immediate) cccc 0010 011x xxxx xxxx xxxx xxxx xxxx */
- /* ADD (immediate) cccc 0010 100x xxxx xxxx xxxx xxxx xxxx */
- /* ADC (immediate) cccc 0010 101x xxxx xxxx xxxx xxxx xxxx */
- /* SBC (immediate) cccc 0010 110x xxxx xxxx xxxx xxxx xxxx */
- /* RSC (immediate) cccc 0010 111x xxxx xxxx xxxx xxxx xxxx */
- /* ORR (immediate) cccc 0011 100x xxxx xxxx xxxx xxxx xxxx */
- /* BIC (immediate) cccc 0011 110x xxxx xxxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0x0e000000, 0x02000000, emulate_rd12rn16rm0rs8_rwflags,
- REGS(ANY, ANY, 0, 0, 0)),
-
- DECODE_END
-};
-
-static const union decode_item arm_cccc_0110_____xxx1_table[] = {
- /* Media instructions */
-
- /* SEL cccc 0110 1000 xxxx xxxx xxxx 1011 xxxx */
- DECODE_EMULATEX (0x0ff000f0, 0x068000b0, emulate_rd12rn16rm0_rwflags_nopc,
- REGS(NOPC, NOPC, 0, 0, NOPC)),
-
- /* SSAT cccc 0110 101x xxxx xxxx xxxx xx01 xxxx */
- /* USAT cccc 0110 111x xxxx xxxx xxxx xx01 xxxx */
- DECODE_OR(0x0fa00030, 0x06a00010),
- /* SSAT16 cccc 0110 1010 xxxx xxxx xxxx 0011 xxxx */
- /* USAT16 cccc 0110 1110 xxxx xxxx xxxx 0011 xxxx */
- DECODE_EMULATEX (0x0fb000f0, 0x06a00030, emulate_rd12rn16rm0_rwflags_nopc,
- REGS(0, NOPC, 0, 0, NOPC)),
-
- /* REV cccc 0110 1011 xxxx xxxx xxxx 0011 xxxx */
- /* REV16 cccc 0110 1011 xxxx xxxx xxxx 1011 xxxx */
- /* RBIT cccc 0110 1111 xxxx xxxx xxxx 0011 xxxx */
- /* REVSH cccc 0110 1111 xxxx xxxx xxxx 1011 xxxx */
- DECODE_EMULATEX (0x0fb00070, 0x06b00030, emulate_rd12rm0_noflags_nopc,
- REGS(0, NOPC, 0, 0, NOPC)),
-
- /* ??? cccc 0110 0x00 xxxx xxxx xxxx xxx1 xxxx */
- DECODE_REJECT (0x0fb00010, 0x06000010),
- /* ??? cccc 0110 0xxx xxxx xxxx xxxx 1011 xxxx */
- DECODE_REJECT (0x0f8000f0, 0x060000b0),
- /* ??? cccc 0110 0xxx xxxx xxxx xxxx 1101 xxxx */
- DECODE_REJECT (0x0f8000f0, 0x060000d0),
- /* SADD16 cccc 0110 0001 xxxx xxxx xxxx 0001 xxxx */
- /* SADDSUBX cccc 0110 0001 xxxx xxxx xxxx 0011 xxxx */
- /* SSUBADDX cccc 0110 0001 xxxx xxxx xxxx 0101 xxxx */
- /* SSUB16 cccc 0110 0001 xxxx xxxx xxxx 0111 xxxx */
- /* SADD8 cccc 0110 0001 xxxx xxxx xxxx 1001 xxxx */
- /* SSUB8 cccc 0110 0001 xxxx xxxx xxxx 1111 xxxx */
- /* QADD16 cccc 0110 0010 xxxx xxxx xxxx 0001 xxxx */
- /* QADDSUBX cccc 0110 0010 xxxx xxxx xxxx 0011 xxxx */
- /* QSUBADDX cccc 0110 0010 xxxx xxxx xxxx 0101 xxxx */
- /* QSUB16 cccc 0110 0010 xxxx xxxx xxxx 0111 xxxx */
- /* QADD8 cccc 0110 0010 xxxx xxxx xxxx 1001 xxxx */
- /* QSUB8 cccc 0110 0010 xxxx xxxx xxxx 1111 xxxx */
- /* SHADD16 cccc 0110 0011 xxxx xxxx xxxx 0001 xxxx */
- /* SHADDSUBX cccc 0110 0011 xxxx xxxx xxxx 0011 xxxx */
- /* SHSUBADDX cccc 0110 0011 xxxx xxxx xxxx 0101 xxxx */
- /* SHSUB16 cccc 0110 0011 xxxx xxxx xxxx 0111 xxxx */
- /* SHADD8 cccc 0110 0011 xxxx xxxx xxxx 1001 xxxx */
- /* SHSUB8 cccc 0110 0011 xxxx xxxx xxxx 1111 xxxx */
- /* UADD16 cccc 0110 0101 xxxx xxxx xxxx 0001 xxxx */
- /* UADDSUBX cccc 0110 0101 xxxx xxxx xxxx 0011 xxxx */
- /* USUBADDX cccc 0110 0101 xxxx xxxx xxxx 0101 xxxx */
- /* USUB16 cccc 0110 0101 xxxx xxxx xxxx 0111 xxxx */
- /* UADD8 cccc 0110 0101 xxxx xxxx xxxx 1001 xxxx */
- /* USUB8 cccc 0110 0101 xxxx xxxx xxxx 1111 xxxx */
- /* UQADD16 cccc 0110 0110 xxxx xxxx xxxx 0001 xxxx */
- /* UQADDSUBX cccc 0110 0110 xxxx xxxx xxxx 0011 xxxx */
- /* UQSUBADDX cccc 0110 0110 xxxx xxxx xxxx 0101 xxxx */
- /* UQSUB16 cccc 0110 0110 xxxx xxxx xxxx 0111 xxxx */
- /* UQADD8 cccc 0110 0110 xxxx xxxx xxxx 1001 xxxx */
- /* UQSUB8 cccc 0110 0110 xxxx xxxx xxxx 1111 xxxx */
- /* UHADD16 cccc 0110 0111 xxxx xxxx xxxx 0001 xxxx */
- /* UHADDSUBX cccc 0110 0111 xxxx xxxx xxxx 0011 xxxx */
- /* UHSUBADDX cccc 0110 0111 xxxx xxxx xxxx 0101 xxxx */
- /* UHSUB16 cccc 0110 0111 xxxx xxxx xxxx 0111 xxxx */
- /* UHADD8 cccc 0110 0111 xxxx xxxx xxxx 1001 xxxx */
- /* UHSUB8 cccc 0110 0111 xxxx xxxx xxxx 1111 xxxx */
- DECODE_EMULATEX (0x0f800010, 0x06000010, emulate_rd12rn16rm0_rwflags_nopc,
- REGS(NOPC, NOPC, 0, 0, NOPC)),
-
- /* PKHBT cccc 0110 1000 xxxx xxxx xxxx x001 xxxx */
- /* PKHTB cccc 0110 1000 xxxx xxxx xxxx x101 xxxx */
- DECODE_EMULATEX (0x0ff00030, 0x06800010, emulate_rd12rn16rm0_rwflags_nopc,
- REGS(NOPC, NOPC, 0, 0, NOPC)),
-
- /* ??? cccc 0110 1001 xxxx xxxx xxxx 0111 xxxx */
- /* ??? cccc 0110 1101 xxxx xxxx xxxx 0111 xxxx */
- DECODE_REJECT (0x0fb000f0, 0x06900070),
-
- /* SXTB16 cccc 0110 1000 1111 xxxx xxxx 0111 xxxx */
- /* SXTB cccc 0110 1010 1111 xxxx xxxx 0111 xxxx */
- /* SXTH cccc 0110 1011 1111 xxxx xxxx 0111 xxxx */
- /* UXTB16 cccc 0110 1100 1111 xxxx xxxx 0111 xxxx */
- /* UXTB cccc 0110 1110 1111 xxxx xxxx 0111 xxxx */
- /* UXTH cccc 0110 1111 1111 xxxx xxxx 0111 xxxx */
- DECODE_EMULATEX (0x0f8f00f0, 0x068f0070, emulate_rd12rm0_noflags_nopc,
- REGS(0, NOPC, 0, 0, NOPC)),
-
- /* SXTAB16 cccc 0110 1000 xxxx xxxx xxxx 0111 xxxx */
- /* SXTAB cccc 0110 1010 xxxx xxxx xxxx 0111 xxxx */
- /* SXTAH cccc 0110 1011 xxxx xxxx xxxx 0111 xxxx */
- /* UXTAB16 cccc 0110 1100 xxxx xxxx xxxx 0111 xxxx */
- /* UXTAB cccc 0110 1110 xxxx xxxx xxxx 0111 xxxx */
- /* UXTAH cccc 0110 1111 xxxx xxxx xxxx 0111 xxxx */
- DECODE_EMULATEX (0x0f8000f0, 0x06800070, emulate_rd12rn16rm0_rwflags_nopc,
- REGS(NOPCX, NOPC, 0, 0, NOPC)),
-
- DECODE_END
-};
-
-static const union decode_item arm_cccc_0111_____xxx1_table[] = {
- /* Media instructions */
-
- /* UNDEFINED cccc 0111 1111 xxxx xxxx xxxx 1111 xxxx */
- DECODE_REJECT (0x0ff000f0, 0x07f000f0),
-
- /* SMLALD cccc 0111 0100 xxxx xxxx xxxx 00x1 xxxx */
- /* SMLSLD cccc 0111 0100 xxxx xxxx xxxx 01x1 xxxx */
- DECODE_EMULATEX (0x0ff00090, 0x07400010, emulate_rdlo12rdhi16rn0rm8_rwflags_nopc,
- REGS(NOPC, NOPC, NOPC, 0, NOPC)),
-
- /* SMUAD cccc 0111 0000 xxxx 1111 xxxx 00x1 xxxx */
- /* SMUSD cccc 0111 0000 xxxx 1111 xxxx 01x1 xxxx */
- DECODE_OR (0x0ff0f090, 0x0700f010),
- /* SMMUL cccc 0111 0101 xxxx 1111 xxxx 00x1 xxxx */
- DECODE_OR (0x0ff0f0d0, 0x0750f010),
- /* USAD8 cccc 0111 1000 xxxx 1111 xxxx 0001 xxxx */
- DECODE_EMULATEX (0x0ff0f0f0, 0x0780f010, emulate_rd16rn12rm0rs8_rwflags_nopc,
- REGS(NOPC, 0, NOPC, 0, NOPC)),
-
- /* SMLAD cccc 0111 0000 xxxx xxxx xxxx 00x1 xxxx */
- /* SMLSD cccc 0111 0000 xxxx xxxx xxxx 01x1 xxxx */
- DECODE_OR (0x0ff00090, 0x07000010),
- /* SMMLA cccc 0111 0101 xxxx xxxx xxxx 00x1 xxxx */
- DECODE_OR (0x0ff000d0, 0x07500010),
- /* USADA8 cccc 0111 1000 xxxx xxxx xxxx 0001 xxxx */
- DECODE_EMULATEX (0x0ff000f0, 0x07800010, emulate_rd16rn12rm0rs8_rwflags_nopc,
- REGS(NOPC, NOPCX, NOPC, 0, NOPC)),
-
- /* SMMLS cccc 0111 0101 xxxx xxxx xxxx 11x1 xxxx */
- DECODE_EMULATEX (0x0ff000d0, 0x075000d0, emulate_rd16rn12rm0rs8_rwflags_nopc,
- REGS(NOPC, NOPC, NOPC, 0, NOPC)),
-
- /* SBFX cccc 0111 101x xxxx xxxx xxxx x101 xxxx */
- /* UBFX cccc 0111 111x xxxx xxxx xxxx x101 xxxx */
- DECODE_EMULATEX (0x0fa00070, 0x07a00050, emulate_rd12rm0_noflags_nopc,
- REGS(0, NOPC, 0, 0, NOPC)),
-
- /* BFC cccc 0111 110x xxxx xxxx xxxx x001 1111 */
- DECODE_EMULATEX (0x0fe0007f, 0x07c0001f, emulate_rd12rm0_noflags_nopc,
- REGS(0, NOPC, 0, 0, 0)),
-
- /* BFI cccc 0111 110x xxxx xxxx xxxx x001 xxxx */
- DECODE_EMULATEX (0x0fe00070, 0x07c00010, emulate_rd12rm0_noflags_nopc,
- REGS(0, NOPC, 0, 0, NOPCX)),
-
- DECODE_END
-};
-
-static const union decode_item arm_cccc_01xx_table[] = {
- /* Load/store word and unsigned byte */
-
- /* LDRB/STRB pc,[...] cccc 01xx x0xx xxxx xxxx xxxx xxxx xxxx */
- DECODE_REJECT (0x0c40f000, 0x0440f000),
-
- /* STRT cccc 01x0 x010 xxxx xxxx xxxx xxxx xxxx */
- /* LDRT cccc 01x0 x011 xxxx xxxx xxxx xxxx xxxx */
- /* STRBT cccc 01x0 x110 xxxx xxxx xxxx xxxx xxxx */
- /* LDRBT cccc 01x0 x111 xxxx xxxx xxxx xxxx xxxx */
- DECODE_REJECT (0x0d200000, 0x04200000),
-
- /* STR (immediate) cccc 010x x0x0 xxxx xxxx xxxx xxxx xxxx */
- /* STRB (immediate) cccc 010x x1x0 xxxx xxxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0x0e100000, 0x04000000, emulate_str,
- REGS(NOPCWB, ANY, 0, 0, 0)),
-
- /* LDR (immediate) cccc 010x x0x1 xxxx xxxx xxxx xxxx xxxx */
- /* LDRB (immediate) cccc 010x x1x1 xxxx xxxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0x0e100000, 0x04100000, emulate_ldr,
- REGS(NOPCWB, ANY, 0, 0, 0)),
-
- /* STR (register) cccc 011x x0x0 xxxx xxxx xxxx xxxx xxxx */
- /* STRB (register) cccc 011x x1x0 xxxx xxxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0x0e100000, 0x06000000, emulate_str,
- REGS(NOPCWB, ANY, 0, 0, NOPC)),
-
- /* LDR (register) cccc 011x x0x1 xxxx xxxx xxxx xxxx xxxx */
- /* LDRB (register) cccc 011x x1x1 xxxx xxxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0x0e100000, 0x06100000, emulate_ldr,
- REGS(NOPCWB, ANY, 0, 0, NOPC)),
-
- DECODE_END
-};
-
-static const union decode_item arm_cccc_100x_table[] = {
- /* Block data transfer instructions */
-
- /* LDM cccc 100x x0x1 xxxx xxxx xxxx xxxx xxxx */
- /* STM cccc 100x x0x0 xxxx xxxx xxxx xxxx xxxx */
- DECODE_CUSTOM (0x0e400000, 0x08000000, kprobe_decode_ldmstm),
-
- /* STM (user registers) cccc 100x x1x0 xxxx xxxx xxxx xxxx xxxx */
- /* LDM (user registers) cccc 100x x1x1 xxxx 0xxx xxxx xxxx xxxx */
- /* LDM (exception ret) cccc 100x x1x1 xxxx 1xxx xxxx xxxx xxxx */
- DECODE_END
-};
-
-const union decode_item kprobe_decode_arm_table[] = {
- /*
- * Unconditional instructions
- * 1111 xxxx xxxx xxxx xxxx xxxx xxxx xxxx
- */
- DECODE_TABLE (0xf0000000, 0xf0000000, arm_1111_table),
-
- /*
- * Miscellaneous instructions
- * cccc 0001 0xx0 xxxx xxxx xxxx 0xxx xxxx
- */
- DECODE_TABLE (0x0f900080, 0x01000000, arm_cccc_0001_0xx0____0xxx_table),
-
- /*
- * Halfword multiply and multiply-accumulate
- * cccc 0001 0xx0 xxxx xxxx xxxx 1xx0 xxxx
- */
- DECODE_TABLE (0x0f900090, 0x01000080, arm_cccc_0001_0xx0____1xx0_table),
-
- /*
- * Multiply and multiply-accumulate
- * cccc 0000 xxxx xxxx xxxx xxxx 1001 xxxx
- */
- DECODE_TABLE (0x0f0000f0, 0x00000090, arm_cccc_0000_____1001_table),
-
- /*
- * Synchronization primitives
- * cccc 0001 xxxx xxxx xxxx xxxx 1001 xxxx
- */
- DECODE_TABLE (0x0f0000f0, 0x01000090, arm_cccc_0001_____1001_table),
-
- /*
- * Extra load/store instructions
- * cccc 000x xxxx xxxx xxxx xxxx 1xx1 xxxx
- */
- DECODE_TABLE (0x0e000090, 0x00000090, arm_cccc_000x_____1xx1_table),
-
- /*
- * Data-processing (register)
- * cccc 000x xxxx xxxx xxxx xxxx xxx0 xxxx
- * Data-processing (register-shifted register)
- * cccc 000x xxxx xxxx xxxx xxxx 0xx1 xxxx
- */
- DECODE_TABLE (0x0e000000, 0x00000000, arm_cccc_000x_table),
-
- /*
- * Data-processing (immediate)
- * cccc 001x xxxx xxxx xxxx xxxx xxxx xxxx
- */
- DECODE_TABLE (0x0e000000, 0x02000000, arm_cccc_001x_table),
-
- /*
- * Media instructions
- * cccc 011x xxxx xxxx xxxx xxxx xxx1 xxxx
- */
- DECODE_TABLE (0x0f000010, 0x06000010, arm_cccc_0110_____xxx1_table),
- DECODE_TABLE (0x0f000010, 0x07000010, arm_cccc_0111_____xxx1_table),
-
- /*
- * Load/store word and unsigned byte
- * cccc 01xx xxxx xxxx xxxx xxxx xxxx xxxx
- */
- DECODE_TABLE (0x0c000000, 0x04000000, arm_cccc_01xx_table),
-
- /*
- * Block data transfer instructions
- * cccc 100x xxxx xxxx xxxx xxxx xxxx xxxx
- */
- DECODE_TABLE (0x0e000000, 0x08000000, arm_cccc_100x_table),
-
- /* B cccc 1010 xxxx xxxx xxxx xxxx xxxx xxxx */
- /* BL cccc 1011 xxxx xxxx xxxx xxxx xxxx xxxx */
- DECODE_SIMULATE (0x0e000000, 0x0a000000, simulate_bbl),
-
- /*
- * Supervisor Call, and coprocessor instructions
- */
-
- /* MCRR cccc 1100 0100 xxxx xxxx xxxx xxxx xxxx */
- /* MRRC cccc 1100 0101 xxxx xxxx xxxx xxxx xxxx */
- /* LDC cccc 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
- /* STC cccc 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
- /* CDP cccc 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
- /* MCR cccc 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
- /* MRC cccc 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */
- /* SVC cccc 1111 xxxx xxxx xxxx xxxx xxxx xxxx */
- DECODE_REJECT (0x0c000000, 0x0c000000),
-
- DECODE_END
-};
-#ifdef CONFIG_ARM_KPROBES_TEST_MODULE
-EXPORT_SYMBOL_GPL(kprobe_decode_arm_table);
-#endif
-
-static void __kprobes arm_singlestep(struct kprobe *p, struct pt_regs *regs)
-{
- regs->ARM_pc += 4;
- p->ainsn.insn_handler(p, regs);
-}
-
-/* Return:
- * INSN_REJECTED If instruction is one not allowed to kprobe,
- * INSN_GOOD If instruction is supported and uses instruction slot,
- * INSN_GOOD_NO_SLOT If instruction is supported but doesn't use its slot.
- *
- * For instructions we don't want to kprobe (INSN_REJECTED return result):
- * These are generally ones that modify processor state, making them
- * "hard" to simulate, such as instructions that switch processor modes
- * or make accesses in alternate modes. Any of these could be simulated
- * if the work were put in, but the return would be low given that they
- * should also be very rare.
- */
-enum kprobe_insn __kprobes
-arm_kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- asi->insn_singlestep = arm_singlestep;
- asi->insn_check_cc = kprobe_condition_checks[insn>>28];
- return kprobe_decode_insn(insn, asi, kprobe_decode_arm_table, false);
-}
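
The hunk above replaces the kprobes-private decoding table, whose DECODE_SIMULATE and DECODE_EMULATE entries bound handler functions directly, with an action table indexed by PROBES_* values: the decoder selects an action index and kprobes supplies the matching handler here. A minimal sketch of the resulting dispatch shape follows; the helper is hypothetical and only illustrates the new (insn, asi, regs) handler signature and the union decode_action entries added above. The real dispatch belongs to the shared probes code introduced by this series, and note that the PROBES_LDMSTM entry carries a .decoder callback rather than a .handler.

	/* Hypothetical sketch, not part of this patch. */
	static void dispatch_probes_action(const union decode_action *actions,
					   int action, probes_opcode_t insn,
					   struct arch_probes_insn *asi,
					   struct pt_regs *regs)
	{
		/* Entries used at run time carry a handler taking the
		 * (insn, asi, regs) arguments seen throughout this patch. */
		actions[action].handler(insn, asi, regs);
	}

	/* For example, a decoded CLZ would reach
	 * emulate_rd12rm0_noflags_nopc() via:
	 *	dispatch_probes_action(kprobes_arm_actions, PROBES_CLZ,
	 *			       insn, asi, regs);
	 */
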
diff --git a/arch/arm/kernel/kprobes-common.c b/arch/arm/kernel/kprobes-common.c
index 18a76282970..0bf5d64eba1 100644
--- a/arch/arm/kernel/kprobes-common.c
+++ b/arch/arm/kernel/kprobes-common.c
@@ -13,178 +13,15 @@
#include <linux/kernel.h>
#include <linux/kprobes.h>
-#include <asm/system_info.h>
+#include <asm/opcodes.h>
#include "kprobes.h"
-#ifndef find_str_pc_offset
-
-/*
- * For STR and STM instructions, an ARM core may choose to use either
- * a +8 or a +12 displacement from the current instruction's address.
- * Whichever value is chosen for a given core, it must be the same for
- * both instructions and may not change. This function measures it.
- */
-
-int str_pc_offset;
-
-void __init find_str_pc_offset(void)
-{
- int addr, scratch, ret;
-
- __asm__ (
- "sub %[ret], pc, #4 \n\t"
- "str pc, %[addr] \n\t"
- "ldr %[scr], %[addr] \n\t"
- "sub %[ret], %[scr], %[ret] \n\t"
- : [ret] "=r" (ret), [scr] "=r" (scratch), [addr] "+m" (addr));
-
- str_pc_offset = ret;
-}
-
-#endif /* !find_str_pc_offset */
-
-
-#ifndef test_load_write_pc_interworking
-
-bool load_write_pc_interworks;
-
-void __init test_load_write_pc_interworking(void)
-{
- int arch = cpu_architecture();
- BUG_ON(arch == CPU_ARCH_UNKNOWN);
- load_write_pc_interworks = arch >= CPU_ARCH_ARMv5T;
-}
-
-#endif /* !test_load_write_pc_interworking */
-
-
-#ifndef test_alu_write_pc_interworking
-
-bool alu_write_pc_interworks;
-
-void __init test_alu_write_pc_interworking(void)
-{
- int arch = cpu_architecture();
- BUG_ON(arch == CPU_ARCH_UNKNOWN);
- alu_write_pc_interworks = arch >= CPU_ARCH_ARMv7;
-}
-
-#endif /* !test_alu_write_pc_interworking */
-
-
-void __init arm_kprobe_decode_init(void)
-{
- find_str_pc_offset();
- test_load_write_pc_interworking();
- test_alu_write_pc_interworking();
-}
-
-
-static unsigned long __kprobes __check_eq(unsigned long cpsr)
-{
- return cpsr & PSR_Z_BIT;
-}
-
-static unsigned long __kprobes __check_ne(unsigned long cpsr)
-{
- return (~cpsr) & PSR_Z_BIT;
-}
-
-static unsigned long __kprobes __check_cs(unsigned long cpsr)
-{
- return cpsr & PSR_C_BIT;
-}
-
-static unsigned long __kprobes __check_cc(unsigned long cpsr)
-{
- return (~cpsr) & PSR_C_BIT;
-}
-
-static unsigned long __kprobes __check_mi(unsigned long cpsr)
-{
- return cpsr & PSR_N_BIT;
-}
-
-static unsigned long __kprobes __check_pl(unsigned long cpsr)
-{
- return (~cpsr) & PSR_N_BIT;
-}
-
-static unsigned long __kprobes __check_vs(unsigned long cpsr)
-{
- return cpsr & PSR_V_BIT;
-}
-
-static unsigned long __kprobes __check_vc(unsigned long cpsr)
-{
- return (~cpsr) & PSR_V_BIT;
-}
-
-static unsigned long __kprobes __check_hi(unsigned long cpsr)
-{
- cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
- return cpsr & PSR_C_BIT;
-}
-
-static unsigned long __kprobes __check_ls(unsigned long cpsr)
-{
- cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
- return (~cpsr) & PSR_C_BIT;
-}
-
-static unsigned long __kprobes __check_ge(unsigned long cpsr)
-{
- cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
- return (~cpsr) & PSR_N_BIT;
-}
-
-static unsigned long __kprobes __check_lt(unsigned long cpsr)
-{
- cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
- return cpsr & PSR_N_BIT;
-}
-
-static unsigned long __kprobes __check_gt(unsigned long cpsr)
-{
- unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
- temp |= (cpsr << 1); /* PSR_N_BIT |= PSR_Z_BIT */
- return (~temp) & PSR_N_BIT;
-}
-
-static unsigned long __kprobes __check_le(unsigned long cpsr)
-{
- unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
- temp |= (cpsr << 1); /* PSR_N_BIT |= PSR_Z_BIT */
- return temp & PSR_N_BIT;
-}
-
-static unsigned long __kprobes __check_al(unsigned long cpsr)
-{
- return true;
-}
-
-kprobe_check_cc * const kprobe_condition_checks[16] = {
- &__check_eq, &__check_ne, &__check_cs, &__check_cc,
- &__check_mi, &__check_pl, &__check_vs, &__check_vc,
- &__check_hi, &__check_ls, &__check_ge, &__check_lt,
- &__check_gt, &__check_le, &__check_al, &__check_al
-};
-
-
-void __kprobes kprobe_simulate_nop(struct kprobe *p, struct pt_regs *regs)
-{
-}
-
-void __kprobes kprobe_emulate_none(struct kprobe *p, struct pt_regs *regs)
-{
- p->ainsn.insn_fn();
-}
-
-static void __kprobes simulate_ldm1stm1(struct kprobe *p, struct pt_regs *regs)
+static void __kprobes simulate_ldm1stm1(probes_opcode_t insn,
+ struct arch_probes_insn *asi,
+ struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
int rn = (insn >> 16) & 0xf;
int lbit = insn & (1 << 20);
int wbit = insn & (1 << 21);
@@ -223,24 +60,31 @@ static void __kprobes simulate_ldm1stm1(struct kprobe *p, struct pt_regs *regs)
}
}
-static void __kprobes simulate_stm1_pc(struct kprobe *p, struct pt_regs *regs)
+static void __kprobes simulate_stm1_pc(probes_opcode_t insn,
+ struct arch_probes_insn *asi,
+ struct pt_regs *regs)
{
- regs->ARM_pc = (long)p->addr + str_pc_offset;
- simulate_ldm1stm1(p, regs);
- regs->ARM_pc = (long)p->addr + 4;
+ unsigned long addr = regs->ARM_pc - 4;
+
+ regs->ARM_pc = (long)addr + str_pc_offset;
+ simulate_ldm1stm1(insn, asi, regs);
+ regs->ARM_pc = (long)addr + 4;
}
-static void __kprobes simulate_ldm1_pc(struct kprobe *p, struct pt_regs *regs)
+static void __kprobes simulate_ldm1_pc(probes_opcode_t insn,
+ struct arch_probes_insn *asi,
+ struct pt_regs *regs)
{
- simulate_ldm1stm1(p, regs);
+ simulate_ldm1stm1(insn, asi, regs);
load_write_pc(regs->ARM_pc, regs);
}
static void __kprobes
-emulate_generic_r0_12_noflags(struct kprobe *p, struct pt_regs *regs)
+emulate_generic_r0_12_noflags(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
register void *rregs asm("r1") = regs;
- register void *rfn asm("lr") = p->ainsn.insn_fn;
+ register void *rfn asm("lr") = asi->insn_fn;
__asm__ __volatile__ (
"stmdb sp!, {%[regs], r11} \n\t"
@@ -264,22 +108,27 @@ emulate_generic_r0_12_noflags(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-emulate_generic_r2_14_noflags(struct kprobe *p, struct pt_regs *regs)
+emulate_generic_r2_14_noflags(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- emulate_generic_r0_12_noflags(p, (struct pt_regs *)(regs->uregs+2));
+ emulate_generic_r0_12_noflags(insn, asi,
+ (struct pt_regs *)(regs->uregs+2));
}
static void __kprobes
-emulate_ldm_r3_15(struct kprobe *p, struct pt_regs *regs)
+emulate_ldm_r3_15(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- emulate_generic_r0_12_noflags(p, (struct pt_regs *)(regs->uregs+3));
+ emulate_generic_r0_12_noflags(insn, asi,
+ (struct pt_regs *)(regs->uregs+3));
load_write_pc(regs->ARM_pc, regs);
}
-enum kprobe_insn __kprobes
-kprobe_decode_ldmstm(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+enum probes_insn __kprobes
+kprobe_decode_ldmstm(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *h)
{
- kprobe_insn_handler_t *handler = 0;
+ probes_insn_handler_t *handler = 0;
unsigned reglist = insn & 0xffff;
int is_ldm = insn & 0x100000;
int rn = (insn >> 16) & 0xf;
@@ -305,7 +154,8 @@ kprobe_decode_ldmstm(kprobe_opcode_t insn, struct arch_specific_insn *asi)
if (handler) {
/* We can emulate the instruction in (possibly) modified form */
- asi->insn[0] = (insn & 0xfff00000) | (rn << 16) | reglist;
+ asi->insn[0] = __opcode_to_mem_arm((insn & 0xfff00000) |
+ (rn << 16) | reglist);
asi->insn_handler = handler;
return INSN_GOOD;
}
@@ -319,260 +169,3 @@ kprobe_decode_ldmstm(kprobe_opcode_t insn, struct arch_specific_insn *asi)
return INSN_GOOD_NO_SLOT;
}
-
-/*
- * Prepare an instruction slot to receive an instruction for emulating.
- * This is done by placing a subroutine return after the location where the
- * instruction will be placed. We also modify ARM instructions to be
- * unconditional as the condition code will already be checked before any
- * emulation handler is called.
- */
-static kprobe_opcode_t __kprobes
-prepare_emulated_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi,
- bool thumb)
-{
-#ifdef CONFIG_THUMB2_KERNEL
- if (thumb) {
- u16 *thumb_insn = (u16 *)asi->insn;
- thumb_insn[1] = 0x4770; /* Thumb bx lr */
- thumb_insn[2] = 0x4770; /* Thumb bx lr */
- return insn;
- }
- asi->insn[1] = 0xe12fff1e; /* ARM bx lr */
-#else
- asi->insn[1] = 0xe1a0f00e; /* mov pc, lr */
-#endif
- /* Make an ARM instruction unconditional */
- if (insn < 0xe0000000)
- insn = (insn | 0xe0000000) & ~0x10000000;
- return insn;
-}
-
-/*
- * Write a (probably modified) instruction into the slot previously prepared by
- * prepare_emulated_insn
- */
-static void __kprobes
-set_emulated_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi,
- bool thumb)
-{
-#ifdef CONFIG_THUMB2_KERNEL
- if (thumb) {
- u16 *ip = (u16 *)asi->insn;
- if (is_wide_instruction(insn))
- *ip++ = insn >> 16;
- *ip++ = insn;
- return;
- }
-#endif
- asi->insn[0] = insn;
-}
-
-/*
- * When we modify the register numbers encoded in an instruction to be emulated,
- * the new values come from this define. For ARM and 32-bit Thumb instructions
- * this gives...
- *
- * bit position 16 12 8 4 0
- * ---------------+---+---+---+---+---+
- * register r2 r0 r1 -- r3
- */
-#define INSN_NEW_BITS 0x00020103
-
-/* Each nibble has same value as that at INSN_NEW_BITS bit 16 */
-#define INSN_SAMEAS16_BITS 0x22222222
-
-/*
- * Validate and modify each of the registers encoded in an instruction.
- *
- * Each nibble in regs contains a value from enum decode_reg_type. For each
- * non-zero value, the corresponding nibble in pinsn is validated and modified
- * according to the type.
- */
-static bool __kprobes decode_regs(kprobe_opcode_t* pinsn, u32 regs)
-{
- kprobe_opcode_t insn = *pinsn;
- kprobe_opcode_t mask = 0xf; /* Start at least significant nibble */
-
- for (; regs != 0; regs >>= 4, mask <<= 4) {
-
- kprobe_opcode_t new_bits = INSN_NEW_BITS;
-
- switch (regs & 0xf) {
-
- case REG_TYPE_NONE:
- /* Nibble not a register, skip to next */
- continue;
-
- case REG_TYPE_ANY:
- /* Any register is allowed */
- break;
-
- case REG_TYPE_SAMEAS16:
- /* Replace register with same as at bit position 16 */
- new_bits = INSN_SAMEAS16_BITS;
- break;
-
- case REG_TYPE_SP:
- /* Only allow SP (R13) */
- if ((insn ^ 0xdddddddd) & mask)
- goto reject;
- break;
-
- case REG_TYPE_PC:
- /* Only allow PC (R15) */
- if ((insn ^ 0xffffffff) & mask)
- goto reject;
- break;
-
- case REG_TYPE_NOSP:
- /* Reject SP (R13) */
- if (((insn ^ 0xdddddddd) & mask) == 0)
- goto reject;
- break;
-
- case REG_TYPE_NOSPPC:
- case REG_TYPE_NOSPPCX:
- /* Reject SP and PC (R13 and R15) */
- if (((insn ^ 0xdddddddd) & 0xdddddddd & mask) == 0)
- goto reject;
- break;
-
- case REG_TYPE_NOPCWB:
- if (!is_writeback(insn))
- break; /* No writeback, so any register is OK */
- /* fall through... */
- case REG_TYPE_NOPC:
- case REG_TYPE_NOPCX:
- /* Reject PC (R15) */
- if (((insn ^ 0xffffffff) & mask) == 0)
- goto reject;
- break;
- }
-
- /* Replace value of nibble with new register number... */
- insn &= ~mask;
- insn |= new_bits & mask;
- }
-
- *pinsn = insn;
- return true;
-
-reject:
- return false;
-}
-
-static const int decode_struct_sizes[NUM_DECODE_TYPES] = {
- [DECODE_TYPE_TABLE] = sizeof(struct decode_table),
- [DECODE_TYPE_CUSTOM] = sizeof(struct decode_custom),
- [DECODE_TYPE_SIMULATE] = sizeof(struct decode_simulate),
- [DECODE_TYPE_EMULATE] = sizeof(struct decode_emulate),
- [DECODE_TYPE_OR] = sizeof(struct decode_or),
- [DECODE_TYPE_REJECT] = sizeof(struct decode_reject)
-};
-
-/*
- * kprobe_decode_insn operates on data tables in order to decode an ARM
- * architecture instruction onto which a kprobe has been placed.
- *
- * These instruction decoding tables are a concatenation of entries each
- * of which consists of one of the following structs:
- *
- * decode_table
- * decode_custom
- * decode_simulate
- * decode_emulate
- * decode_or
- * decode_reject
- *
- * Each of these starts with a struct decode_header which has the following
- * fields:
- *
- * type_regs
- * mask
- * value
- *
- * The least significant DECODE_TYPE_BITS of type_regs contains a value
- * from enum decode_type, this indicates which of the decode_* structs
- * the entry contains. The value DECODE_TYPE_END indicates the end of the
- * table.
- *
- * When the table is parsed, each entry is checked in turn to see if it
- * matches the instruction to be decoded using the test:
- *
- * (insn & mask) == value
- *
- * If no match is found before the end of the table is reached then decoding
- * fails with INSN_REJECTED.
- *
- * When a match is found, decode_regs() is called to validate and modify each
- * of the registers encoded in the instruction; the data it uses to do this
- * is (type_regs >> DECODE_TYPE_BITS). A validation failure will cause decoding
- * to fail with INSN_REJECTED.
- *
- * Once the instruction has passed the above tests, further processing
- * depends on the type of the table entry's decode struct.
- *
- */
-int __kprobes
-kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi,
- const union decode_item *table, bool thumb)
-{
- const struct decode_header *h = (struct decode_header *)table;
- const struct decode_header *next;
- bool matched = false;
-
- insn = prepare_emulated_insn(insn, asi, thumb);
-
- for (;; h = next) {
- enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK;
- u32 regs = h->type_regs.bits >> DECODE_TYPE_BITS;
-
- if (type == DECODE_TYPE_END)
- return INSN_REJECTED;
-
- next = (struct decode_header *)
- ((uintptr_t)h + decode_struct_sizes[type]);
-
- if (!matched && (insn & h->mask.bits) != h->value.bits)
- continue;
-
- if (!decode_regs(&insn, regs))
- return INSN_REJECTED;
-
- switch (type) {
-
- case DECODE_TYPE_TABLE: {
- struct decode_table *d = (struct decode_table *)h;
- next = (struct decode_header *)d->table.table;
- break;
- }
-
- case DECODE_TYPE_CUSTOM: {
- struct decode_custom *d = (struct decode_custom *)h;
- return (*d->decoder.decoder)(insn, asi);
- }
-
- case DECODE_TYPE_SIMULATE: {
- struct decode_simulate *d = (struct decode_simulate *)h;
- asi->insn_handler = d->handler.handler;
- return INSN_GOOD_NO_SLOT;
- }
-
- case DECODE_TYPE_EMULATE: {
- struct decode_emulate *d = (struct decode_emulate *)h;
- asi->insn_handler = d->handler.handler;
- set_emulated_insn(insn, asi, thumb);
- return INSN_GOOD;
- }
-
- case DECODE_TYPE_OR:
- matched = true;
- break;
-
- case DECODE_TYPE_REJECT:
- default:
- return INSN_REJECTED;
- }
- }
- }
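
With this change kprobes-common.c retains only the LDM/STM handling; the condition-code checks and the generic table-walking decoder removed above move out of this file into the shared probes implementation introduced by this series, presumably so the same machinery can also serve the new uprobes support. As an aside on the removed __check_ge(), here is a minimal sketch of its bit trick, assuming the usual ARM CPSR layout where PSR_N_BIT is bit 31 and PSR_V_BIT is bit 28 (illustration only, not part of this patch):

	static unsigned long check_ge_sketch(unsigned long cpsr)
	{
		/* Shifting left by 3 moves V (bit 28) up to the N position
		 * (bit 31), so after the XOR bit 31 holds N ^ V. */
		cpsr ^= (cpsr << 3);
		/* GE means N == V, i.e. the complement of N ^ V. */
		return (~cpsr) & (1UL << 31);
	}

The same pattern explains the removed __check_lt() (return the bit itself rather than its complement) and, with the additional OR of Z shifted into the N position, __check_gt() and __check_le().
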
diff --git a/arch/arm/kernel/kprobes-test-arm.c b/arch/arm/kernel/kprobes-test-arm.c
index 83931290506..cb1424240ff 100644
--- a/arch/arm/kernel/kprobes-test-arm.c
+++ b/arch/arm/kernel/kprobes-test-arm.c
@@ -10,6 +10,8 @@
#include <linux/kernel.h>
#include <linux/module.h>
+#include <asm/system_info.h>
+#include <asm/opcodes.h>
#include "kprobes-test.h"
@@ -72,8 +74,6 @@ void kprobe_arm_test_cases(void)
TEST_RRR( op "lt" s " r11, r",11,VAL1,", r",14,N(val),", asr r",7, 6,"")\
TEST_RR( op "gt" s " r12, r13" ", r",14,val, ", ror r",14,7,"")\
TEST_RR( op "le" s " r14, r",0, val, ", r13" ", lsl r",14,8,"")\
- TEST_RR( op s " r12, pc" ", r",14,val, ", ror r",14,7,"")\
- TEST_RR( op s " r14, r",0, val, ", pc" ", lsl r",14,8,"")\
TEST_R( op "eq" s " r0, r",11,VAL1,", #0xf5") \
TEST_R( op "ne" s " r11, r",0, VAL1,", #0xf5000000") \
TEST_R( op s " r7, r",8, VAL2,", #0x000af000") \
@@ -101,8 +101,6 @@ void kprobe_arm_test_cases(void)
TEST_RRR( op "ge r",11,VAL1,", r",14,N(val),", asr r",7, 6,"") \
TEST_RR( op "le r13" ", r",14,val, ", ror r",14,7,"") \
TEST_RR( op "gt r",0, val, ", r13" ", lsl r",14,8,"") \
- TEST_RR( op " pc" ", r",14,val, ", ror r",14,7,"") \
- TEST_RR( op " r",0, val, ", pc" ", lsl r",14,8,"") \
TEST_R( op "eq r",11,VAL1,", #0xf5") \
TEST_R( op "ne r",0, VAL1,", #0xf5000000") \
TEST_R( op " r",8, VAL2,", #0x000af000")
@@ -123,7 +121,6 @@ void kprobe_arm_test_cases(void)
TEST_RR( op "ge" s " r11, r",11,N(val),", asr r",7, 6,"") \
TEST_RR( op "lt" s " r12, r",11,val, ", ror r",14,7,"") \
TEST_R( op "gt" s " r14, r13" ", lsl r",14,8,"") \
- TEST_R( op "le" s " r14, pc" ", lsl r",14,8,"") \
TEST( op "eq" s " r0, #0xf5") \
TEST( op "ne" s " r11, #0xf5000000") \
TEST( op s " r7, #0x000af000") \
@@ -157,12 +154,19 @@ void kprobe_arm_test_cases(void)
TEST_SUPPORTED("cmp pc, #0x1000");
TEST_SUPPORTED("cmp sp, #0x1000");
- /* Data-processing with PC as shift*/
- TEST_UNSUPPORTED(".word 0xe15c0f1e @ cmp r12, r14, asl pc")
- TEST_UNSUPPORTED(".word 0xe1a0cf1e @ mov r12, r14, asl pc")
- TEST_UNSUPPORTED(".word 0xe08caf1e @ add r10, r12, r14, asl pc")
-
- /* Data-processing with PC as shift*/
+ /* Data-processing with PC and a shift count in a register */
+ TEST_UNSUPPORTED(__inst_arm(0xe15c0f1e) " @ cmp r12, r14, asl pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe1a0cf1e) " @ mov r12, r14, asl pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe08caf1e) " @ add r10, r12, r14, asl pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe151021f) " @ cmp r1, pc, lsl r2")
+ TEST_UNSUPPORTED(__inst_arm(0xe17f0211) " @ cmn pc, r1, lsl r2")
+ TEST_UNSUPPORTED(__inst_arm(0xe1a0121f) " @ mov r1, pc, lsl r2")
+ TEST_UNSUPPORTED(__inst_arm(0xe1a0f211) " @ mov pc, r1, lsl r2")
+ TEST_UNSUPPORTED(__inst_arm(0xe042131f) " @ sub r1, r2, pc, lsl r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe1cf1312) " @ bic r1, pc, r2, lsl r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe081f312) " @ add pc, r1, r2, lsl r3")
+
+ /* Data-processing with PC as a target and status registers updated */
TEST_UNSUPPORTED("movs pc, r1")
TEST_UNSUPPORTED("movs pc, r1, lsl r2")
TEST_UNSUPPORTED("movs pc, #0x10000")
@@ -185,14 +189,14 @@ void kprobe_arm_test_cases(void)
TEST_BF_R ("add pc, pc, r",14,2f-1f-8,"")
TEST_BF_R ("add pc, r",14,2f-1f-8,", pc")
TEST_BF_R ("mov pc, r",0,2f,"")
- TEST_BF_RR("mov pc, r",0,2f,", asl r",1,0,"")
+ TEST_BF_R ("add pc, pc, r",14,(2f-1f-8)*2,", asr #1")
TEST_BB( "sub pc, pc, #1b-2b+8")
#if __LINUX_ARM_ARCH__ == 6 && !defined(CONFIG_CPU_V7)
TEST_BB( "sub pc, pc, #1b-2b+8-2") /* UNPREDICTABLE before and after ARMv6 */
#endif
TEST_BB_R( "sub pc, pc, r",14, 1f-2f+8,"")
TEST_BB_R( "rsb pc, r",14,1f-2f+8,", pc")
- TEST_RR( "add pc, pc, r",10,-2,", asl r",11,1,"")
+ TEST_R( "add pc, pc, r",10,-2,", asl #1")
#ifdef CONFIG_THUMB2_KERNEL
TEST_ARM_TO_THUMB_INTERWORK_R("add pc, pc, r",0,3f-1f-8+1,"")
TEST_ARM_TO_THUMB_INTERWORK_R("sub pc, r",0,3f+8+1,", #8")
@@ -202,7 +206,7 @@ void kprobe_arm_test_cases(void)
TEST("mrs r0, cpsr")
TEST("mrspl r7, cpsr")
TEST("mrs r14, cpsr")
- TEST_UNSUPPORTED(".word 0xe10ff000 @ mrs r15, cpsr")
+ TEST_UNSUPPORTED(__inst_arm(0xe10ff000) " @ mrs r15, cpsr")
TEST_UNSUPPORTED("mrs r0, spsr")
TEST_UNSUPPORTED("mrs lr, spsr")
@@ -214,12 +218,13 @@ void kprobe_arm_test_cases(void)
TEST_BB_R("bx r",7,2f,"")
TEST_BF_R("bxeq r",14,2f,"")
+#if __LINUX_ARM_ARCH__ >= 5
TEST_R("clz r0, r",0, 0x0,"")
TEST_R("clzeq r7, r",14,0x1,"")
TEST_R("clz lr, r",7, 0xffffffff,"")
TEST( "clz r4, sp")
- TEST_UNSUPPORTED(".word 0x016fff10 @ clz pc, r0")
- TEST_UNSUPPORTED(".word 0x016f0f1f @ clz r0, pc")
+ TEST_UNSUPPORTED(__inst_arm(0x016fff10) " @ clz pc, r0")
+ TEST_UNSUPPORTED(__inst_arm(0x016f0f1f) " @ clz r0, pc")
#if __LINUX_ARM_ARCH__ >= 6
TEST_UNSUPPORTED("bxj r0")
@@ -228,7 +233,7 @@ void kprobe_arm_test_cases(void)
TEST_BF_R("blx r",0,2f,"")
TEST_BB_R("blx r",7,2f,"")
TEST_BF_R("blxeq r",14,2f,"")
- TEST_UNSUPPORTED(".word 0x0120003f @ blx pc")
+ TEST_UNSUPPORTED(__inst_arm(0x0120003f) " @ blx pc")
TEST_RR( "qadd r0, r",1, VAL1,", r",2, VAL2,"")
TEST_RR( "qaddvs lr, r",9, VAL2,", r",8, VAL1,"")
@@ -242,190 +247,191 @@ void kprobe_arm_test_cases(void)
TEST_RR( "qdsub r0, r",1, VAL1,", r",2, VAL2,"")
TEST_RR( "qdsubvs lr, r",9, VAL2,", r",8, VAL1,"")
TEST_R( "qdsub lr, r",9, VAL2,", r13")
- TEST_UNSUPPORTED(".word 0xe101f050 @ qadd pc, r0, r1")
- TEST_UNSUPPORTED(".word 0xe121f050 @ qsub pc, r0, r1")
- TEST_UNSUPPORTED(".word 0xe141f050 @ qdadd pc, r0, r1")
- TEST_UNSUPPORTED(".word 0xe161f050 @ qdsub pc, r0, r1")
- TEST_UNSUPPORTED(".word 0xe16f2050 @ qdsub r2, r0, pc")
- TEST_UNSUPPORTED(".word 0xe161205f @ qdsub r2, pc, r1")
+ TEST_UNSUPPORTED(__inst_arm(0xe101f050) " @ qadd pc, r0, r1")
+ TEST_UNSUPPORTED(__inst_arm(0xe121f050) " @ qsub pc, r0, r1")
+ TEST_UNSUPPORTED(__inst_arm(0xe141f050) " @ qdadd pc, r0, r1")
+ TEST_UNSUPPORTED(__inst_arm(0xe161f050) " @ qdsub pc, r0, r1")
+ TEST_UNSUPPORTED(__inst_arm(0xe16f2050) " @ qdsub r2, r0, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe161205f) " @ qdsub r2, pc, r1")
TEST_UNSUPPORTED("bkpt 0xffff")
TEST_UNSUPPORTED("bkpt 0x0000")
- TEST_UNSUPPORTED(".word 0xe1600070 @ smc #0")
+ TEST_UNSUPPORTED(__inst_arm(0xe1600070) " @ smc #0")
TEST_GROUP("Halfword multiply and multiply-accumulate")
TEST_RRR( "smlabb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"")
TEST_RRR( "smlabbge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"")
TEST_RR( "smlabb lr, r",1, VAL2,", r",2, VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe10f3281 @ smlabb pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe10f3281) " @ smlabb pc, r1, r2, r3")
TEST_RRR( "smlatb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"")
TEST_RRR( "smlatbge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"")
TEST_RR( "smlatb lr, r",1, VAL2,", r",2, VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe10f32a1 @ smlatb pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe10f32a1) " @ smlatb pc, r1, r2, r3")
TEST_RRR( "smlabt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"")
TEST_RRR( "smlabtge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"")
TEST_RR( "smlabt lr, r",1, VAL2,", r",2, VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe10f32c1 @ smlabt pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe10f32c1) " @ smlabt pc, r1, r2, r3")
TEST_RRR( "smlatt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"")
TEST_RRR( "smlattge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"")
TEST_RR( "smlatt lr, r",1, VAL2,", r",2, VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe10f32e1 @ smlatt pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe10f32e1) " @ smlatt pc, r1, r2, r3")
TEST_RRR( "smlawb r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"")
TEST_RRR( "smlawbge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"")
TEST_RR( "smlawb lr, r",1, VAL2,", r",2, VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe12f3281 @ smlawb pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe12f3281) " @ smlawb pc, r1, r2, r3")
TEST_RRR( "smlawt r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"")
TEST_RRR( "smlawtge r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"")
TEST_RR( "smlawt lr, r",1, VAL2,", r",2, VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe12f32c1 @ smlawt pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe12032cf @ smlawt r0, pc, r2, r3")
- TEST_UNSUPPORTED(".word 0xe1203fc1 @ smlawt r0, r1, pc, r3")
- TEST_UNSUPPORTED(".word 0xe120f2c1 @ smlawt r0, r1, r2, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe12f32c1) " @ smlawt pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe12032cf) " @ smlawt r0, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe1203fc1) " @ smlawt r0, r1, pc, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe120f2c1) " @ smlawt r0, r1, r2, pc")
TEST_RR( "smulwb r0, r",1, VAL1,", r",2, VAL2,"")
TEST_RR( "smulwbge r7, r",8, VAL3,", r",9, VAL1,"")
TEST_R( "smulwb lr, r",1, VAL2,", r13")
- TEST_UNSUPPORTED(".word 0xe12f02a1 @ smulwb pc, r1, r2")
+ TEST_UNSUPPORTED(__inst_arm(0xe12f02a1) " @ smulwb pc, r1, r2")
TEST_RR( "smulwt r0, r",1, VAL1,", r",2, VAL2,"")
TEST_RR( "smulwtge r7, r",8, VAL3,", r",9, VAL1,"")
TEST_R( "smulwt lr, r",1, VAL2,", r13")
- TEST_UNSUPPORTED(".word 0xe12f02e1 @ smulwt pc, r1, r2")
+ TEST_UNSUPPORTED(__inst_arm(0xe12f02e1) " @ smulwt pc, r1, r2")
TEST_RRRR( "smlalbb r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4)
TEST_RRRR( "smlalbble r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3)
TEST_RRR( "smlalbb r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13")
- TEST_UNSUPPORTED(".word 0xe14f1382 @ smlalbb pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe141f382 @ smlalbb r1, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe14f1382) " @ smlalbb pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe141f382) " @ smlalbb r1, pc, r2, r3")
TEST_RRRR( "smlaltb r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4)
TEST_RRRR( "smlaltble r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3)
TEST_RRR( "smlaltb r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13")
- TEST_UNSUPPORTED(".word 0xe14f13a2 @ smlaltb pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe141f3a2 @ smlaltb r1, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe14f13a2) " @ smlaltb pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe141f3a2) " @ smlaltb r1, pc, r2, r3")
TEST_RRRR( "smlalbt r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4)
TEST_RRRR( "smlalbtle r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3)
TEST_RRR( "smlalbt r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13")
- TEST_UNSUPPORTED(".word 0xe14f13c2 @ smlalbt pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe141f3c2 @ smlalbt r1, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe14f13c2) " @ smlalbt pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe141f3c2) " @ smlalbt r1, pc, r2, r3")
TEST_RRRR( "smlaltt r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4)
TEST_RRRR( "smlalttle r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3)
TEST_RRR( "smlaltt r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13")
- TEST_UNSUPPORTED(".word 0xe14f13e2 @ smlalbb pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe140f3e2 @ smlalbb r0, pc, r2, r3")
- TEST_UNSUPPORTED(".word 0xe14013ef @ smlalbb r0, r1, pc, r3")
- TEST_UNSUPPORTED(".word 0xe1401fe2 @ smlalbb r0, r1, r2, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe14f13e2) " @ smlaltt pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe140f3e2) " @ smlaltt r0, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe14013ef) " @ smlaltt r0, r1, pc, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe1401fe2) " @ smlaltt r0, r1, r2, pc")
TEST_RR( "smulbb r0, r",1, VAL1,", r",2, VAL2,"")
TEST_RR( "smulbbge r7, r",8, VAL3,", r",9, VAL1,"")
TEST_R( "smulbb lr, r",1, VAL2,", r13")
- TEST_UNSUPPORTED(".word 0xe16f0281 @ smulbb pc, r1, r2")
+ TEST_UNSUPPORTED(__inst_arm(0xe16f0281) " @ smulbb pc, r1, r2")
TEST_RR( "smultb r0, r",1, VAL1,", r",2, VAL2,"")
TEST_RR( "smultbge r7, r",8, VAL3,", r",9, VAL1,"")
TEST_R( "smultb lr, r",1, VAL2,", r13")
- TEST_UNSUPPORTED(".word 0xe16f02a1 @ smultb pc, r1, r2")
+ TEST_UNSUPPORTED(__inst_arm(0xe16f02a1) " @ smultb pc, r1, r2")
TEST_RR( "smulbt r0, r",1, VAL1,", r",2, VAL2,"")
TEST_RR( "smulbtge r7, r",8, VAL3,", r",9, VAL1,"")
TEST_R( "smulbt lr, r",1, VAL2,", r13")
- TEST_UNSUPPORTED(".word 0xe16f02c1 @ smultb pc, r1, r2")
+ TEST_UNSUPPORTED(__inst_arm(0xe16f02c1) " @ smulbt pc, r1, r2")
TEST_RR( "smultt r0, r",1, VAL1,", r",2, VAL2,"")
TEST_RR( "smulttge r7, r",8, VAL3,", r",9, VAL1,"")
TEST_R( "smultt lr, r",1, VAL2,", r13")
- TEST_UNSUPPORTED(".word 0xe16f02e1 @ smultt pc, r1, r2")
- TEST_UNSUPPORTED(".word 0xe16002ef @ smultt r0, pc, r2")
- TEST_UNSUPPORTED(".word 0xe1600fe1 @ smultt r0, r1, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe16f02e1) " @ smultt pc, r1, r2")
+ TEST_UNSUPPORTED(__inst_arm(0xe16002ef) " @ smultt r0, pc, r2")
+ TEST_UNSUPPORTED(__inst_arm(0xe1600fe1) " @ smultt r0, r1, pc")
+#endif
TEST_GROUP("Multiply and multiply-accumulate")
TEST_RR( "mul r0, r",1, VAL1,", r",2, VAL2,"")
TEST_RR( "mulls r7, r",8, VAL2,", r",9, VAL2,"")
TEST_R( "mul lr, r",4, VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe00f0291 @ mul pc, r1, r2")
- TEST_UNSUPPORTED(".word 0xe000029f @ mul r0, pc, r2")
- TEST_UNSUPPORTED(".word 0xe0000f91 @ mul r0, r1, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe00f0291) " @ mul pc, r1, r2")
+ TEST_UNSUPPORTED(__inst_arm(0xe000029f) " @ mul r0, pc, r2")
+ TEST_UNSUPPORTED(__inst_arm(0xe0000f91) " @ mul r0, r1, pc")
TEST_RR( "muls r0, r",1, VAL1,", r",2, VAL2,"")
TEST_RR( "mullss r7, r",8, VAL2,", r",9, VAL2,"")
TEST_R( "muls lr, r",4, VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe01f0291 @ muls pc, r1, r2")
+ TEST_UNSUPPORTED(__inst_arm(0xe01f0291) " @ muls pc, r1, r2")
TEST_RRR( "mla r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"")
TEST_RRR( "mlahi r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"")
TEST_RR( "mla lr, r",1, VAL2,", r",2, VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe02f3291 @ mla pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe02f3291) " @ mla pc, r1, r2, r3")
TEST_RRR( "mlas r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"")
TEST_RRR( "mlahis r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"")
TEST_RR( "mlas lr, r",1, VAL2,", r",2, VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe03f3291 @ mlas pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe03f3291) " @ mlas pc, r1, r2, r3")
#if __LINUX_ARM_ARCH__ >= 6
TEST_RR( "umaal r0, r1, r",2, VAL1,", r",3, VAL2,"")
TEST_RR( "umaalls r7, r8, r",9, VAL2,", r",10, VAL1,"")
TEST_R( "umaal lr, r12, r",11,VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe041f392 @ umaal pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe04f0392 @ umaal r0, pc, r2, r3")
- TEST_UNSUPPORTED(".word 0xe0500090 @ undef")
- TEST_UNSUPPORTED(".word 0xe05fff9f @ undef")
+ TEST_UNSUPPORTED(__inst_arm(0xe041f392) " @ umaal pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe04f0392) " @ umaal r0, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0500090) " @ undef")
+ TEST_UNSUPPORTED(__inst_arm(0xe05fff9f) " @ undef")
#endif
#if __LINUX_ARM_ARCH__ >= 7
TEST_RRR( "mls r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"")
TEST_RRR( "mlshi r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"")
TEST_RR( "mls lr, r",1, VAL2,", r",2, VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe06f3291 @ mls pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe060329f @ mls r0, pc, r2, r3")
- TEST_UNSUPPORTED(".word 0xe0603f91 @ mls r0, r1, pc, r3")
- TEST_UNSUPPORTED(".word 0xe060f291 @ mls r0, r1, r2, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe06f3291) " @ mls pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe060329f) " @ mls r0, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0603f91) " @ mls r0, r1, pc, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe060f291) " @ mls r0, r1, r2, pc")
#endif
- TEST_UNSUPPORTED(".word 0xe0700090 @ undef")
- TEST_UNSUPPORTED(".word 0xe07fff9f @ undef")
+ TEST_UNSUPPORTED(__inst_arm(0xe0700090) " @ undef")
+ TEST_UNSUPPORTED(__inst_arm(0xe07fff9f) " @ undef")
TEST_RR( "umull r0, r1, r",2, VAL1,", r",3, VAL2,"")
TEST_RR( "umullls r7, r8, r",9, VAL2,", r",10, VAL1,"")
TEST_R( "umull lr, r12, r",11,VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe081f392 @ umull pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe08f1392 @ umull r1, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe081f392) " @ umull pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe08f1392) " @ umull r1, pc, r2, r3")
TEST_RR( "umulls r0, r1, r",2, VAL1,", r",3, VAL2,"")
TEST_RR( "umulllss r7, r8, r",9, VAL2,", r",10, VAL1,"")
TEST_R( "umulls lr, r12, r",11,VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe091f392 @ umulls pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe09f1392 @ umulls r1, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe091f392) " @ umulls pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe09f1392) " @ umulls r1, pc, r2, r3")
TEST_RRRR( "umlal r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4)
TEST_RRRR( "umlalle r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3)
TEST_RRR( "umlal r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13")
- TEST_UNSUPPORTED(".word 0xe0af1392 @ umlal pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe0a1f392 @ umlal r1, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0af1392) " @ umlal pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0a1f392) " @ umlal r1, pc, r2, r3")
TEST_RRRR( "umlals r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4)
TEST_RRRR( "umlalles r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3)
TEST_RRR( "umlals r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13")
- TEST_UNSUPPORTED(".word 0xe0bf1392 @ umlals pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe0b1f392 @ umlals r1, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0bf1392) " @ umlals pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0b1f392) " @ umlals r1, pc, r2, r3")
TEST_RR( "smull r0, r1, r",2, VAL1,", r",3, VAL2,"")
TEST_RR( "smullls r7, r8, r",9, VAL2,", r",10, VAL1,"")
TEST_R( "smull lr, r12, r",11,VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe0c1f392 @ smull pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe0cf1392 @ smull r1, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0c1f392) " @ smull pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0cf1392) " @ smull r1, pc, r2, r3")
TEST_RR( "smulls r0, r1, r",2, VAL1,", r",3, VAL2,"")
TEST_RR( "smulllss r7, r8, r",9, VAL2,", r",10, VAL1,"")
TEST_R( "smulls lr, r12, r",11,VAL3,", r13")
- TEST_UNSUPPORTED(".word 0xe0d1f392 @ smulls pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe0df1392 @ smulls r1, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0d1f392) " @ smulls pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0df1392) " @ smulls r1, pc, r2, r3")
TEST_RRRR( "smlal r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4)
TEST_RRRR( "smlalle r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3)
TEST_RRR( "smlal r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13")
- TEST_UNSUPPORTED(".word 0xe0ef1392 @ smlal pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe0e1f392 @ smlal r1, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0ef1392) " @ smlal pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0e1f392) " @ smlal r1, pc, r2, r3")
TEST_RRRR( "smlals r",0, VAL1,", r",1, VAL2,", r",2, VAL3,", r",3, VAL4)
TEST_RRRR( "smlalles r",8, VAL4,", r",9, VAL1,", r",10,VAL2,", r",11,VAL3)
TEST_RRR( "smlals r",14,VAL3,", r",7, VAL4,", r",5, VAL1,", r13")
- TEST_UNSUPPORTED(".word 0xe0ff1392 @ smlals pc, r1, r2, r3")
- TEST_UNSUPPORTED(".word 0xe0f0f392 @ smlals r0, pc, r2, r3")
- TEST_UNSUPPORTED(".word 0xe0f0139f @ smlals r0, r1, pc, r3")
- TEST_UNSUPPORTED(".word 0xe0f01f92 @ smlals r0, r1, r2, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe0ff1392) " @ smlals pc, r1, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0f0f392) " @ smlals r0, pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0f0139f) " @ smlals r0, r1, pc, r3")
+ TEST_UNSUPPORTED(__inst_arm(0xe0f01f92) " @ smlals r0, r1, r2, pc")
TEST_GROUP("Synchronization primitives")
@@ -434,28 +440,28 @@ void kprobe_arm_test_cases(void)
TEST_R( "swpvs r0, r",1,VAL1,", [sp]")
TEST_RP("swp sp, r",14,VAL2,", [r",12,13*4,"]")
#else
- TEST_UNSUPPORTED(".word 0xe108e097 @ swp lr, r7, [r8]")
- TEST_UNSUPPORTED(".word 0x610d0091 @ swpvs r0, r1, [sp]")
- TEST_UNSUPPORTED(".word 0xe10cd09e @ swp sp, r14 [r12]")
+ TEST_UNSUPPORTED(__inst_arm(0xe108e097) " @ swp lr, r7, [r8]")
+ TEST_UNSUPPORTED(__inst_arm(0x610d0091) " @ swpvs r0, r1, [sp]")
+ TEST_UNSUPPORTED(__inst_arm(0xe10cd09e) " @ swp sp, r14 [r12]")
#endif
- TEST_UNSUPPORTED(".word 0xe102f091 @ swp pc, r1, [r2]")
- TEST_UNSUPPORTED(".word 0xe102009f @ swp r0, pc, [r2]")
- TEST_UNSUPPORTED(".word 0xe10f0091 @ swp r0, r1, [pc]")
+ TEST_UNSUPPORTED(__inst_arm(0xe102f091) " @ swp pc, r1, [r2]")
+ TEST_UNSUPPORTED(__inst_arm(0xe102009f) " @ swp r0, pc, [r2]")
+ TEST_UNSUPPORTED(__inst_arm(0xe10f0091) " @ swp r0, r1, [pc]")
#if __LINUX_ARM_ARCH__ < 6
TEST_RP("swpb lr, r",7,VAL2,", [r",8,0,"]")
TEST_R( "swpvsb r0, r",1,VAL1,", [sp]")
#else
- TEST_UNSUPPORTED(".word 0xe148e097 @ swpb lr, r7, [r8]")
- TEST_UNSUPPORTED(".word 0x614d0091 @ swpvsb r0, r1, [sp]")
+ TEST_UNSUPPORTED(__inst_arm(0xe148e097) " @ swpb lr, r7, [r8]")
+ TEST_UNSUPPORTED(__inst_arm(0x614d0091) " @ swpvsb r0, r1, [sp]")
#endif
- TEST_UNSUPPORTED(".word 0xe142f091 @ swpb pc, r1, [r2]")
-
- TEST_UNSUPPORTED(".word 0xe1100090") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe1200090") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe1300090") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe1500090") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe1600090") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe1700090") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe142f091) " @ swpb pc, r1, [r2]")
+
+ TEST_UNSUPPORTED(__inst_arm(0xe1100090)) /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe1200090)) /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe1300090)) /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe1500090)) /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe1600090)) /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe1700090)) /* Unallocated space */
#if __LINUX_ARM_ARCH__ >= 6
TEST_UNSUPPORTED("ldrex r2, [sp]")
#endif
@@ -475,9 +481,9 @@ void kprobe_arm_test_cases(void)
TEST_RPR( "strneh r",12,VAL2,", [r",11,48,", -r",10,24,"]!")
TEST_RPR( "strh r",2, VAL1,", [r",3, 24,"], r",4, 48,"")
TEST_RPR( "strh r",10,VAL2,", [r",9, 48,"], -r",11,24,"")
- TEST_UNSUPPORTED(".word 0xe1afc0ba @ strh r12, [pc, r10]!")
- TEST_UNSUPPORTED(".word 0xe089f0bb @ strh pc, [r9], r11")
- TEST_UNSUPPORTED(".word 0xe089a0bf @ strh r10, [r9], pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe1afc0ba) " @ strh r12, [pc, r10]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe089f0bb) " @ strh pc, [r9], r11")
+ TEST_UNSUPPORTED(__inst_arm(0xe089a0bf) " @ strh r10, [r9], pc")
TEST_PR( "ldrh r0, [r",0, 48,", -r",2, 24,"]")
TEST_PR( "ldrcsh r14, [r",13,0, ", r",12, 48,"]")
@@ -485,9 +491,9 @@ void kprobe_arm_test_cases(void)
TEST_PR( "ldrcch r12, [r",11,48,", -r",10,24,"]!")
TEST_PR( "ldrh r2, [r",3, 24,"], r",4, 48,"")
TEST_PR( "ldrh r10, [r",9, 48,"], -r",11,24,"")
- TEST_UNSUPPORTED(".word 0xe1bfc0ba @ ldrh r12, [pc, r10]!")
- TEST_UNSUPPORTED(".word 0xe099f0bb @ ldrh pc, [r9], r11")
- TEST_UNSUPPORTED(".word 0xe099a0bf @ ldrh r10, [r9], pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe1bfc0ba) " @ ldrh r12, [pc, r10]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe099f0bb) " @ ldrh pc, [r9], r11")
+ TEST_UNSUPPORTED(__inst_arm(0xe099a0bf) " @ ldrh r10, [r9], pc")
TEST_RP( "strh r",0, VAL1,", [r",1, 24,", #-2]")
TEST_RP( "strmih r",14,VAL2,", [r",13,0, ", #2]")
@@ -495,8 +501,8 @@ void kprobe_arm_test_cases(void)
TEST_RP( "strplh r",12,VAL2,", [r",11,24,", #-4]!")
TEST_RP( "strh r",2, VAL1,", [r",3, 24,"], #48")
TEST_RP( "strh r",10,VAL2,", [r",9, 64,"], #-48")
- TEST_UNSUPPORTED(".word 0xe1efc3b0 @ strh r12, [pc, #48]!")
- TEST_UNSUPPORTED(".word 0xe0c9f3b0 @ strh pc, [r9], #48")
+ TEST_UNSUPPORTED(__inst_arm(0xe1efc3b0) " @ strh r12, [pc, #48]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe0c9f3b0) " @ strh pc, [r9], #48")
TEST_P( "ldrh r0, [r",0, 24,", #-2]")
TEST_P( "ldrvsh r14, [r",13,0, ", #2]")
@@ -505,8 +511,8 @@ void kprobe_arm_test_cases(void)
TEST_P( "ldrh r2, [r",3, 24,"], #48")
TEST_P( "ldrh r10, [r",9, 64,"], #-48")
TEST( "ldrh r0, [pc, #0]")
- TEST_UNSUPPORTED(".word 0xe1ffc3b0 @ ldrh r12, [pc, #48]!")
- TEST_UNSUPPORTED(".word 0xe0d9f3b0 @ ldrh pc, [r9], #48")
+ TEST_UNSUPPORTED(__inst_arm(0xe1ffc3b0) " @ ldrh r12, [pc, #48]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe0d9f3b0) " @ ldrh pc, [r9], #48")
TEST_PR( "ldrsb r0, [r",0, 48,", -r",2, 24,"]")
TEST_PR( "ldrhisb r14, [r",13,0,", r",12, 48,"]")
@@ -514,8 +520,8 @@ void kprobe_arm_test_cases(void)
TEST_PR( "ldrlssb r12, [r",11,48,", -r",10,24,"]!")
TEST_PR( "ldrsb r2, [r",3, 24,"], r",4, 48,"")
TEST_PR( "ldrsb r10, [r",9, 48,"], -r",11,24,"")
- TEST_UNSUPPORTED(".word 0xe1bfc0da @ ldrsb r12, [pc, r10]!")
- TEST_UNSUPPORTED(".word 0xe099f0db @ ldrsb pc, [r9], r11")
+ TEST_UNSUPPORTED(__inst_arm(0xe1bfc0da) " @ ldrsb r12, [pc, r10]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe099f0db) " @ ldrsb pc, [r9], r11")
TEST_P( "ldrsb r0, [r",0, 24,", #-1]")
TEST_P( "ldrgesb r14, [r",13,0, ", #1]")
@@ -524,8 +530,8 @@ void kprobe_arm_test_cases(void)
TEST_P( "ldrsb r2, [r",3, 24,"], #48")
TEST_P( "ldrsb r10, [r",9, 64,"], #-48")
TEST( "ldrsb r0, [pc, #0]")
- TEST_UNSUPPORTED(".word 0xe1ffc3d0 @ ldrsb r12, [pc, #48]!")
- TEST_UNSUPPORTED(".word 0xe0d9f3d0 @ ldrsb pc, [r9], #48")
+ TEST_UNSUPPORTED(__inst_arm(0xe1ffc3d0) " @ ldrsb r12, [pc, #48]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe0d9f3d0) " @ ldrsb pc, [r9], #48")
TEST_PR( "ldrsh r0, [r",0, 48,", -r",2, 24,"]")
TEST_PR( "ldrgtsh r14, [r",13,0, ", r",12, 48,"]")
@@ -533,8 +539,8 @@ void kprobe_arm_test_cases(void)
TEST_PR( "ldrlesh r12, [r",11,48,", -r",10,24,"]!")
TEST_PR( "ldrsh r2, [r",3, 24,"], r",4, 48,"")
TEST_PR( "ldrsh r10, [r",9, 48,"], -r",11,24,"")
- TEST_UNSUPPORTED(".word 0xe1bfc0fa @ ldrsh r12, [pc, r10]!")
- TEST_UNSUPPORTED(".word 0xe099f0fb @ ldrsh pc, [r9], r11")
+ TEST_UNSUPPORTED(__inst_arm(0xe1bfc0fa) " @ ldrsh r12, [pc, r10]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe099f0fb) " @ ldrsh pc, [r9], r11")
TEST_P( "ldrsh r0, [r",0, 24,", #-1]")
TEST_P( "ldreqsh r14, [r",13,0 ,", #1]")
@@ -543,8 +549,8 @@ void kprobe_arm_test_cases(void)
TEST_P( "ldrsh r2, [r",3, 24,"], #48")
TEST_P( "ldrsh r10, [r",9, 64,"], #-48")
TEST( "ldrsh r0, [pc, #0]")
- TEST_UNSUPPORTED(".word 0xe1ffc3f0 @ ldrsh r12, [pc, #48]!")
- TEST_UNSUPPORTED(".word 0xe0d9f3f0 @ ldrsh pc, [r9], #48")
+ TEST_UNSUPPORTED(__inst_arm(0xe1ffc3f0) " @ ldrsh r12, [pc, #48]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe0d9f3f0) " @ ldrsh pc, [r9], #48")
#if __LINUX_ARM_ARCH__ >= 7
TEST_UNSUPPORTED("strht r1, [r2], r3")
@@ -557,13 +563,14 @@ void kprobe_arm_test_cases(void)
TEST_UNSUPPORTED("ldrsht r1, [r2], #48")
#endif
+#if __LINUX_ARM_ARCH__ >= 5
TEST_RPR( "strd r",0, VAL1,", [r",1, 48,", -r",2,24,"]")
TEST_RPR( "strccd r",8, VAL2,", [r",13,0, ", r",12,48,"]")
TEST_RPR( "strd r",4, VAL1,", [r",2, 24,", r",3, 48,"]!")
TEST_RPR( "strcsd r",12,VAL2,", [r",11,48,", -r",10,24,"]!")
TEST_RPR( "strd r",2, VAL1,", [r",5, 24,"], r",4,48,"")
TEST_RPR( "strd r",10,VAL2,", [r",9, 48,"], -r",7,24,"")
- TEST_UNSUPPORTED(".word 0xe1afc0fa @ strd r12, [pc, r10]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe1afc0fa) " @ strd r12, [pc, r10]!")
TEST_PR( "ldrd r0, [r",0, 48,", -r",2,24,"]")
TEST_PR( "ldrmid r8, [r",13,0, ", r",12,48,"]")
@@ -571,10 +578,10 @@ void kprobe_arm_test_cases(void)
TEST_PR( "ldrpld r6, [r",11,48,", -r",10,24,"]!")
TEST_PR( "ldrd r2, [r",5, 24,"], r",4,48,"")
TEST_PR( "ldrd r10, [r",9,48,"], -r",7,24,"")
- TEST_UNSUPPORTED(".word 0xe1afc0da @ ldrd r12, [pc, r10]!")
- TEST_UNSUPPORTED(".word 0xe089f0db @ ldrd pc, [r9], r11")
- TEST_UNSUPPORTED(".word 0xe089e0db @ ldrd lr, [r9], r11")
- TEST_UNSUPPORTED(".word 0xe089c0df @ ldrd r12, [r9], pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe1afc0da) " @ ldrd r12, [pc, r10]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe089f0db) " @ ldrd pc, [r9], r11")
+ TEST_UNSUPPORTED(__inst_arm(0xe089e0db) " @ ldrd lr, [r9], r11")
+ TEST_UNSUPPORTED(__inst_arm(0xe089c0df) " @ ldrd r12, [r9], pc")
TEST_RP( "strd r",0, VAL1,", [r",1, 24,", #-8]")
TEST_RP( "strvsd r",8, VAL2,", [r",13,0, ", #8]")
@@ -582,7 +589,7 @@ void kprobe_arm_test_cases(void)
TEST_RP( "strvcd r",12,VAL2,", [r",11,24,", #-16]!")
TEST_RP( "strd r",2, VAL1,", [r",4, 24,"], #48")
TEST_RP( "strd r",10,VAL2,", [r",9, 64,"], #-48")
- TEST_UNSUPPORTED(".word 0xe1efc3f0 @ strd r12, [pc, #48]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe1efc3f0) " @ strd r12, [pc, #48]!")
TEST_P( "ldrd r0, [r",0, 24,", #-8]")
TEST_P( "ldrhid r8, [r",13,0, ", #8]")
@@ -590,9 +597,10 @@ void kprobe_arm_test_cases(void)
TEST_P( "ldrlsd r6, [r",11,24,", #-16]!")
TEST_P( "ldrd r2, [r",5, 24,"], #48")
TEST_P( "ldrd r10, [r",9,6,"], #-48")
- TEST_UNSUPPORTED(".word 0xe1efc3d0 @ ldrd r12, [pc, #48]!")
- TEST_UNSUPPORTED(".word 0xe0c9f3d0 @ ldrd pc, [r9], #48")
- TEST_UNSUPPORTED(".word 0xe0c9e3d0 @ ldrd lr, [r9], #48")
+ TEST_UNSUPPORTED(__inst_arm(0xe1efc3d0) " @ ldrd r12, [pc, #48]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe0c9f3d0) " @ ldrd pc, [r9], #48")
+ TEST_UNSUPPORTED(__inst_arm(0xe0c9e3d0) " @ ldrd lr, [r9], #48")
+#endif
TEST_GROUP("Miscellaneous")
@@ -600,11 +608,11 @@ void kprobe_arm_test_cases(void)
TEST("movw r0, #0")
TEST("movw r0, #0xffff")
TEST("movw lr, #0xffff")
- TEST_UNSUPPORTED(".word 0xe300f000 @ movw pc, #0")
+ TEST_UNSUPPORTED(__inst_arm(0xe300f000) " @ movw pc, #0")
TEST_R("movt r",0, VAL1,", #0")
TEST_R("movt r",0, VAL2,", #0xffff")
TEST_R("movt r",14,VAL1,", #0xffff")
- TEST_UNSUPPORTED(".word 0xe340f000 @ movt pc, #0")
+ TEST_UNSUPPORTED(__inst_arm(0xe340f000) " @ movt pc, #0")
#endif
TEST_UNSUPPORTED("msr cpsr, 0x13")
@@ -672,20 +680,20 @@ void kprobe_arm_test_cases(void)
#ifdef CONFIG_THUMB2_KERNEL
TEST_ARM_TO_THUMB_INTERWORK_P("ldr pc, [r",0,0,", #15*4]")
#endif
- TEST_UNSUPPORTED(".word 0xe5af6008 @ str r6, [pc, #8]!")
- TEST_UNSUPPORTED(".word 0xe7af6008 @ str r6, [pc, r8]!")
- TEST_UNSUPPORTED(".word 0xe5bf6008 @ ldr r6, [pc, #8]!")
- TEST_UNSUPPORTED(".word 0xe7bf6008 @ ldr r6, [pc, r8]!")
- TEST_UNSUPPORTED(".word 0xe788600f @ str r6, [r8, pc]")
- TEST_UNSUPPORTED(".word 0xe798600f @ ldr r6, [r8, pc]")
+ TEST_UNSUPPORTED(__inst_arm(0xe5af6008) " @ str r6, [pc, #8]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe7af6008) " @ str r6, [pc, r8]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe5bf6008) " @ ldr r6, [pc, #8]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe7bf6008) " @ ldr r6, [pc, r8]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe788600f) " @ str r6, [r8, pc]")
+ TEST_UNSUPPORTED(__inst_arm(0xe798600f) " @ ldr r6, [r8, pc]")
LOAD_STORE("b")
- TEST_UNSUPPORTED(".word 0xe5f7f008 @ ldrb pc, [r7, #8]!")
- TEST_UNSUPPORTED(".word 0xe7f7f008 @ ldrb pc, [r7, r8]!")
- TEST_UNSUPPORTED(".word 0xe5ef6008 @ strb r6, [pc, #8]!")
- TEST_UNSUPPORTED(".word 0xe7ef6008 @ strb r6, [pc, r3]!")
- TEST_UNSUPPORTED(".word 0xe5ff6008 @ ldrb r6, [pc, #8]!")
- TEST_UNSUPPORTED(".word 0xe7ff6008 @ ldrb r6, [pc, r3]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe5f7f008) " @ ldrb pc, [r7, #8]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe7f7f008) " @ ldrb pc, [r7, r8]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe5ef6008) " @ strb r6, [pc, #8]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe7ef6008) " @ strb r6, [pc, r3]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe5ff6008) " @ ldrb r6, [pc, #8]!")
+ TEST_UNSUPPORTED(__inst_arm(0xe7ff6008) " @ ldrb r6, [pc, r3]!")
TEST_UNSUPPORTED("ldrt r0, [r1], #4")
TEST_UNSUPPORTED("ldrt r1, [r2], r3")
@@ -699,153 +707,153 @@ void kprobe_arm_test_cases(void)
#if __LINUX_ARM_ARCH__ >= 7
TEST_GROUP("Parallel addition and subtraction, signed")
- TEST_UNSUPPORTED(".word 0xe6000010") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe60fffff") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe6000010) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe60fffff) "") /* Unallocated space */
TEST_RR( "sadd16 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "sadd16 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe61cff1a @ sadd16 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe61cff1a) " @ sadd16 pc, r12, r10")
TEST_RR( "sasx r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "sasx r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe61cff3a @ sasx pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe61cff3a) " @ sasx pc, r12, r10")
TEST_RR( "ssax r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "ssax r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe61cff5a @ ssax pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe61cff5a) " @ ssax pc, r12, r10")
TEST_RR( "ssub16 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "ssub16 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe61cff7a @ ssub16 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe61cff7a) " @ ssub16 pc, r12, r10")
TEST_RR( "sadd8 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "sadd8 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe61cff9a @ sadd8 pc, r12, r10")
- TEST_UNSUPPORTED(".word 0xe61000b0") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe61fffbf") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe61000d0") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe61fffdf") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe61cff9a) " @ sadd8 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe61000b0) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe61fffbf) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe61000d0) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe61fffdf) "") /* Unallocated space */
TEST_RR( "ssub8 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "ssub8 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe61cfffa @ ssub8 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe61cfffa) " @ ssub8 pc, r12, r10")
TEST_RR( "qadd16 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "qadd16 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe62cff1a @ qadd16 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe62cff1a) " @ qadd16 pc, r12, r10")
TEST_RR( "qasx r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "qasx r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe62cff3a @ qasx pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe62cff3a) " @ qasx pc, r12, r10")
TEST_RR( "qsax r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "qsax r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe62cff5a @ qsax pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe62cff5a) " @ qsax pc, r12, r10")
TEST_RR( "qsub16 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "qsub16 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe62cff7a @ qsub16 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe62cff7a) " @ qsub16 pc, r12, r10")
TEST_RR( "qadd8 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "qadd8 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe62cff9a @ qadd8 pc, r12, r10")
- TEST_UNSUPPORTED(".word 0xe62000b0") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe62fffbf") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe62000d0") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe62fffdf") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe62cff9a) " @ qadd8 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe62000b0) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe62fffbf) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe62000d0) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe62fffdf) "") /* Unallocated space */
TEST_RR( "qsub8 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "qsub8 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe62cfffa @ qsub8 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe62cfffa) " @ qsub8 pc, r12, r10")
TEST_RR( "shadd16 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "shadd16 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe63cff1a @ shadd16 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe63cff1a) " @ shadd16 pc, r12, r10")
TEST_RR( "shasx r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "shasx r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe63cff3a @ shasx pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe63cff3a) " @ shasx pc, r12, r10")
TEST_RR( "shsax r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "shsax r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe63cff5a @ shsax pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe63cff5a) " @ shsax pc, r12, r10")
TEST_RR( "shsub16 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "shsub16 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe63cff7a @ shsub16 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe63cff7a) " @ shsub16 pc, r12, r10")
TEST_RR( "shadd8 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "shadd8 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe63cff9a @ shadd8 pc, r12, r10")
- TEST_UNSUPPORTED(".word 0xe63000b0") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe63fffbf") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe63000d0") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe63fffdf") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe63cff9a) " @ shadd8 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe63000b0) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe63fffbf) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe63000d0) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe63fffdf) "") /* Unallocated space */
TEST_RR( "shsub8 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "shsub8 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe63cfffa @ shsub8 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe63cfffa) " @ shsub8 pc, r12, r10")
TEST_GROUP("Parallel addition and subtraction, unsigned")
- TEST_UNSUPPORTED(".word 0xe6400010") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe64fffff") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe6400010) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe64fffff) "") /* Unallocated space */
TEST_RR( "uadd16 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uadd16 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe65cff1a @ uadd16 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe65cff1a) " @ uadd16 pc, r12, r10")
TEST_RR( "uasx r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uasx r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe65cff3a @ uasx pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe65cff3a) " @ uasx pc, r12, r10")
TEST_RR( "usax r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "usax r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe65cff5a @ usax pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe65cff5a) " @ usax pc, r12, r10")
TEST_RR( "usub16 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "usub16 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe65cff7a @ usub16 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe65cff7a) " @ usub16 pc, r12, r10")
TEST_RR( "uadd8 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uadd8 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe65cff9a @ uadd8 pc, r12, r10")
- TEST_UNSUPPORTED(".word 0xe65000b0") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe65fffbf") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe65000d0") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe65fffdf") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe65cff9a) " @ uadd8 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe65000b0) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe65fffbf) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe65000d0) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe65fffdf) "") /* Unallocated space */
TEST_RR( "usub8 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "usub8 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe65cfffa @ usub8 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe65cfffa) " @ usub8 pc, r12, r10")
TEST_RR( "uqadd16 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uqadd16 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe66cff1a @ uqadd16 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe66cff1a) " @ uqadd16 pc, r12, r10")
TEST_RR( "uqasx r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uqasx r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe66cff3a @ uqasx pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe66cff3a) " @ uqasx pc, r12, r10")
TEST_RR( "uqsax r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uqsax r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe66cff5a @ uqsax pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe66cff5a) " @ uqsax pc, r12, r10")
TEST_RR( "uqsub16 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uqsub16 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe66cff7a @ uqsub16 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe66cff7a) " @ uqsub16 pc, r12, r10")
TEST_RR( "uqadd8 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uqadd8 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe66cff9a @ uqadd8 pc, r12, r10")
- TEST_UNSUPPORTED(".word 0xe66000b0") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe66fffbf") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe66000d0") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe66fffdf") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe66cff9a) " @ uqadd8 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe66000b0) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe66fffbf) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe66000d0) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe66fffdf) "") /* Unallocated space */
TEST_RR( "uqsub8 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uqsub8 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe66cfffa @ uqsub8 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe66cfffa) " @ uqsub8 pc, r12, r10")
TEST_RR( "uhadd16 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uhadd16 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe67cff1a @ uhadd16 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe67cff1a) " @ uhadd16 pc, r12, r10")
TEST_RR( "uhasx r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uhasx r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe67cff3a @ uhasx pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe67cff3a) " @ uhasx pc, r12, r10")
TEST_RR( "uhsax r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uhsax r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe67cff5a @ uhsax pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe67cff5a) " @ uhsax pc, r12, r10")
TEST_RR( "uhsub16 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uhsub16 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe67cff7a @ uhsub16 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe67cff7a) " @ uhsub16 pc, r12, r10")
TEST_RR( "uhadd8 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uhadd8 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe67cff9a @ uhadd8 pc, r12, r10")
- TEST_UNSUPPORTED(".word 0xe67000b0") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe67fffbf") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe67000d0") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe67fffdf") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe67cff9a) " @ uhadd8 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe67000b0) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe67fffbf) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe67000d0) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe67fffdf) "") /* Unallocated space */
TEST_RR( "uhsub8 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uhsub8 r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe67cfffa @ uhsub8 pc, r12, r10")
- TEST_UNSUPPORTED(".word 0xe67feffa @ uhsub8 r14, pc, r10")
- TEST_UNSUPPORTED(".word 0xe67cefff @ uhsub8 r14, r12, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe67cfffa) " @ uhsub8 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe67feffa) " @ uhsub8 r14, pc, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe67cefff) " @ uhsub8 r14, r12, pc")
#endif /* __LINUX_ARM_ARCH__ >= 7 */
#if __LINUX_ARM_ARCH__ >= 6
@@ -853,99 +861,99 @@ void kprobe_arm_test_cases(void)
TEST_RR( "pkhbt r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "pkhbt r14,r",12, HH1,", r",10,HH2,", lsl #2")
- TEST_UNSUPPORTED(".word 0xe68cf11a @ pkhbt pc, r12, r10, lsl #2")
+ TEST_UNSUPPORTED(__inst_arm(0xe68cf11a) " @ pkhbt pc, r12, r10, lsl #2")
TEST_RR( "pkhtb r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "pkhtb r14,r",12, HH1,", r",10,HH2,", asr #2")
- TEST_UNSUPPORTED(".word 0xe68cf15a @ pkhtb pc, r12, r10, asr #2")
- TEST_UNSUPPORTED(".word 0xe68fe15a @ pkhtb r14, pc, r10, asr #2")
- TEST_UNSUPPORTED(".word 0xe68ce15f @ pkhtb r14, r12, pc, asr #2")
- TEST_UNSUPPORTED(".word 0xe6900010") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe69fffdf") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe68cf15a) " @ pkhtb pc, r12, r10, asr #2")
+ TEST_UNSUPPORTED(__inst_arm(0xe68fe15a) " @ pkhtb r14, pc, r10, asr #2")
+ TEST_UNSUPPORTED(__inst_arm(0xe68ce15f) " @ pkhtb r14, r12, pc, asr #2")
+ TEST_UNSUPPORTED(__inst_arm(0xe6900010) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe69fffdf) "") /* Unallocated space */
TEST_R( "ssat r0, #24, r",0, VAL1,"")
TEST_R( "ssat r14, #24, r",12, VAL2,"")
TEST_R( "ssat r0, #24, r",0, VAL1,", lsl #8")
TEST_R( "ssat r14, #24, r",12, VAL2,", asr #8")
- TEST_UNSUPPORTED(".word 0xe6b7f01c @ ssat pc, #24, r12")
+ TEST_UNSUPPORTED(__inst_arm(0xe6b7f01c) " @ ssat pc, #24, r12")
TEST_R( "usat r0, #24, r",0, VAL1,"")
TEST_R( "usat r14, #24, r",12, VAL2,"")
TEST_R( "usat r0, #24, r",0, VAL1,", lsl #8")
TEST_R( "usat r14, #24, r",12, VAL2,", asr #8")
- TEST_UNSUPPORTED(".word 0xe6f7f01c @ usat pc, #24, r12")
+ TEST_UNSUPPORTED(__inst_arm(0xe6f7f01c) " @ usat pc, #24, r12")
TEST_RR( "sxtab16 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "sxtab16 r14,r",12, HH2,", r",10,HH1,", ror #8")
TEST_R( "sxtb16 r8, r",7, HH1,"")
- TEST_UNSUPPORTED(".word 0xe68cf47a @ sxtab16 pc,r12, r10, ror #8")
+ TEST_UNSUPPORTED(__inst_arm(0xe68cf47a) " @ sxtab16 pc,r12, r10, ror #8")
TEST_RR( "sel r0, r",0, VAL1,", r",1, VAL2,"")
TEST_RR( "sel r14, r",12,VAL1,", r",10, VAL2,"")
- TEST_UNSUPPORTED(".word 0xe68cffba @ sel pc, r12, r10")
- TEST_UNSUPPORTED(".word 0xe68fefba @ sel r14, pc, r10")
- TEST_UNSUPPORTED(".word 0xe68cefbf @ sel r14, r12, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe68cffba) " @ sel pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe68fefba) " @ sel r14, pc, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe68cefbf) " @ sel r14, r12, pc")
TEST_R( "ssat16 r0, #12, r",0, HH1,"")
TEST_R( "ssat16 r14, #12, r",12, HH2,"")
- TEST_UNSUPPORTED(".word 0xe6abff3c @ ssat16 pc, #12, r12")
+ TEST_UNSUPPORTED(__inst_arm(0xe6abff3c) " @ ssat16 pc, #12, r12")
TEST_RR( "sxtab r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "sxtab r14,r",12, HH2,", r",10,HH1,", ror #8")
TEST_R( "sxtb r8, r",7, HH1,"")
- TEST_UNSUPPORTED(".word 0xe6acf47a @ sxtab pc,r12, r10, ror #8")
+ TEST_UNSUPPORTED(__inst_arm(0xe6acf47a) " @ sxtab pc,r12, r10, ror #8")
TEST_R( "rev r0, r",0, VAL1,"")
TEST_R( "rev r14, r",12, VAL2,"")
- TEST_UNSUPPORTED(".word 0xe6bfff3c @ rev pc, r12")
+ TEST_UNSUPPORTED(__inst_arm(0xe6bfff3c) " @ rev pc, r12")
TEST_RR( "sxtah r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "sxtah r14,r",12, HH2,", r",10,HH1,", ror #8")
TEST_R( "sxth r8, r",7, HH1,"")
- TEST_UNSUPPORTED(".word 0xe6bcf47a @ sxtah pc,r12, r10, ror #8")
+ TEST_UNSUPPORTED(__inst_arm(0xe6bcf47a) " @ sxtah pc,r12, r10, ror #8")
TEST_R( "rev16 r0, r",0, VAL1,"")
TEST_R( "rev16 r14, r",12, VAL2,"")
- TEST_UNSUPPORTED(".word 0xe6bfffbc @ rev16 pc, r12")
+ TEST_UNSUPPORTED(__inst_arm(0xe6bfffbc) " @ rev16 pc, r12")
TEST_RR( "uxtab16 r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uxtab16 r14,r",12, HH2,", r",10,HH1,", ror #8")
TEST_R( "uxtb16 r8, r",7, HH1,"")
- TEST_UNSUPPORTED(".word 0xe6ccf47a @ uxtab16 pc,r12, r10, ror #8")
+ TEST_UNSUPPORTED(__inst_arm(0xe6ccf47a) " @ uxtab16 pc,r12, r10, ror #8")
TEST_R( "usat16 r0, #12, r",0, HH1,"")
TEST_R( "usat16 r14, #12, r",12, HH2,"")
- TEST_UNSUPPORTED(".word 0xe6ecff3c @ usat16 pc, #12, r12")
- TEST_UNSUPPORTED(".word 0xe6ecef3f @ usat16 r14, #12, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe6ecff3c) " @ usat16 pc, #12, r12")
+ TEST_UNSUPPORTED(__inst_arm(0xe6ecef3f) " @ usat16 r14, #12, pc")
TEST_RR( "uxtab r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uxtab r14,r",12, HH2,", r",10,HH1,", ror #8")
TEST_R( "uxtb r8, r",7, HH1,"")
- TEST_UNSUPPORTED(".word 0xe6ecf47a @ uxtab pc,r12, r10, ror #8")
+ TEST_UNSUPPORTED(__inst_arm(0xe6ecf47a) " @ uxtab pc,r12, r10, ror #8")
#if __LINUX_ARM_ARCH__ >= 7
TEST_R( "rbit r0, r",0, VAL1,"")
TEST_R( "rbit r14, r",12, VAL2,"")
- TEST_UNSUPPORTED(".word 0xe6ffff3c @ rbit pc, r12")
+ TEST_UNSUPPORTED(__inst_arm(0xe6ffff3c) " @ rbit pc, r12")
#endif
TEST_RR( "uxtah r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uxtah r14,r",12, HH2,", r",10,HH1,", ror #8")
TEST_R( "uxth r8, r",7, HH1,"")
- TEST_UNSUPPORTED(".word 0xe6fff077 @ uxth pc, r7")
- TEST_UNSUPPORTED(".word 0xe6ff807f @ uxth r8, pc")
- TEST_UNSUPPORTED(".word 0xe6fcf47a @ uxtah pc, r12, r10, ror #8")
- TEST_UNSUPPORTED(".word 0xe6fce47f @ uxtah r14, r12, pc, ror #8")
+ TEST_UNSUPPORTED(__inst_arm(0xe6fff077) " @ uxth pc, r7")
+ TEST_UNSUPPORTED(__inst_arm(0xe6ff807f) " @ uxth r8, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe6fcf47a) " @ uxtah pc, r12, r10, ror #8")
+ TEST_UNSUPPORTED(__inst_arm(0xe6fce47f) " @ uxtah r14, r12, pc, ror #8")
TEST_R( "revsh r0, r",0, VAL1,"")
TEST_R( "revsh r14, r",12, VAL2,"")
- TEST_UNSUPPORTED(".word 0xe6ffff3c @ revsh pc, r12")
- TEST_UNSUPPORTED(".word 0xe6ffef3f @ revsh r14, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe6ffff3c) " @ revsh pc, r12")
+ TEST_UNSUPPORTED(__inst_arm(0xe6ffef3f) " @ revsh r14, pc")
- TEST_UNSUPPORTED(".word 0xe6900070") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe69fff7f") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe6900070) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe69fff7f) "") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe6d00070") /* Unallocated space */
- TEST_UNSUPPORTED(".word 0xe6dfff7f") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe6d00070) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_arm(0xe6dfff7f) "") /* Unallocated space */
#endif /* __LINUX_ARM_ARCH__ >= 6 */
#if __LINUX_ARM_ARCH__ >= 6
@@ -953,79 +961,79 @@ void kprobe_arm_test_cases(void)
TEST_RRR( "smlad r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"")
TEST_RRR( "smlad r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"")
- TEST_UNSUPPORTED(".word 0xe70f8a1c @ smlad pc, r12, r10, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe70f8a1c) " @ smlad pc, r12, r10, r8")
TEST_RRR( "smladx r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"")
TEST_RRR( "smladx r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"")
- TEST_UNSUPPORTED(".word 0xe70f8a3c @ smladx pc, r12, r10, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe70f8a3c) " @ smladx pc, r12, r10, r8")
TEST_RR( "smuad r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "smuad r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe70ffa1c @ smuad pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe70ffa1c) " @ smuad pc, r12, r10")
TEST_RR( "smuadx r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "smuadx r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe70ffa3c @ smuadx pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe70ffa3c) " @ smuadx pc, r12, r10")
TEST_RRR( "smlsd r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"")
TEST_RRR( "smlsd r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"")
- TEST_UNSUPPORTED(".word 0xe70f8a5c @ smlsd pc, r12, r10, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe70f8a5c) " @ smlsd pc, r12, r10, r8")
TEST_RRR( "smlsdx r0, r",0, HH1,", r",1, HH2,", r",2, VAL1,"")
TEST_RRR( "smlsdx r14, r",12,HH2,", r",10,HH1,", r",8, VAL2,"")
- TEST_UNSUPPORTED(".word 0xe70f8a7c @ smlsdx pc, r12, r10, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe70f8a7c) " @ smlsdx pc, r12, r10, r8")
TEST_RR( "smusd r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "smusd r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe70ffa5c @ smusd pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe70ffa5c) " @ smusd pc, r12, r10")
TEST_RR( "smusdx r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "smusdx r14, r",12,HH2,", r",10,HH1,"")
- TEST_UNSUPPORTED(".word 0xe70ffa7c @ smusdx pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe70ffa7c) " @ smusdx pc, r12, r10")
TEST_RRRR( "smlald r",0, VAL1,", r",1, VAL2, ", r",0, HH1,", r",1, HH2)
TEST_RRRR( "smlald r",11,VAL2,", r",10,VAL1, ", r",9, HH2,", r",8, HH1)
- TEST_UNSUPPORTED(".word 0xe74af819 @ smlald pc, r10, r9, r8")
- TEST_UNSUPPORTED(".word 0xe74fb819 @ smlald r11, pc, r9, r8")
- TEST_UNSUPPORTED(".word 0xe74ab81f @ smlald r11, r10, pc, r8")
- TEST_UNSUPPORTED(".word 0xe74abf19 @ smlald r11, r10, r9, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe74af819) " @ smlald pc, r10, r9, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe74fb819) " @ smlald r11, pc, r9, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe74ab81f) " @ smlald r11, r10, pc, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe74abf19) " @ smlald r11, r10, r9, pc")
TEST_RRRR( "smlaldx r",0, VAL1,", r",1, VAL2, ", r",0, HH1,", r",1, HH2)
TEST_RRRR( "smlaldx r",11,VAL2,", r",10,VAL1, ", r",9, HH2,", r",8, HH1)
- TEST_UNSUPPORTED(".word 0xe74af839 @ smlaldx pc, r10, r9, r8")
- TEST_UNSUPPORTED(".word 0xe74fb839 @ smlaldx r11, pc, r9, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe74af839) " @ smlaldx pc, r10, r9, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe74fb839) " @ smlaldx r11, pc, r9, r8")
TEST_RRR( "smmla r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"")
TEST_RRR( "smmla r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"")
- TEST_UNSUPPORTED(".word 0xe75f8a1c @ smmla pc, r12, r10, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe75f8a1c) " @ smmla pc, r12, r10, r8")
TEST_RRR( "smmlar r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"")
TEST_RRR( "smmlar r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"")
- TEST_UNSUPPORTED(".word 0xe75f8a3c @ smmlar pc, r12, r10, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe75f8a3c) " @ smmlar pc, r12, r10, r8")
TEST_RR( "smmul r0, r",0, VAL1,", r",1, VAL2,"")
TEST_RR( "smmul r14, r",12,VAL2,", r",10,VAL1,"")
- TEST_UNSUPPORTED(".word 0xe75ffa1c @ smmul pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe75ffa1c) " @ smmul pc, r12, r10")
TEST_RR( "smmulr r0, r",0, VAL1,", r",1, VAL2,"")
TEST_RR( "smmulr r14, r",12,VAL2,", r",10,VAL1,"")
- TEST_UNSUPPORTED(".word 0xe75ffa3c @ smmulr pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe75ffa3c) " @ smmulr pc, r12, r10")
TEST_RRR( "smmls r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"")
TEST_RRR( "smmls r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"")
- TEST_UNSUPPORTED(".word 0xe75f8adc @ smmls pc, r12, r10, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe75f8adc) " @ smmls pc, r12, r10, r8")
TEST_RRR( "smmlsr r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL1,"")
TEST_RRR( "smmlsr r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL2,"")
- TEST_UNSUPPORTED(".word 0xe75f8afc @ smmlsr pc, r12, r10, r8")
- TEST_UNSUPPORTED(".word 0xe75e8aff @ smmlsr r14, pc, r10, r8")
- TEST_UNSUPPORTED(".word 0xe75e8ffc @ smmlsr r14, r12, pc, r8")
- TEST_UNSUPPORTED(".word 0xe75efafc @ smmlsr r14, r12, r10, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe75f8afc) " @ smmlsr pc, r12, r10, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe75e8aff) " @ smmlsr r14, pc, r10, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe75e8ffc) " @ smmlsr r14, r12, pc, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe75efafc) " @ smmlsr r14, r12, r10, pc")
TEST_RR( "usad8 r0, r",0, VAL1,", r",1, VAL2,"")
TEST_RR( "usad8 r14, r",12,VAL2,", r",10,VAL1,"")
- TEST_UNSUPPORTED(".word 0xe75ffa1c @ usad8 pc, r12, r10")
- TEST_UNSUPPORTED(".word 0xe75efa1f @ usad8 r14, pc, r10")
- TEST_UNSUPPORTED(".word 0xe75eff1c @ usad8 r14, r12, pc")
+ TEST_UNSUPPORTED(__inst_arm(0xe75ffa1c) " @ usad8 pc, r12, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe75efa1f) " @ usad8 r14, pc, r10")
+ TEST_UNSUPPORTED(__inst_arm(0xe75eff1c) " @ usad8 r14, r12, pc")
TEST_RRR( "usada8 r0, r",0, VAL1,", r",1, VAL2,", r",2, VAL3,"")
TEST_RRR( "usada8 r14, r",12,VAL2,", r",10,VAL1,", r",8, VAL3,"")
- TEST_UNSUPPORTED(".word 0xe78f8a1c @ usada8 pc, r12, r10, r8")
- TEST_UNSUPPORTED(".word 0xe78e8a1f @ usada8 r14, pc, r10, r8")
- TEST_UNSUPPORTED(".word 0xe78e8f1c @ usada8 r14, r12, pc, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe78f8a1c) " @ usada8 pc, r12, r10, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe78e8a1f) " @ usada8 r14, pc, r10, r8")
+ TEST_UNSUPPORTED(__inst_arm(0xe78e8f1c) " @ usada8 r14, r12, pc, r8")
#endif /* __LINUX_ARM_ARCH__ >= 6 */
#if __LINUX_ARM_ARCH__ >= 7
@@ -1034,26 +1042,26 @@ void kprobe_arm_test_cases(void)
TEST_R( "sbfx r0, r",0 , VAL1,", #0, #31")
TEST_R( "sbfxeq r14, r",12, VAL2,", #8, #16")
TEST_R( "sbfx r4, r",10, VAL1,", #16, #15")
- TEST_UNSUPPORTED(".word 0xe7aff45c @ sbfx pc, r12, #8, #16")
+ TEST_UNSUPPORTED(__inst_arm(0xe7aff45c) " @ sbfx pc, r12, #8, #16")
TEST_R( "ubfx r0, r",0 , VAL1,", #0, #31")
TEST_R( "ubfxcs r14, r",12, VAL2,", #8, #16")
TEST_R( "ubfx r4, r",10, VAL1,", #16, #15")
- TEST_UNSUPPORTED(".word 0xe7eff45c @ ubfx pc, r12, #8, #16")
- TEST_UNSUPPORTED(".word 0xe7efc45f @ ubfx r12, pc, #8, #16")
+ TEST_UNSUPPORTED(__inst_arm(0xe7eff45c) " @ ubfx pc, r12, #8, #16")
+ TEST_UNSUPPORTED(__inst_arm(0xe7efc45f) " @ ubfx r12, pc, #8, #16")
TEST_R( "bfc r",0, VAL1,", #4, #20")
TEST_R( "bfcvs r",14,VAL2,", #4, #20")
TEST_R( "bfc r",7, VAL1,", #0, #31")
TEST_R( "bfc r",8, VAL2,", #0, #31")
- TEST_UNSUPPORTED(".word 0xe7def01f @ bfc pc, #0, #31");
+ TEST_UNSUPPORTED(__inst_arm(0xe7def01f) " @ bfc pc, #0, #31");
TEST_RR( "bfi r",0, VAL1,", r",0 , VAL2,", #0, #31")
TEST_RR( "bfipl r",12,VAL1,", r",14 , VAL2,", #4, #20")
- TEST_UNSUPPORTED(".word 0xe7d7f21e @ bfi pc, r14, #4, #20")
+ TEST_UNSUPPORTED(__inst_arm(0xe7d7f21e) " @ bfi pc, r14, #4, #20")
- TEST_UNSUPPORTED(".word 0x07f000f0") /* Permanently UNDEFINED */
- TEST_UNSUPPORTED(".word 0x07ffffff") /* Permanently UNDEFINED */
+ TEST_UNSUPPORTED(__inst_arm(0x07f000f0) "") /* Permanently UNDEFINED */
+ TEST_UNSUPPORTED(__inst_arm(0x07ffffff) "") /* Permanently UNDEFINED */
#endif /* __LINUX_ARM_ARCH__ >= 6 */
TEST_GROUP("Branch, branch with link, and block data transfer")
@@ -1180,43 +1188,43 @@ void kprobe_arm_test_cases(void)
\
TEST_COPROCESSOR( "stc"two" 0, cr0, [r15, #4]") \
TEST_COPROCESSOR( "stc"two" 0, cr0, [r15, #-4]") \
- TEST_UNSUPPORTED(".word 0x"cc"daf0001 @ stc"two" 0, cr0, [r15, #4]!") \
- TEST_UNSUPPORTED(".word 0x"cc"d2f0001 @ stc"two" 0, cr0, [r15, #-4]!") \
- TEST_UNSUPPORTED(".word 0x"cc"caf0001 @ stc"two" 0, cr0, [r15], #4") \
- TEST_UNSUPPORTED(".word 0x"cc"c2f0001 @ stc"two" 0, cr0, [r15], #-4") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##daf0001) " @ stc"two" 0, cr0, [r15, #4]!") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##d2f0001) " @ stc"two" 0, cr0, [r15, #-4]!") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##caf0001) " @ stc"two" 0, cr0, [r15], #4") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##c2f0001) " @ stc"two" 0, cr0, [r15], #-4") \
TEST_COPROCESSOR( "stc"two" 0, cr0, [r15], {1}") \
TEST_COPROCESSOR( "stc"two"l 0, cr0, [r15, #4]") \
TEST_COPROCESSOR( "stc"two"l 0, cr0, [r15, #-4]") \
- TEST_UNSUPPORTED(".word 0x"cc"def0001 @ stc"two"l 0, cr0, [r15, #4]!") \
- TEST_UNSUPPORTED(".word 0x"cc"d6f0001 @ stc"two"l 0, cr0, [r15, #-4]!") \
- TEST_UNSUPPORTED(".word 0x"cc"cef0001 @ stc"two"l 0, cr0, [r15], #4") \
- TEST_UNSUPPORTED(".word 0x"cc"c6f0001 @ stc"two"l 0, cr0, [r15], #-4") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##def0001) " @ stc"two"l 0, cr0, [r15, #4]!") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##d6f0001) " @ stc"two"l 0, cr0, [r15, #-4]!") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##cef0001) " @ stc"two"l 0, cr0, [r15], #4") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##c6f0001) " @ stc"two"l 0, cr0, [r15], #-4") \
TEST_COPROCESSOR( "stc"two"l 0, cr0, [r15], {1}") \
TEST_COPROCESSOR( "ldc"two" 0, cr0, [r15, #4]") \
TEST_COPROCESSOR( "ldc"two" 0, cr0, [r15, #-4]") \
- TEST_UNSUPPORTED(".word 0x"cc"dbf0001 @ ldc"two" 0, cr0, [r15, #4]!") \
- TEST_UNSUPPORTED(".word 0x"cc"d3f0001 @ ldc"two" 0, cr0, [r15, #-4]!") \
- TEST_UNSUPPORTED(".word 0x"cc"cbf0001 @ ldc"two" 0, cr0, [r15], #4") \
- TEST_UNSUPPORTED(".word 0x"cc"c3f0001 @ ldc"two" 0, cr0, [r15], #-4") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##dbf0001) " @ ldc"two" 0, cr0, [r15, #4]!") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##d3f0001) " @ ldc"two" 0, cr0, [r15, #-4]!") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##cbf0001) " @ ldc"two" 0, cr0, [r15], #4") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##c3f0001) " @ ldc"two" 0, cr0, [r15], #-4") \
TEST_COPROCESSOR( "ldc"two" 0, cr0, [r15], {1}") \
TEST_COPROCESSOR( "ldc"two"l 0, cr0, [r15, #4]") \
TEST_COPROCESSOR( "ldc"two"l 0, cr0, [r15, #-4]") \
- TEST_UNSUPPORTED(".word 0x"cc"dff0001 @ ldc"two"l 0, cr0, [r15, #4]!") \
- TEST_UNSUPPORTED(".word 0x"cc"d7f0001 @ ldc"two"l 0, cr0, [r15, #-4]!") \
- TEST_UNSUPPORTED(".word 0x"cc"cff0001 @ ldc"two"l 0, cr0, [r15], #4") \
- TEST_UNSUPPORTED(".word 0x"cc"c7f0001 @ ldc"two"l 0, cr0, [r15], #-4") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##dff0001) " @ ldc"two"l 0, cr0, [r15, #4]!") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##d7f0001) " @ ldc"two"l 0, cr0, [r15, #-4]!") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##cff0001) " @ ldc"two"l 0, cr0, [r15], #4") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##c7f0001) " @ ldc"two"l 0, cr0, [r15], #-4") \
TEST_COPROCESSOR( "ldc"two"l 0, cr0, [r15], {1}")
#define COPROCESSOR_INSTRUCTIONS_MC_MR(two,cc) \
\
TEST_COPROCESSOR( "mcrr"two" 0, 15, r0, r14, cr0") \
TEST_COPROCESSOR( "mcrr"two" 15, 0, r14, r0, cr15") \
- TEST_UNSUPPORTED(".word 0x"cc"c4f00f0 @ mcrr"two" 0, 15, r0, r15, cr0") \
- TEST_UNSUPPORTED(".word 0x"cc"c40ff0f @ mcrr"two" 15, 0, r15, r0, cr15") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##c4f00f0) " @ mcrr"two" 0, 15, r0, r15, cr0") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##c40ff0f) " @ mcrr"two" 15, 0, r15, r0, cr15") \
TEST_COPROCESSOR( "mrrc"two" 0, 15, r0, r14, cr0") \
TEST_COPROCESSOR( "mrrc"two" 15, 0, r14, r0, cr15") \
- TEST_UNSUPPORTED(".word 0x"cc"c5f00f0 @ mrrc"two" 0, 15, r0, r15, cr0") \
- TEST_UNSUPPORTED(".word 0x"cc"c50ff0f @ mrrc"two" 15, 0, r15, r0, cr15") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##c5f00f0) " @ mrrc"two" 0, 15, r0, r15, cr0") \
+ TEST_UNSUPPORTED(__inst_arm(0x##cc##c50ff0f) " @ mrrc"two" 15, 0, r15, r0, cr15") \
TEST_COPROCESSOR( "cdp"two" 15, 15, cr15, cr15, cr15, 7") \
TEST_COPROCESSOR( "cdp"two" 0, 0, cr0, cr0, cr0, 0") \
TEST_COPROCESSOR( "mcr"two" 15, 7, r15, cr15, cr15, 7") \
@@ -1224,8 +1232,10 @@ void kprobe_arm_test_cases(void)
TEST_COPROCESSOR( "mrc"two" 15, 7, r15, cr15, cr15, 7") \
TEST_COPROCESSOR( "mrc"two" 0, 0, r0, cr0, cr0, 0")
- COPROCESSOR_INSTRUCTIONS_ST_LD("","e")
- COPROCESSOR_INSTRUCTIONS_MC_MR("","e")
+ COPROCESSOR_INSTRUCTIONS_ST_LD("",e)
+#if __LINUX_ARM_ARCH__ >= 5
+ COPROCESSOR_INSTRUCTIONS_MC_MR("",e)
+#endif
TEST_UNSUPPORTED("svc 0")
TEST_UNSUPPORTED("svc 0xffffff")
@@ -1251,14 +1261,14 @@ void kprobe_arm_test_cases(void)
TEST_UNSUPPORTED("rfedb sp!")
TEST_UNSUPPORTED("rfeia sp!")
TEST_UNSUPPORTED("rfeib sp!")
- TEST_UNSUPPORTED(".word 0xf81d0a00 @ rfeda pc")
- TEST_UNSUPPORTED(".word 0xf91d0a00 @ rfedb pc")
- TEST_UNSUPPORTED(".word 0xf89d0a00 @ rfeia pc")
- TEST_UNSUPPORTED(".word 0xf99d0a00 @ rfeib pc")
- TEST_UNSUPPORTED(".word 0xf83d0a00 @ rfeda pc!")
- TEST_UNSUPPORTED(".word 0xf93d0a00 @ rfedb pc!")
- TEST_UNSUPPORTED(".word 0xf8bd0a00 @ rfeia pc!")
- TEST_UNSUPPORTED(".word 0xf9bd0a00 @ rfeib pc!")
+ TEST_UNSUPPORTED(__inst_arm(0xf81d0a00) " @ rfeda pc")
+ TEST_UNSUPPORTED(__inst_arm(0xf91d0a00) " @ rfedb pc")
+ TEST_UNSUPPORTED(__inst_arm(0xf89d0a00) " @ rfeia pc")
+ TEST_UNSUPPORTED(__inst_arm(0xf99d0a00) " @ rfeib pc")
+ TEST_UNSUPPORTED(__inst_arm(0xf83d0a00) " @ rfeda pc!")
+ TEST_UNSUPPORTED(__inst_arm(0xf93d0a00) " @ rfedb pc!")
+ TEST_UNSUPPORTED(__inst_arm(0xf8bd0a00) " @ rfeia pc!")
+ TEST_UNSUPPORTED(__inst_arm(0xf9bd0a00) " @ rfeib pc!")
#endif /* __LINUX_ARM_ARCH__ >= 6 */
#if __LINUX_ARM_ARCH__ >= 6
@@ -1285,9 +1295,11 @@ void kprobe_arm_test_cases(void)
TEST( "blx __dummy_thumb_subroutine_odd")
#endif /* __LINUX_ARM_ARCH__ >= 6 */
- COPROCESSOR_INSTRUCTIONS_ST_LD("2","f")
+#if __LINUX_ARM_ARCH__ >= 5
+ COPROCESSOR_INSTRUCTIONS_ST_LD("2",f)
+#endif
#if __LINUX_ARM_ARCH__ >= 6
- COPROCESSOR_INSTRUCTIONS_MC_MR("2","f")
+ COPROCESSOR_INSTRUCTIONS_MC_MR("2",f)
#endif
TEST_GROUP("Miscellaneous instructions, memory hints, and Advanced SIMD instructions")
@@ -1317,9 +1329,9 @@ void kprobe_arm_test_cases(void)
#endif
#if __LINUX_ARM_ARCH__ >= 7
- TEST_SUPPORTED( ".word 0xf590f000 @ pldw [r0, #0]")
- TEST_SUPPORTED( ".word 0xf797f000 @ pldw [r7, r0]")
- TEST_SUPPORTED( ".word 0xf798f18c @ pldw [r8, r12, lsl #3]");
+ TEST_SUPPORTED( __inst_arm(0xf590f000) " @ pldw [r0, #0]")
+ TEST_SUPPORTED( __inst_arm(0xf797f000) " @ pldw [r7, r0]")
+ TEST_SUPPORTED( __inst_arm(0xf798f18c) " @ pldw [r8, r12, lsl #3]");
#endif
#if __LINUX_ARM_ARCH__ >= 7
diff --git a/arch/arm/kernel/kprobes-test-thumb.c b/arch/arm/kernel/kprobes-test-thumb.c
index 5d8b8579222..844dd10d859 100644
--- a/arch/arm/kernel/kprobes-test-thumb.c
+++ b/arch/arm/kernel/kprobes-test-thumb.c
@@ -10,6 +10,7 @@
#include <linux/kernel.h>
#include <linux/module.h>
+#include <asm/opcodes.h>
#include "kprobes-test.h"
@@ -119,7 +120,7 @@ void kprobe_thumb16_test_cases(void)
TEST_R( "add sp" ", r",8,-8, "")
TEST_R( "add r",14,VAL1,", pc")
TEST_BF_R("add pc" ", r",0,2f-1f-8,"")
- TEST_UNSUPPORTED(".short 0x44ff @ add pc, pc")
+ TEST_UNSUPPORTED(__inst_thumb16(0x44ff) " @ add pc, pc")
TEST_RR( "cmp r",3,VAL1,", r",8,VAL2,"")
TEST_RR( "cmp r",8,VAL2,", r",0,VAL1,"")
@@ -150,7 +151,7 @@ void kprobe_thumb16_test_cases(void)
TEST_BF_R("blx r",0, 2f+1,"")
TEST_BB_R("blx r",14,2f+1,"")
- TEST_UNSUPPORTED(".short 0x47f8 @ blx pc")
+ TEST_UNSUPPORTED(__inst_thumb16(0x47f8) " @ blx pc")
TEST_GROUP("Load from Literal Pool")
@@ -237,8 +238,8 @@ DONT_TEST_IN_ITBLOCK(
TEST_R("rev r7, r",0, VAL2,"")
TEST_R("rev16 r0, r",7, VAL1,"")
TEST_R("rev16 r7, r",0, VAL2,"")
- TEST_UNSUPPORTED(".short 0xba80")
- TEST_UNSUPPORTED(".short 0xbabf")
+ TEST_UNSUPPORTED(__inst_thumb16(0xba80) "")
+ TEST_UNSUPPORTED(__inst_thumb16(0xbabf) "")
TEST_R("revsh r0, r",7, VAL1,"")
TEST_R("revsh r7, r",0, VAL2,"")
@@ -272,8 +273,8 @@ DONT_TEST_IN_ITBLOCK(
TEST("nop")
TEST("wfi")
TEST_SUPPORTED("wfe")
- TEST_UNSUPPORTED(".short 0xbf50") /* Unassigned hints */
- TEST_UNSUPPORTED(".short 0xbff0") /* Unassigned hints */
+ TEST_UNSUPPORTED(__inst_thumb16(0xbf50) "") /* Unassigned hints */
+ TEST_UNSUPPORTED(__inst_thumb16(0xbff0) "") /* Unassigned hints */
#define TEST_IT(code, code2) \
TESTCASE_START(code) \
@@ -310,8 +311,8 @@ CONDITION_INSTRUCTIONS(8,
TEST_BF("bgt 2f")
TEST_BB("blt 2b")
)
- TEST_UNSUPPORTED(".short 0xde00")
- TEST_UNSUPPORTED(".short 0xdeff")
+ TEST_UNSUPPORTED(__inst_thumb16(0xde00) "")
+ TEST_UNSUPPORTED(__inst_thumb16(0xdeff) "")
TEST_UNSUPPORTED("svc #0x00")
TEST_UNSUPPORTED("svc #0xff")
@@ -380,13 +381,13 @@ void kprobe_thumb32_test_cases(void)
TEST_THUMB_TO_ARM_INTERWORK_P("ldmia r",0,14*4,", {r12,pc}")
TEST_THUMB_TO_ARM_INTERWORK_P("ldmia r",13,2*4,", {r0-r12,pc}")
- TEST_UNSUPPORTED(".short 0xe88f,0x0101 @ stmia pc, {r0,r8}")
- TEST_UNSUPPORTED(".short 0xe92f,0x5f00 @ stmdb pc!, {r8-r12,r14}")
- TEST_UNSUPPORTED(".short 0xe8bd,0xc000 @ ldmia r13!, {r14,pc}")
- TEST_UNSUPPORTED(".short 0xe93e,0xc000 @ ldmdb r14!, {r14,pc}")
- TEST_UNSUPPORTED(".short 0xe8a7,0x3f00 @ stmia r7!, {r8-r12,sp}")
- TEST_UNSUPPORTED(".short 0xe8a7,0x9f00 @ stmia r7!, {r8-r12,pc}")
- TEST_UNSUPPORTED(".short 0xe93e,0x2010 @ ldmdb r14!, {r4,sp}")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe88f0101) " @ stmia pc, {r0,r8}")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe92f5f00) " @ stmdb pc!, {r8-r12,r14}")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe8bdc000) " @ ldmia r13!, {r14,pc}")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe93ec000) " @ ldmdb r14!, {r14,pc}")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe8a73f00) " @ stmia r7!, {r8-r12,sp}")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe8a79f00) " @ stmia r7!, {r8-r12,pc}")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe93e2010) " @ ldmdb r14!, {r4,sp}")
TEST_GROUP("Load/store double or exclusive, table branch")
@@ -402,12 +403,12 @@ void kprobe_thumb32_test_cases(void)
"3: .word "__stringify(VAL1)" \n\t"
" .word "__stringify(VAL2))
- TEST_UNSUPPORTED(".short 0xe9ff,0xec04 @ ldrd r14, r12, [pc, #16]!")
- TEST_UNSUPPORTED(".short 0xe8ff,0xec04 @ ldrd r14, r12, [pc], #16")
- TEST_UNSUPPORTED(".short 0xe9d4,0xd800 @ ldrd sp, r8, [r4]")
- TEST_UNSUPPORTED(".short 0xe9d4,0xf800 @ ldrd pc, r8, [r4]")
- TEST_UNSUPPORTED(".short 0xe9d4,0x7d00 @ ldrd r7, sp, [r4]")
- TEST_UNSUPPORTED(".short 0xe9d4,0x7f00 @ ldrd r7, pc, [r4]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe9ffec04) " @ ldrd r14, r12, [pc, #16]!")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe8ffec04) " @ ldrd r14, r12, [pc], #16")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe9d4d800) " @ ldrd sp, r8, [r4]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe9d4f800) " @ ldrd pc, r8, [r4]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe9d47d00) " @ ldrd r7, sp, [r4]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe9d47f00) " @ ldrd r7, pc, [r4]")
TEST_RRP("strd r",0, VAL1,", r",1, VAL2,", [r",1, 24,", #-16]")
TEST_RR( "strd r",12,VAL2,", r",14,VAL1,", [sp, #16]")
@@ -415,8 +416,8 @@ void kprobe_thumb32_test_cases(void)
TEST_RR( "strd r",14,VAL2,", r",12,VAL1,", [sp, #16]!")
TEST_RRP("strd r",1, VAL1,", r",0, VAL2,", [r",7, 24,"], #16")
TEST_RR( "strd r",7, VAL2,", r",8, VAL1,", [sp], #-16")
- TEST_UNSUPPORTED(".short 0xe9ef,0xec04 @ strd r14, r12, [pc, #16]!")
- TEST_UNSUPPORTED(".short 0xe8ef,0xec04 @ strd r14, r12, [pc], #16")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe9efec04) " @ strd r14, r12, [pc, #16]!")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe8efec04) " @ strd r14, r12, [pc], #16")
TEST_RX("tbb [pc, r",0, (9f-(1f+4)),"]",
"9: \n\t"
@@ -460,9 +461,9 @@ void kprobe_thumb32_test_cases(void)
"3: mvn r0, r0 \n\t"
"2: nop \n\t")
- TEST_UNSUPPORTED(".short 0xe8d1,0xf01f @ tbh [r1, pc]")
- TEST_UNSUPPORTED(".short 0xe8d1,0xf01d @ tbh [r1, sp]")
- TEST_UNSUPPORTED(".short 0xe8dd,0xf012 @ tbh [sp, r2]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe8d1f01f) " @ tbh [r1, pc]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe8d1f01d) " @ tbh [r1, sp]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xe8ddf012) " @ tbh [sp, r2]")
TEST_UNSUPPORTED("strexb r0, r1, [r2]")
TEST_UNSUPPORTED("strexh r0, r1, [r2]")
@@ -540,40 +541,40 @@ void kprobe_thumb32_test_cases(void)
TEST_RR("pkhtb r0, r",0, HH1,", r",1, HH2,"")
TEST_RR("pkhtb r14,r",12, HH1,", r",10,HH2,", asr #2")
- TEST_UNSUPPORTED(".short 0xea17,0x0f0d @ tst.w r7, sp")
- TEST_UNSUPPORTED(".short 0xea17,0x0f0f @ tst.w r7, pc")
- TEST_UNSUPPORTED(".short 0xea1d,0x0f07 @ tst.w sp, r7")
- TEST_UNSUPPORTED(".short 0xea1f,0x0f07 @ tst.w pc, r7")
- TEST_UNSUPPORTED(".short 0xf01d,0x1f08 @ tst sp, #0x00080008")
- TEST_UNSUPPORTED(".short 0xf01f,0x1f08 @ tst pc, #0x00080008")
-
- TEST_UNSUPPORTED(".short 0xea97,0x0f0d @ teq.w r7, sp")
- TEST_UNSUPPORTED(".short 0xea97,0x0f0f @ teq.w r7, pc")
- TEST_UNSUPPORTED(".short 0xea9d,0x0f07 @ teq.w sp, r7")
- TEST_UNSUPPORTED(".short 0xea9f,0x0f07 @ teq.w pc, r7")
- TEST_UNSUPPORTED(".short 0xf09d,0x1f08 @ tst sp, #0x00080008")
- TEST_UNSUPPORTED(".short 0xf09f,0x1f08 @ tst pc, #0x00080008")
-
- TEST_UNSUPPORTED(".short 0xeb17,0x0f0d @ cmn.w r7, sp")
- TEST_UNSUPPORTED(".short 0xeb17,0x0f0f @ cmn.w r7, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea170f0d) " @ tst.w r7, sp")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea170f0f) " @ tst.w r7, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea1d0f07) " @ tst.w sp, r7")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea1f0f07) " @ tst.w pc, r7")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf01d1f08) " @ tst sp, #0x00080008")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf01f1f08) " @ tst pc, #0x00080008")
+
+ TEST_UNSUPPORTED(__inst_thumb32(0xea970f0d) " @ teq.w r7, sp")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea970f0f) " @ teq.w r7, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea9d0f07) " @ teq.w sp, r7")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea9f0f07) " @ teq.w pc, r7")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf09d1f08) " @ tst sp, #0x00080008")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf09f1f08) " @ tst pc, #0x00080008")
+
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb170f0d) " @ cmn.w r7, sp")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb170f0f) " @ cmn.w r7, pc")
TEST_P("cmn.w sp, r",7,0,"")
- TEST_UNSUPPORTED(".short 0xeb1f,0x0f07 @ cmn.w pc, r7")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb1f0f07) " @ cmn.w pc, r7")
TEST( "cmn sp, #0x00080008")
- TEST_UNSUPPORTED(".short 0xf11f,0x1f08 @ cmn pc, #0x00080008")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf11f1f08) " @ cmn pc, #0x00080008")
- TEST_UNSUPPORTED(".short 0xebb7,0x0f0d @ cmp.w r7, sp")
- TEST_UNSUPPORTED(".short 0xebb7,0x0f0f @ cmp.w r7, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xebb70f0d) " @ cmp.w r7, sp")
+ TEST_UNSUPPORTED(__inst_thumb32(0xebb70f0f) " @ cmp.w r7, pc")
TEST_P("cmp.w sp, r",7,0,"")
- TEST_UNSUPPORTED(".short 0xebbf,0x0f07 @ cmp.w pc, r7")
+ TEST_UNSUPPORTED(__inst_thumb32(0xebbf0f07) " @ cmp.w pc, r7")
TEST( "cmp sp, #0x00080008")
- TEST_UNSUPPORTED(".short 0xf1bf,0x1f08 @ cmp pc, #0x00080008")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf1bf1f08) " @ cmp pc, #0x00080008")
- TEST_UNSUPPORTED(".short 0xea5f,0x070d @ movs.w r7, sp")
- TEST_UNSUPPORTED(".short 0xea5f,0x070f @ movs.w r7, pc")
- TEST_UNSUPPORTED(".short 0xea5f,0x0d07 @ movs.w sp, r7")
- TEST_UNSUPPORTED(".short 0xea4f,0x0f07 @ mov.w pc, r7")
- TEST_UNSUPPORTED(".short 0xf04f,0x1d08 @ mov sp, #0x00080008")
- TEST_UNSUPPORTED(".short 0xf04f,0x1f08 @ mov pc, #0x00080008")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea5f070d) " @ movs.w r7, sp")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea5f070f) " @ movs.w r7, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea5f0d07) " @ movs.w sp, r7")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea4f0f07) " @ mov.w pc, r7")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf04f1d08) " @ mov sp, #0x00080008")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf04f1f08) " @ mov pc, #0x00080008")
TEST_R("add.w r0, sp, r",1, 4,"")
TEST_R("adds r0, sp, r",1, 4,", asl #3")
@@ -581,15 +582,15 @@ void kprobe_thumb32_test_cases(void)
TEST_R("add r0, sp, r",1, 16,", ror #1")
TEST_R("add.w sp, sp, r",1, 4,"")
TEST_R("add sp, sp, r",1, 4,", asl #3")
- TEST_UNSUPPORTED(".short 0xeb0d,0x1d01 @ add sp, sp, r1, asl #4")
- TEST_UNSUPPORTED(".short 0xeb0d,0x0d71 @ add sp, sp, r1, ror #1")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb0d1d01) " @ add sp, sp, r1, asl #4")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb0d0d71) " @ add sp, sp, r1, ror #1")
TEST( "add.w r0, sp, #24")
TEST( "add.w sp, sp, #24")
- TEST_UNSUPPORTED(".short 0xeb0d,0x0f01 @ add pc, sp, r1")
- TEST_UNSUPPORTED(".short 0xeb0d,0x000f @ add r0, sp, pc")
- TEST_UNSUPPORTED(".short 0xeb0d,0x000d @ add r0, sp, sp")
- TEST_UNSUPPORTED(".short 0xeb0d,0x0d0f @ add sp, sp, pc")
- TEST_UNSUPPORTED(".short 0xeb0d,0x0d0d @ add sp, sp, sp")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb0d0f01) " @ add pc, sp, r1")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb0d000f) " @ add r0, sp, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb0d000d) " @ add r0, sp, sp")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb0d0d0f) " @ add sp, sp, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb0d0d0d) " @ add sp, sp, sp")
TEST_R("sub.w r0, sp, r",1, 4,"")
TEST_R("subs r0, sp, r",1, 4,", asl #3")
@@ -597,54 +598,54 @@ void kprobe_thumb32_test_cases(void)
TEST_R("sub r0, sp, r",1, 16,", ror #1")
TEST_R("sub.w sp, sp, r",1, 4,"")
TEST_R("sub sp, sp, r",1, 4,", asl #3")
- TEST_UNSUPPORTED(".short 0xebad,0x1d01 @ sub sp, sp, r1, asl #4")
- TEST_UNSUPPORTED(".short 0xebad,0x0d71 @ sub sp, sp, r1, ror #1")
- TEST_UNSUPPORTED(".short 0xebad,0x0f01 @ sub pc, sp, r1")
+ TEST_UNSUPPORTED(__inst_thumb32(0xebad1d01) " @ sub sp, sp, r1, asl #4")
+ TEST_UNSUPPORTED(__inst_thumb32(0xebad0d71) " @ sub sp, sp, r1, ror #1")
+ TEST_UNSUPPORTED(__inst_thumb32(0xebad0f01) " @ sub pc, sp, r1")
TEST( "sub.w r0, sp, #24")
TEST( "sub.w sp, sp, #24")
- TEST_UNSUPPORTED(".short 0xea02,0x010f @ and r1, r2, pc")
- TEST_UNSUPPORTED(".short 0xea0f,0x0103 @ and r1, pc, r3")
- TEST_UNSUPPORTED(".short 0xea02,0x0f03 @ and pc, r2, r3")
- TEST_UNSUPPORTED(".short 0xea02,0x010d @ and r1, r2, sp")
- TEST_UNSUPPORTED(".short 0xea0d,0x0103 @ and r1, sp, r3")
- TEST_UNSUPPORTED(".short 0xea02,0x0d03 @ and sp, r2, r3")
- TEST_UNSUPPORTED(".short 0xf00d,0x1108 @ and r1, sp, #0x00080008")
- TEST_UNSUPPORTED(".short 0xf00f,0x1108 @ and r1, pc, #0x00080008")
- TEST_UNSUPPORTED(".short 0xf002,0x1d08 @ and sp, r8, #0x00080008")
- TEST_UNSUPPORTED(".short 0xf002,0x1f08 @ and pc, r8, #0x00080008")
-
- TEST_UNSUPPORTED(".short 0xeb02,0x010f @ add r1, r2, pc")
- TEST_UNSUPPORTED(".short 0xeb0f,0x0103 @ add r1, pc, r3")
- TEST_UNSUPPORTED(".short 0xeb02,0x0f03 @ add pc, r2, r3")
- TEST_UNSUPPORTED(".short 0xeb02,0x010d @ add r1, r2, sp")
- TEST_SUPPORTED( ".short 0xeb0d,0x0103 @ add r1, sp, r3")
- TEST_UNSUPPORTED(".short 0xeb02,0x0d03 @ add sp, r2, r3")
- TEST_SUPPORTED( ".short 0xf10d,0x1108 @ add r1, sp, #0x00080008")
- TEST_UNSUPPORTED(".short 0xf10d,0x1f08 @ add pc, sp, #0x00080008")
- TEST_UNSUPPORTED(".short 0xf10f,0x1108 @ add r1, pc, #0x00080008")
- TEST_UNSUPPORTED(".short 0xf102,0x1d08 @ add sp, r8, #0x00080008")
- TEST_UNSUPPORTED(".short 0xf102,0x1f08 @ add pc, r8, #0x00080008")
-
- TEST_UNSUPPORTED(".short 0xeaa0,0x0000")
- TEST_UNSUPPORTED(".short 0xeaf0,0x0000")
- TEST_UNSUPPORTED(".short 0xeb20,0x0000")
- TEST_UNSUPPORTED(".short 0xeb80,0x0000")
- TEST_UNSUPPORTED(".short 0xebe0,0x0000")
-
- TEST_UNSUPPORTED(".short 0xf0a0,0x0000")
- TEST_UNSUPPORTED(".short 0xf0c0,0x0000")
- TEST_UNSUPPORTED(".short 0xf0f0,0x0000")
- TEST_UNSUPPORTED(".short 0xf120,0x0000")
- TEST_UNSUPPORTED(".short 0xf180,0x0000")
- TEST_UNSUPPORTED(".short 0xf1e0,0x0000")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea02010f) " @ and r1, r2, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea0f0103) " @ and r1, pc, r3")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea020f03) " @ and pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea02010d) " @ and r1, r2, sp")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea0d0103) " @ and r1, sp, r3")
+ TEST_UNSUPPORTED(__inst_thumb32(0xea020d03) " @ and sp, r2, r3")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf00d1108) " @ and r1, sp, #0x00080008")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf00f1108) " @ and r1, pc, #0x00080008")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf0021d08) " @ and sp, r8, #0x00080008")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf0021f08) " @ and pc, r8, #0x00080008")
+
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb02010f) " @ add r1, r2, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb0f0103) " @ add r1, pc, r3")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb020f03) " @ add pc, r2, r3")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb02010d) " @ add r1, r2, sp")
+ TEST_SUPPORTED( __inst_thumb32(0xeb0d0103) " @ add r1, sp, r3")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb020d03) " @ add sp, r2, r3")
+ TEST_SUPPORTED( __inst_thumb32(0xf10d1108) " @ add r1, sp, #0x00080008")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf10d1f08) " @ add pc, sp, #0x00080008")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf10f1108) " @ add r1, pc, #0x00080008")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf1021d08) " @ add sp, r8, #0x00080008")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf1021f08) " @ add pc, r8, #0x00080008")
+
+ TEST_UNSUPPORTED(__inst_thumb32(0xeaa00000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeaf00000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb200000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeb800000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xebe00000) "")
+
+ TEST_UNSUPPORTED(__inst_thumb32(0xf0a00000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf0c00000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf0f00000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf1200000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf1800000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf1e00000) "")
TEST_GROUP("Coprocessor instructions")
- TEST_UNSUPPORTED(".short 0xec00,0x0000")
- TEST_UNSUPPORTED(".short 0xeff0,0x0000")
- TEST_UNSUPPORTED(".short 0xfc00,0x0000")
- TEST_UNSUPPORTED(".short 0xfff0,0x0000")
+ TEST_UNSUPPORTED(__inst_thumb32(0xec000000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xeff00000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfc000000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfff00000) "")
TEST_GROUP("Data-processing (plain binary immediate)")
@@ -652,92 +653,92 @@ void kprobe_thumb32_test_cases(void)
TEST( "addw r14, sp, #0xf5a")
TEST( "addw sp, sp, #0x20")
TEST( "addw r7, pc, #0x888")
- TEST_UNSUPPORTED(".short 0xf20f,0x1f20 @ addw pc, pc, #0x120")
- TEST_UNSUPPORTED(".short 0xf20d,0x1f20 @ addw pc, sp, #0x120")
- TEST_UNSUPPORTED(".short 0xf20f,0x1d20 @ addw sp, pc, #0x120")
- TEST_UNSUPPORTED(".short 0xf200,0x1d20 @ addw sp, r0, #0x120")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf20f1f20) " @ addw pc, pc, #0x120")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf20d1f20) " @ addw pc, sp, #0x120")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf20f1d20) " @ addw sp, pc, #0x120")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf2001d20) " @ addw sp, r0, #0x120")
TEST_R("subw r0, r",1, VAL1,", #0x123")
TEST( "subw r14, sp, #0xf5a")
TEST( "subw sp, sp, #0x20")
TEST( "subw r7, pc, #0x888")
- TEST_UNSUPPORTED(".short 0xf2af,0x1f20 @ subw pc, pc, #0x120")
- TEST_UNSUPPORTED(".short 0xf2ad,0x1f20 @ subw pc, sp, #0x120")
- TEST_UNSUPPORTED(".short 0xf2af,0x1d20 @ subw sp, pc, #0x120")
- TEST_UNSUPPORTED(".short 0xf2a0,0x1d20 @ subw sp, r0, #0x120")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf2af1f20) " @ subw pc, pc, #0x120")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf2ad1f20) " @ subw pc, sp, #0x120")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf2af1d20) " @ subw sp, pc, #0x120")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf2a01d20) " @ subw sp, r0, #0x120")
TEST("movw r0, #0")
TEST("movw r0, #0xffff")
TEST("movw lr, #0xffff")
- TEST_UNSUPPORTED(".short 0xf240,0x0d00 @ movw sp, #0")
- TEST_UNSUPPORTED(".short 0xf240,0x0f00 @ movw pc, #0")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf2400d00) " @ movw sp, #0")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf2400f00) " @ movw pc, #0")
TEST_R("movt r",0, VAL1,", #0")
TEST_R("movt r",0, VAL2,", #0xffff")
TEST_R("movt r",14,VAL1,", #0xffff")
- TEST_UNSUPPORTED(".short 0xf2c0,0x0d00 @ movt sp, #0")
- TEST_UNSUPPORTED(".short 0xf2c0,0x0f00 @ movt pc, #0")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf2c00d00) " @ movt sp, #0")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf2c00f00) " @ movt pc, #0")
TEST_R( "ssat r0, #24, r",0, VAL1,"")
TEST_R( "ssat r14, #24, r",12, VAL2,"")
TEST_R( "ssat r0, #24, r",0, VAL1,", lsl #8")
TEST_R( "ssat r14, #24, r",12, VAL2,", asr #8")
- TEST_UNSUPPORTED(".short 0xf30c,0x0d17 @ ssat sp, #24, r12")
- TEST_UNSUPPORTED(".short 0xf30c,0x0f17 @ ssat pc, #24, r12")
- TEST_UNSUPPORTED(".short 0xf30d,0x0c17 @ ssat r12, #24, sp")
- TEST_UNSUPPORTED(".short 0xf30f,0x0c17 @ ssat r12, #24, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf30c0d17) " @ ssat sp, #24, r12")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf30c0f17) " @ ssat pc, #24, r12")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf30d0c17) " @ ssat r12, #24, sp")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf30f0c17) " @ ssat r12, #24, pc")
TEST_R( "usat r0, #24, r",0, VAL1,"")
TEST_R( "usat r14, #24, r",12, VAL2,"")
TEST_R( "usat r0, #24, r",0, VAL1,", lsl #8")
TEST_R( "usat r14, #24, r",12, VAL2,", asr #8")
- TEST_UNSUPPORTED(".short 0xf38c,0x0d17 @ usat sp, #24, r12")
- TEST_UNSUPPORTED(".short 0xf38c,0x0f17 @ usat pc, #24, r12")
- TEST_UNSUPPORTED(".short 0xf38d,0x0c17 @ usat r12, #24, sp")
- TEST_UNSUPPORTED(".short 0xf38f,0x0c17 @ usat r12, #24, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf38c0d17) " @ usat sp, #24, r12")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf38c0f17) " @ usat pc, #24, r12")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf38d0c17) " @ usat r12, #24, sp")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf38f0c17) " @ usat r12, #24, pc")
TEST_R( "ssat16 r0, #12, r",0, HH1,"")
TEST_R( "ssat16 r14, #12, r",12, HH2,"")
- TEST_UNSUPPORTED(".short 0xf32c,0x0d0b @ ssat16 sp, #12, r12")
- TEST_UNSUPPORTED(".short 0xf32c,0x0f0b @ ssat16 pc, #12, r12")
- TEST_UNSUPPORTED(".short 0xf32d,0x0c0b @ ssat16 r12, #12, sp")
- TEST_UNSUPPORTED(".short 0xf32f,0x0c0b @ ssat16 r12, #12, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf32c0d0b) " @ ssat16 sp, #12, r12")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf32c0f0b) " @ ssat16 pc, #12, r12")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf32d0c0b) " @ ssat16 r12, #12, sp")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf32f0c0b) " @ ssat16 r12, #12, pc")
TEST_R( "usat16 r0, #12, r",0, HH1,"")
TEST_R( "usat16 r14, #12, r",12, HH2,"")
- TEST_UNSUPPORTED(".short 0xf3ac,0x0d0b @ usat16 sp, #12, r12")
- TEST_UNSUPPORTED(".short 0xf3ac,0x0f0b @ usat16 pc, #12, r12")
- TEST_UNSUPPORTED(".short 0xf3ad,0x0c0b @ usat16 r12, #12, sp")
- TEST_UNSUPPORTED(".short 0xf3af,0x0c0b @ usat16 r12, #12, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf3ac0d0b) " @ usat16 sp, #12, r12")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf3ac0f0b) " @ usat16 pc, #12, r12")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf3ad0c0b) " @ usat16 r12, #12, sp")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf3af0c0b) " @ usat16 r12, #12, pc")
TEST_R( "sbfx r0, r",0 , VAL1,", #0, #31")
TEST_R( "sbfx r14, r",12, VAL2,", #8, #16")
TEST_R( "sbfx r4, r",10, VAL1,", #16, #15")
- TEST_UNSUPPORTED(".short 0xf34c,0x2d0f @ sbfx sp, r12, #8, #16")
- TEST_UNSUPPORTED(".short 0xf34c,0x2f0f @ sbfx pc, r12, #8, #16")
- TEST_UNSUPPORTED(".short 0xf34d,0x2c0f @ sbfx r12, sp, #8, #16")
- TEST_UNSUPPORTED(".short 0xf34f,0x2c0f @ sbfx r12, pc, #8, #16")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf34c2d0f) " @ sbfx sp, r12, #8, #16")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf34c2f0f) " @ sbfx pc, r12, #8, #16")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf34d2c0f) " @ sbfx r12, sp, #8, #16")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf34f2c0f) " @ sbfx r12, pc, #8, #16")
TEST_R( "ubfx r0, r",0 , VAL1,", #0, #31")
TEST_R( "ubfx r14, r",12, VAL2,", #8, #16")
TEST_R( "ubfx r4, r",10, VAL1,", #16, #15")
- TEST_UNSUPPORTED(".short 0xf3cc,0x2d0f @ ubfx sp, r12, #8, #16")
- TEST_UNSUPPORTED(".short 0xf3cc,0x2f0f @ ubfx pc, r12, #8, #16")
- TEST_UNSUPPORTED(".short 0xf3cd,0x2c0f @ ubfx r12, sp, #8, #16")
- TEST_UNSUPPORTED(".short 0xf3cf,0x2c0f @ ubfx r12, pc, #8, #16")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf3cc2d0f) " @ ubfx sp, r12, #8, #16")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf3cc2f0f) " @ ubfx pc, r12, #8, #16")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf3cd2c0f) " @ ubfx r12, sp, #8, #16")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf3cf2c0f) " @ ubfx r12, pc, #8, #16")
TEST_R( "bfc r",0, VAL1,", #4, #20")
TEST_R( "bfc r",14,VAL2,", #4, #20")
TEST_R( "bfc r",7, VAL1,", #0, #31")
TEST_R( "bfc r",8, VAL2,", #0, #31")
- TEST_UNSUPPORTED(".short 0xf36f,0x0d1e @ bfc sp, #0, #31")
- TEST_UNSUPPORTED(".short 0xf36f,0x0f1e @ bfc pc, #0, #31")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf36f0d1e) " @ bfc sp, #0, #31")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf36f0f1e) " @ bfc pc, #0, #31")
TEST_RR( "bfi r",0, VAL1,", r",0 , VAL2,", #0, #31")
TEST_RR( "bfi r",12,VAL1,", r",14 , VAL2,", #4, #20")
- TEST_UNSUPPORTED(".short 0xf36e,0x1d17 @ bfi sp, r14, #4, #20")
- TEST_UNSUPPORTED(".short 0xf36e,0x1f17 @ bfi pc, r14, #4, #20")
- TEST_UNSUPPORTED(".short 0xf36d,0x1e17 @ bfi r14, sp, #4, #20")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf36e1d17) " @ bfi sp, r14, #4, #20")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf36e1f17) " @ bfi pc, r14, #4, #20")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf36d1e17) " @ bfi r14, sp, #4, #20")
TEST_GROUP("Branches and miscellaneous control")
@@ -775,14 +776,14 @@ CONDITION_INSTRUCTIONS(22,
TEST("mrs r0, cpsr")
TEST("mrs r14, cpsr")
- TEST_UNSUPPORTED(".short 0xf3ef,0x8d00 @ mrs sp, spsr")
- TEST_UNSUPPORTED(".short 0xf3ef,0x8f00 @ mrs pc, spsr")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf3ef8d00) " @ mrs sp, spsr")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf3ef8f00) " @ mrs pc, spsr")
TEST_UNSUPPORTED("mrs r0, spsr")
TEST_UNSUPPORTED("mrs lr, spsr")
- TEST_UNSUPPORTED(".short 0xf7f0,0x8000 @ smc #0")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf7f08000) " @ smc #0")
- TEST_UNSUPPORTED(".short 0xf7f0,0xa000 @ undefeined")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf7f0a000) " @ undefeined")
TEST_BF( "b.w 2f")
TEST_BB( "b.w 2b")
@@ -829,15 +830,15 @@ CONDITION_INSTRUCTIONS(22,
SINGLE_STORE("")
TEST("str sp, [sp]")
- TEST_UNSUPPORTED(".short 0xf8cf,0xe000 @ str r14, [pc]")
- TEST_UNSUPPORTED(".short 0xf8ce,0xf000 @ str pc, [r14]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf8cfe000) " @ str r14, [pc]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf8cef000) " @ str pc, [r14]")
TEST_GROUP("Advanced SIMD element or structure load/store instructions")
- TEST_UNSUPPORTED(".short 0xf900,0x0000")
- TEST_UNSUPPORTED(".short 0xf92f,0xffff")
- TEST_UNSUPPORTED(".short 0xf980,0x0000")
- TEST_UNSUPPORTED(".short 0xf9ef,0xffff")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf9000000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf92fffff) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf9800000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf9efffff) "")
TEST_GROUP("Load single data item and memory hints")
@@ -881,20 +882,20 @@ CONDITION_INSTRUCTIONS(22,
TEST_SUPPORTED("ldr sp, 99f")
TEST_SUPPORTED("ldr pc, 99f")
- TEST_UNSUPPORTED(".short 0xf854,0x700d @ ldr r7, [r4, sp]")
- TEST_UNSUPPORTED(".short 0xf854,0x700f @ ldr r7, [r4, pc]")
- TEST_UNSUPPORTED(".short 0xf814,0x700d @ ldrb r7, [r4, sp]")
- TEST_UNSUPPORTED(".short 0xf814,0x700f @ ldrb r7, [r4, pc]")
- TEST_UNSUPPORTED(".short 0xf89f,0xd004 @ ldrb sp, 99f")
- TEST_UNSUPPORTED(".short 0xf814,0xd008 @ ldrb sp, [r4, r8]")
- TEST_UNSUPPORTED(".short 0xf894,0xd000 @ ldrb sp, [r4]")
-
- TEST_UNSUPPORTED(".short 0xf860,0x0000") /* Unallocated space */
- TEST_UNSUPPORTED(".short 0xf9ff,0xffff") /* Unallocated space */
- TEST_UNSUPPORTED(".short 0xf950,0x0000") /* Unallocated space */
- TEST_UNSUPPORTED(".short 0xf95f,0xffff") /* Unallocated space */
- TEST_UNSUPPORTED(".short 0xf800,0x0800") /* Unallocated space */
- TEST_UNSUPPORTED(".short 0xf97f,0xfaff") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xf854700d) " @ ldr r7, [r4, sp]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf854700f) " @ ldr r7, [r4, pc]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf814700d) " @ ldrb r7, [r4, sp]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf814700f) " @ ldrb r7, [r4, pc]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf89fd004) " @ ldrb sp, 99f")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf814d008) " @ ldrb sp, [r4, r8]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf894d000) " @ ldrb sp, [r4]")
+
+ TEST_UNSUPPORTED(__inst_thumb32(0xf8600000) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xf9ffffff) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xf9500000) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xf95fffff) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xf8000800) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xf97ffaff) "") /* Unallocated space */
TEST( "pli [pc, #4]")
TEST( "pli [pc, #-4]")
@@ -902,22 +903,22 @@ CONDITION_INSTRUCTIONS(22,
TEST( "pld [pc, #-4]")
TEST_P( "pld [r",0,-1024,", #1024]")
- TEST( ".short 0xf8b0,0xf400 @ pldw [r0, #1024]")
+ TEST( __inst_thumb32(0xf8b0f400) " @ pldw [r0, #1024]")
TEST_P( "pli [r",4, 0b,", #1024]")
TEST_P( "pld [r",7, 120,", #-120]")
- TEST( ".short 0xf837,0xfc78 @ pldw [r7, #-120]")
+ TEST( __inst_thumb32(0xf837fc78) " @ pldw [r7, #-120]")
TEST_P( "pli [r",11,120,", #-120]")
TEST( "pld [sp, #0]")
TEST_PR("pld [r",7, 24, ", r",0, 16,"]")
TEST_PR("pld [r",8, 24, ", r",12,16,", lsl #3]")
- TEST_SUPPORTED(".short 0xf837,0xf000 @ pldw [r7, r0]")
- TEST_SUPPORTED(".short 0xf838,0xf03c @ pldw [r8, r12, lsl #3]");
+ TEST_SUPPORTED(__inst_thumb32(0xf837f000) " @ pldw [r7, r0]")
+ TEST_SUPPORTED(__inst_thumb32(0xf838f03c) " @ pldw [r8, r12, lsl #3]");
TEST_RR("pli [r",12,0b,", r",0, 16,"]")
TEST_RR("pli [r",0, 0b,", r",12,16,", lsl #3]")
TEST_R( "pld [sp, r",1, 16,"]")
- TEST_UNSUPPORTED(".short 0xf817,0xf00d @pld [r7, sp]")
- TEST_UNSUPPORTED(".short 0xf817,0xf00f @pld [r7, pc]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf817f00d) " @pld [r7, sp]")
+ TEST_UNSUPPORTED(__inst_thumb32(0xf817f00f) " @pld [r7, pc]")
TEST_GROUP("Data-processing (register)")
@@ -934,21 +935,21 @@ CONDITION_INSTRUCTIONS(22,
SHIFTS32("ror")
SHIFTS32("rors")
- TEST_UNSUPPORTED(".short 0xfa01,0xff02 @ lsl pc, r1, r2")
- TEST_UNSUPPORTED(".short 0xfa01,0xfd02 @ lsl sp, r1, r2")
- TEST_UNSUPPORTED(".short 0xfa0f,0xf002 @ lsl r0, pc, r2")
- TEST_UNSUPPORTED(".short 0xfa0d,0xf002 @ lsl r0, sp, r2")
- TEST_UNSUPPORTED(".short 0xfa01,0xf00f @ lsl r0, r1, pc")
- TEST_UNSUPPORTED(".short 0xfa01,0xf00d @ lsl r0, r1, sp")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa01ff02) " @ lsl pc, r1, r2")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa01fd02) " @ lsl sp, r1, r2")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa0ff002) " @ lsl r0, pc, r2")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa0df002) " @ lsl r0, sp, r2")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa01f00f) " @ lsl r0, r1, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa01f00d) " @ lsl r0, r1, sp")
TEST_RR( "sxtah r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "sxtah r14,r",12, HH2,", r",10,HH1,", ror #8")
TEST_R( "sxth r8, r",7, HH1,"")
- TEST_UNSUPPORTED(".short 0xfa0f,0xff87 @ sxth pc, r7");
- TEST_UNSUPPORTED(".short 0xfa0f,0xfd87 @ sxth sp, r7");
- TEST_UNSUPPORTED(".short 0xfa0f,0xf88f @ sxth r8, pc");
- TEST_UNSUPPORTED(".short 0xfa0f,0xf88d @ sxth r8, sp");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa0fff87) " @ sxth pc, r7");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa0ffd87) " @ sxth sp, r7");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa0ff88f) " @ sxth r8, pc");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa0ff88d) " @ sxth r8, sp");
TEST_RR( "uxtah r0, r",0, HH1,", r",1, HH2,"")
TEST_RR( "uxtah r14,r",12, HH2,", r",10,HH1,", ror #8")
@@ -970,8 +971,8 @@ CONDITION_INSTRUCTIONS(22,
TEST_RR( "uxtab r14,r",12, HH2,", r",10,HH1,", ror #8")
TEST_R( "uxtb r8, r",7, HH1,"")
- TEST_UNSUPPORTED(".short 0xfa60,0x00f0")
- TEST_UNSUPPORTED(".short 0xfa7f,0xffff")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa6000f0) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa7fffff) "")
#define PARALLEL_ADD_SUB(op) \
TEST_RR( op"add16 r0, r",0, HH1,", r",1, HH2,"") \
@@ -1019,10 +1020,10 @@ CONDITION_INSTRUCTIONS(22,
TEST_R("revsh.w r0, r",0, VAL1,"")
TEST_R("revsh r14, r",12, VAL2,"")
- TEST_UNSUPPORTED(".short 0xfa9c,0xff8c @ rev pc, r12");
- TEST_UNSUPPORTED(".short 0xfa9c,0xfd8c @ rev sp, r12");
- TEST_UNSUPPORTED(".short 0xfa9f,0xfe8f @ rev r14, pc");
- TEST_UNSUPPORTED(".short 0xfa9d,0xfe8d @ rev r14, sp");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa9cff8c) " @ rev pc, r12");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa9cfd8c) " @ rev sp, r12");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa9ffe8f) " @ rev r14, pc");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa9dfe8d) " @ rev r14, sp");
TEST_RR("sel r0, r",0, VAL1,", r",1, VAL2,"")
TEST_RR("sel r14, r",12,VAL1,", r",10, VAL2,"")
@@ -1031,31 +1032,31 @@ CONDITION_INSTRUCTIONS(22,
TEST_R("clz r7, r",14,0x1,"")
TEST_R("clz lr, r",7, 0xffffffff,"")
- TEST_UNSUPPORTED(".short 0xfa80,0xf030") /* Unallocated space */
- TEST_UNSUPPORTED(".short 0xfaff,0xff7f") /* Unallocated space */
- TEST_UNSUPPORTED(".short 0xfab0,0xf000") /* Unallocated space */
- TEST_UNSUPPORTED(".short 0xfaff,0xff7f") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xfa80f030) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xfaffff7f) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xfab0f000) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xfaffff7f) "") /* Unallocated space */
TEST_GROUP("Multiply, multiply accumulate, and absolute difference operations")
TEST_RR( "mul r0, r",1, VAL1,", r",2, VAL2,"")
TEST_RR( "mul r7, r",8, VAL2,", r",9, VAL2,"")
- TEST_UNSUPPORTED(".short 0xfb08,0xff09 @ mul pc, r8, r9")
- TEST_UNSUPPORTED(".short 0xfb08,0xfd09 @ mul sp, r8, r9")
- TEST_UNSUPPORTED(".short 0xfb0f,0xf709 @ mul r7, pc, r9")
- TEST_UNSUPPORTED(".short 0xfb0d,0xf709 @ mul r7, sp, r9")
- TEST_UNSUPPORTED(".short 0xfb08,0xf70f @ mul r7, r8, pc")
- TEST_UNSUPPORTED(".short 0xfb08,0xf70d @ mul r7, r8, sp")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb08ff09) " @ mul pc, r8, r9")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb08fd09) " @ mul sp, r8, r9")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb0ff709) " @ mul r7, pc, r9")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb0df709) " @ mul r7, sp, r9")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb08f70f) " @ mul r7, r8, pc")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb08f70d) " @ mul r7, r8, sp")
TEST_RRR( "mla r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"")
TEST_RRR( "mla r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"")
- TEST_UNSUPPORTED(".short 0xfb08,0xaf09 @ mla pc, r8, r9, r10");
- TEST_UNSUPPORTED(".short 0xfb08,0xad09 @ mla sp, r8, r9, r10");
- TEST_UNSUPPORTED(".short 0xfb0f,0xa709 @ mla r7, pc, r9, r10");
- TEST_UNSUPPORTED(".short 0xfb0d,0xa709 @ mla r7, sp, r9, r10");
- TEST_UNSUPPORTED(".short 0xfb08,0xa70f @ mla r7, r8, pc, r10");
- TEST_UNSUPPORTED(".short 0xfb08,0xa70d @ mla r7, r8, sp, r10");
- TEST_UNSUPPORTED(".short 0xfb08,0xd709 @ mla r7, r8, r9, sp");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb08af09) " @ mla pc, r8, r9, r10");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb08ad09) " @ mla sp, r8, r9, r10");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb0fa709) " @ mla r7, pc, r9, r10");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb0da709) " @ mla r7, sp, r9, r10");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb08a70f) " @ mla r7, r8, pc, r10");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb08a70d) " @ mla r7, r8, sp, r10");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb08d709) " @ mla r7, r8, r9, sp");
TEST_RRR( "mls r0, r",1, VAL1,", r",2, VAL2,", r",3, VAL3,"")
TEST_RRR( "mls r7, r",8, VAL3,", r",9, VAL1,", r",10, VAL2,"")
@@ -1123,25 +1124,25 @@ CONDITION_INSTRUCTIONS(22,
TEST_RR( "usad8 r0, r",0, VAL1,", r",1, VAL2,"")
TEST_RR( "usad8 r14, r",12,VAL2,", r",10,VAL1,"")
- TEST_UNSUPPORTED(".short 0xfb00,0xf010") /* Unallocated space */
- TEST_UNSUPPORTED(".short 0xfb0f,0xff1f") /* Unallocated space */
- TEST_UNSUPPORTED(".short 0xfb70,0xf010") /* Unallocated space */
- TEST_UNSUPPORTED(".short 0xfb7f,0xff1f") /* Unallocated space */
- TEST_UNSUPPORTED(".short 0xfb70,0x0010") /* Unallocated space */
- TEST_UNSUPPORTED(".short 0xfb7f,0xff1f") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb00f010) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb0fff1f) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb70f010) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb7fff1f) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb700010) "") /* Unallocated space */
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb7fff1f) "") /* Unallocated space */
TEST_GROUP("Long multiply, long multiply accumulate, and divide")
TEST_RR( "smull r0, r1, r",2, VAL1,", r",3, VAL2,"")
TEST_RR( "smull r7, r8, r",9, VAL2,", r",10, VAL1,"")
- TEST_UNSUPPORTED(".short 0xfb89,0xf80a @ smull pc, r8, r9, r10");
- TEST_UNSUPPORTED(".short 0xfb89,0xd80a @ smull sp, r8, r9, r10");
- TEST_UNSUPPORTED(".short 0xfb89,0x7f0a @ smull r7, pc, r9, r10");
- TEST_UNSUPPORTED(".short 0xfb89,0x7d0a @ smull r7, sp, r9, r10");
- TEST_UNSUPPORTED(".short 0xfb8f,0x780a @ smull r7, r8, pc, r10");
- TEST_UNSUPPORTED(".short 0xfb8d,0x780a @ smull r7, r8, sp, r10");
- TEST_UNSUPPORTED(".short 0xfb89,0x780f @ smull r7, r8, r9, pc");
- TEST_UNSUPPORTED(".short 0xfb89,0x780d @ smull r7, r8, r9, sp");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb89f80a) " @ smull pc, r8, r9, r10");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb89d80a) " @ smull sp, r8, r9, r10");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb897f0a) " @ smull r7, pc, r9, r10");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb897d0a) " @ smull r7, sp, r9, r10");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb8f780a) " @ smull r7, r8, pc, r10");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb8d780a) " @ smull r7, r8, sp, r10");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb89780f) " @ smull r7, r8, r9, pc");
+ TEST_UNSUPPORTED(__inst_thumb32(0xfb89780d) " @ smull r7, r8, r9, sp");
TEST_RR( "umull r0, r1, r",2, VAL1,", r",3, VAL2,"")
TEST_RR( "umull r7, r8, r",9, VAL2,", r",10, VAL1,"")
@@ -1175,8 +1176,8 @@ CONDITION_INSTRUCTIONS(22,
TEST_GROUP("Coprocessor instructions")
- TEST_UNSUPPORTED(".short 0xfc00,0x0000")
- TEST_UNSUPPORTED(".short 0xffff,0xffff")
+ TEST_UNSUPPORTED(__inst_thumb32(0xfc000000) "")
+ TEST_UNSUPPORTED(__inst_thumb32(0xffffffff) "")
TEST_GROUP("Testing instructions in IT blocks")
diff --git a/arch/arm/kernel/kprobes-test.c b/arch/arm/kernel/kprobes-test.c
index 0cd63d080c7..08d731294bc 100644
--- a/arch/arm/kernel/kprobes-test.c
+++ b/arch/arm/kernel/kprobes-test.c
@@ -113,7 +113,7 @@
* @ start of inline data...
* .ascii "mov r0, r7" @ text title for test case
* .byte 0
- * .align 2
+ * .align 2, 0
*
* @ TEST_ARG_REG
* .byte ARG_TYPE_REG
@@ -201,10 +201,14 @@
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/kprobes.h>
-
+#include <linux/errno.h>
+#include <linux/stddef.h>
+#include <linux/bug.h>
#include <asm/opcodes.h>
#include "kprobes.h"
+#include "probes-arm.h"
+#include "probes-thumb.h"
#include "kprobes-test.h"
@@ -221,6 +225,7 @@ static int pre_handler_called;
static int post_handler_called;
static int jprobe_func_called;
static int kretprobe_handler_called;
+static int tests_failed;
#define FUNC_ARG1 0x12345678
#define FUNC_ARG2 0xabcdef
@@ -457,6 +462,13 @@ static int run_api_tests(long (*func)(long, long))
pr_info(" jprobe\n");
ret = test_jprobe(func);
+#if defined(CONFIG_THUMB2_KERNEL) && !defined(MODULE)
+ if (ret == -EINVAL) {
+ pr_err("FAIL: Known longtime bug with jprobe on Thumb kernels\n");
+ tests_failed = ret;
+ ret = 0;
+ }
+#endif
if (ret < 0)
return ret;
@@ -1329,7 +1341,8 @@ static void test_case_failed(const char *message)
static unsigned long next_instruction(unsigned long pc)
{
#ifdef CONFIG_THUMB2_KERNEL
- if ((pc & 1) && !is_wide_instruction(*(u16 *)(pc - 1)))
+ if ((pc & 1) &&
+ !is_wide_instruction(__mem_to_opcode_thumb16(*(u16 *)(pc - 1))))
return pc + 2;
else
#endif
@@ -1374,13 +1387,13 @@ static uintptr_t __used kprobes_test_case_start(const char *title, void *stack)
if (test_case_is_thumb) {
u16 *p = (u16 *)(test_code & ~1);
- current_instruction = p[0];
+ current_instruction = __mem_to_opcode_thumb16(p[0]);
if (is_wide_instruction(current_instruction)) {
- current_instruction <<= 16;
- current_instruction |= p[1];
+ u16 instr2 = __mem_to_opcode_thumb16(p[1]);
+ current_instruction = __opcode_thumb32_compose(current_instruction, instr2);
}
} else {
- current_instruction = *(u32 *)test_code;
+ current_instruction = __mem_to_opcode_arm(*(u32 *)test_code);
}
if (current_title[0] == '.')
@@ -1608,7 +1621,7 @@ static int __init run_all_tests(void)
goto out;
pr_info("ARM instruction simulation\n");
- ret = run_test_cases(kprobe_arm_test_cases, kprobe_decode_arm_table);
+ ret = run_test_cases(kprobe_arm_test_cases, probes_decode_arm_table);
if (ret)
goto out;
@@ -1631,13 +1644,13 @@ static int __init run_all_tests(void)
pr_info("16-bit Thumb instruction simulation\n");
ret = run_test_cases(kprobe_thumb16_test_cases,
- kprobe_decode_thumb16_table);
+ probes_decode_thumb16_table);
if (ret)
goto out;
pr_info("32-bit Thumb instruction simulation\n");
ret = run_test_cases(kprobe_thumb32_test_cases,
- kprobe_decode_thumb32_table);
+ probes_decode_thumb32_table);
if (ret)
goto out;
#endif
@@ -1667,6 +1680,8 @@ static int __init run_all_tests(void)
out:
if (ret == 0)
+ ret = tests_failed;
+ if (ret == 0)
pr_info("Finished kprobe tests OK\n");
else
pr_err("kprobe tests failed\n");
diff --git a/arch/arm/kernel/kprobes-test.h b/arch/arm/kernel/kprobes-test.h
index e28a869b1ae..eecc90a0fd9 100644
--- a/arch/arm/kernel/kprobes-test.h
+++ b/arch/arm/kernel/kprobes-test.h
@@ -115,7 +115,7 @@ struct test_arg_end {
/* multiple strings to be concatenated. */ \
".ascii "#title" \n\t" \
".byte 0 \n\t" \
- ".align 2 \n\t"
+ ".align 2, 0 \n\t"
#define TEST_ARG_REG(reg, val) \
".byte "__stringify(ARG_TYPE_REG)" \n\t" \
diff --git a/arch/arm/kernel/kprobes-thumb.c b/arch/arm/kernel/kprobes-thumb.c
index 6123daf397a..9495d7f3516 100644
--- a/arch/arm/kernel/kprobes-thumb.c
+++ b/arch/arm/kernel/kprobes-thumb.c
@@ -8,41 +8,25 @@
* published by the Free Software Foundation.
*/
+#include <linux/types.h>
#include <linux/kernel.h>
+#include <linux/ptrace.h>
#include <linux/kprobes.h>
-#include <linux/module.h>
#include "kprobes.h"
+#include "probes-thumb.h"
+/* These emulation encodings are functionally equivalent... */
+#define t32_emulate_rd8rn16rm0ra12_noflags \
+ t32_emulate_rdlo12rdhi8rn16rm0_noflags
-/*
- * True if current instruction is in an IT block.
- */
-#define in_it_block(cpsr) ((cpsr & 0x06000c00) != 0x00000000)
-
-/*
- * Return the condition code to check for the currently executing instruction.
- * This is in ITSTATE<7:4> which is in CPSR<15:12> but is only valid if
- * in_it_block returns true.
- */
-#define current_cond(cpsr) ((cpsr >> 12) & 0xf)
-
-/*
- * Return the PC value for a probe in thumb code.
- * This is the address of the probed instruction plus 4.
- * We subtract one because the address will have bit zero set to indicate
- * a pointer to thumb code.
- */
-static inline unsigned long __kprobes thumb_probe_pc(struct kprobe *p)
-{
- return (unsigned long)p->addr - 1 + 4;
-}
+/* t32 thumb actions */
static void __kprobes
-t32_simulate_table_branch(struct kprobe *p, struct pt_regs *regs)
+t32_simulate_table_branch(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long pc = thumb_probe_pc(p);
+ unsigned long pc = regs->ARM_pc;
int rn = (insn >> 16) & 0xf;
int rm = insn & 0xf;
@@ -59,19 +43,19 @@ t32_simulate_table_branch(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-t32_simulate_mrs(struct kprobe *p, struct pt_regs *regs)
+t32_simulate_mrs(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
int rd = (insn >> 8) & 0xf;
unsigned long mask = 0xf8ff03df; /* Mask out execution state */
regs->uregs[rd] = regs->ARM_cpsr & mask;
}
static void __kprobes
-t32_simulate_cond_branch(struct kprobe *p, struct pt_regs *regs)
+t32_simulate_cond_branch(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long pc = thumb_probe_pc(p);
+ unsigned long pc = regs->ARM_pc;
long offset = insn & 0x7ff; /* imm11 */
offset += (insn & 0x003f0000) >> 5; /* imm6 */
@@ -82,20 +66,21 @@ t32_simulate_cond_branch(struct kprobe *p, struct pt_regs *regs)
regs->ARM_pc = pc + (offset * 2);
}
-static enum kprobe_insn __kprobes
-t32_decode_cond_branch(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+static enum probes_insn __kprobes
+t32_decode_cond_branch(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *d)
{
int cc = (insn >> 22) & 0xf;
- asi->insn_check_cc = kprobe_condition_checks[cc];
+ asi->insn_check_cc = probes_condition_checks[cc];
asi->insn_handler = t32_simulate_cond_branch;
return INSN_GOOD_NO_SLOT;
}
static void __kprobes
-t32_simulate_branch(struct kprobe *p, struct pt_regs *regs)
+t32_simulate_branch(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long pc = thumb_probe_pc(p);
+ unsigned long pc = regs->ARM_pc;
long offset = insn & 0x7ff; /* imm11 */
offset += (insn & 0x03ff0000) >> 5; /* imm10 */
@@ -108,7 +93,7 @@ t32_simulate_branch(struct kprobe *p, struct pt_regs *regs)
if (insn & (1 << 14)) {
/* BL or BLX */
- regs->ARM_lr = (unsigned long)p->addr + 4;
+ regs->ARM_lr = regs->ARM_pc | 1;
if (!(insn & (1 << 12))) {
/* BLX so switch to ARM mode */
regs->ARM_cpsr &= ~PSR_T_BIT;
@@ -120,10 +105,10 @@ t32_simulate_branch(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-t32_simulate_ldr_literal(struct kprobe *p, struct pt_regs *regs)
+t32_simulate_ldr_literal(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long addr = thumb_probe_pc(p) & ~3;
+ unsigned long addr = regs->ARM_pc & ~3;
int rt = (insn >> 12) & 0xf;
unsigned long rtv;
@@ -157,24 +142,25 @@ t32_simulate_ldr_literal(struct kprobe *p, struct pt_regs *regs)
regs->uregs[rt] = rtv;
}
-static enum kprobe_insn __kprobes
-t32_decode_ldmstm(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+static enum probes_insn __kprobes
+t32_decode_ldmstm(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *d)
{
- enum kprobe_insn ret = kprobe_decode_ldmstm(insn, asi);
+ enum probes_insn ret = kprobe_decode_ldmstm(insn, asi, d);
/* Fixup modified instruction to have halfwords in correct order...*/
- insn = asi->insn[0];
- ((u16 *)asi->insn)[0] = insn >> 16;
- ((u16 *)asi->insn)[1] = insn & 0xffff;
+ insn = __mem_to_opcode_arm(asi->insn[0]);
+ ((u16 *)asi->insn)[0] = __opcode_to_mem_thumb16(insn >> 16);
+ ((u16 *)asi->insn)[1] = __opcode_to_mem_thumb16(insn & 0xffff);
return ret;
}
static void __kprobes
-t32_emulate_ldrdstrd(struct kprobe *p, struct pt_regs *regs)
+t32_emulate_ldrdstrd(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long pc = thumb_probe_pc(p) & ~3;
+ unsigned long pc = regs->ARM_pc & ~3;
int rt1 = (insn >> 12) & 0xf;
int rt2 = (insn >> 8) & 0xf;
int rn = (insn >> 16) & 0xf;
@@ -187,7 +173,7 @@ t32_emulate_ldrdstrd(struct kprobe *p, struct pt_regs *regs)
__asm__ __volatile__ (
"blx %[fn]"
: "=r" (rt1v), "=r" (rt2v), "=r" (rnv)
- : "0" (rt1v), "1" (rt2v), "2" (rnv), [fn] "r" (p->ainsn.insn_fn)
+ : "0" (rt1v), "1" (rt2v), "2" (rnv), [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
@@ -198,9 +184,9 @@ t32_emulate_ldrdstrd(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-t32_emulate_ldrstr(struct kprobe *p, struct pt_regs *regs)
+t32_emulate_ldrstr(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
int rt = (insn >> 12) & 0xf;
int rn = (insn >> 16) & 0xf;
int rm = insn & 0xf;
@@ -212,7 +198,7 @@ t32_emulate_ldrstr(struct kprobe *p, struct pt_regs *regs)
__asm__ __volatile__ (
"blx %[fn]"
: "=r" (rtv), "=r" (rnv)
- : "0" (rtv), "1" (rnv), "r" (rmv), [fn] "r" (p->ainsn.insn_fn)
+ : "0" (rtv), "1" (rnv), "r" (rmv), [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
@@ -224,9 +210,9 @@ t32_emulate_ldrstr(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-t32_emulate_rd8rn16rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
+t32_emulate_rd8rn16rm0_rwflags(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
int rd = (insn >> 8) & 0xf;
int rn = (insn >> 16) & 0xf;
int rm = insn & 0xf;
@@ -242,7 +228,7 @@ t32_emulate_rd8rn16rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
"mrs %[cpsr], cpsr \n\t"
: "=r" (rdv), [cpsr] "=r" (cpsr)
: "0" (rdv), "r" (rnv), "r" (rmv),
- "1" (cpsr), [fn] "r" (p->ainsn.insn_fn)
+ "1" (cpsr), [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
@@ -251,10 +237,10 @@ t32_emulate_rd8rn16rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-t32_emulate_rd8pc16_noflags(struct kprobe *p, struct pt_regs *regs)
+t32_emulate_rd8pc16_noflags(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long pc = thumb_probe_pc(p);
+ unsigned long pc = regs->ARM_pc;
int rd = (insn >> 8) & 0xf;
register unsigned long rdv asm("r1") = regs->uregs[rd];
@@ -263,7 +249,7 @@ t32_emulate_rd8pc16_noflags(struct kprobe *p, struct pt_regs *regs)
__asm__ __volatile__ (
"blx %[fn]"
: "=r" (rdv)
- : "0" (rdv), "r" (rnv), [fn] "r" (p->ainsn.insn_fn)
+ : "0" (rdv), "r" (rnv), [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
@@ -271,9 +257,9 @@ t32_emulate_rd8pc16_noflags(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-t32_emulate_rd8rn16_noflags(struct kprobe *p, struct pt_regs *regs)
+t32_emulate_rd8rn16_noflags(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
int rd = (insn >> 8) & 0xf;
int rn = (insn >> 16) & 0xf;
@@ -283,7 +269,7 @@ t32_emulate_rd8rn16_noflags(struct kprobe *p, struct pt_regs *regs)
__asm__ __volatile__ (
"blx %[fn]"
: "=r" (rdv)
- : "0" (rdv), "r" (rnv), [fn] "r" (p->ainsn.insn_fn)
+ : "0" (rdv), "r" (rnv), [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
@@ -291,9 +277,10 @@ t32_emulate_rd8rn16_noflags(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-t32_emulate_rdlo12rdhi8rn16rm0_noflags(struct kprobe *p, struct pt_regs *regs)
+t32_emulate_rdlo12rdhi8rn16rm0_noflags(probes_opcode_t insn,
+ struct arch_probes_insn *asi,
+ struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
int rdlo = (insn >> 12) & 0xf;
int rdhi = (insn >> 8) & 0xf;
int rn = (insn >> 16) & 0xf;
@@ -308,674 +295,43 @@ t32_emulate_rdlo12rdhi8rn16rm0_noflags(struct kprobe *p, struct pt_regs *regs)
"blx %[fn]"
: "=r" (rdlov), "=r" (rdhiv)
: "0" (rdlov), "1" (rdhiv), "r" (rnv), "r" (rmv),
- [fn] "r" (p->ainsn.insn_fn)
+ [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
regs->uregs[rdlo] = rdlov;
regs->uregs[rdhi] = rdhiv;
}
-
-/* These emulation encodings are functionally equivalent... */
-#define t32_emulate_rd8rn16rm0ra12_noflags \
- t32_emulate_rdlo12rdhi8rn16rm0_noflags
-
-static const union decode_item t32_table_1110_100x_x0xx[] = {
- /* Load/store multiple instructions */
-
- /* Rn is PC 1110 100x x0xx 1111 xxxx xxxx xxxx xxxx */
- DECODE_REJECT (0xfe4f0000, 0xe80f0000),
-
- /* SRS 1110 1000 00x0 xxxx xxxx xxxx xxxx xxxx */
- /* RFE 1110 1000 00x1 xxxx xxxx xxxx xxxx xxxx */
- DECODE_REJECT (0xffc00000, 0xe8000000),
- /* SRS 1110 1001 10x0 xxxx xxxx xxxx xxxx xxxx */
- /* RFE 1110 1001 10x1 xxxx xxxx xxxx xxxx xxxx */
- DECODE_REJECT (0xffc00000, 0xe9800000),
-
- /* STM Rn, {...pc} 1110 100x x0x0 xxxx 1xxx xxxx xxxx xxxx */
- DECODE_REJECT (0xfe508000, 0xe8008000),
- /* LDM Rn, {...lr,pc} 1110 100x x0x1 xxxx 11xx xxxx xxxx xxxx */
- DECODE_REJECT (0xfe50c000, 0xe810c000),
- /* LDM/STM Rn, {...sp} 1110 100x x0xx xxxx xx1x xxxx xxxx xxxx */
- DECODE_REJECT (0xfe402000, 0xe8002000),
-
- /* STMIA 1110 1000 10x0 xxxx xxxx xxxx xxxx xxxx */
- /* LDMIA 1110 1000 10x1 xxxx xxxx xxxx xxxx xxxx */
- /* STMDB 1110 1001 00x0 xxxx xxxx xxxx xxxx xxxx */
- /* LDMDB 1110 1001 00x1 xxxx xxxx xxxx xxxx xxxx */
- DECODE_CUSTOM (0xfe400000, 0xe8000000, t32_decode_ldmstm),
-
- DECODE_END
-};
-
-static const union decode_item t32_table_1110_100x_x1xx[] = {
- /* Load/store dual, load/store exclusive, table branch */
-
- /* STRD (immediate) 1110 1000 x110 xxxx xxxx xxxx xxxx xxxx */
- /* LDRD (immediate) 1110 1000 x111 xxxx xxxx xxxx xxxx xxxx */
- DECODE_OR (0xff600000, 0xe8600000),
- /* STRD (immediate) 1110 1001 x1x0 xxxx xxxx xxxx xxxx xxxx */
- /* LDRD (immediate) 1110 1001 x1x1 xxxx xxxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xff400000, 0xe9400000, t32_emulate_ldrdstrd,
- REGS(NOPCWB, NOSPPC, NOSPPC, 0, 0)),
-
- /* TBB 1110 1000 1101 xxxx xxxx xxxx 0000 xxxx */
- /* TBH 1110 1000 1101 xxxx xxxx xxxx 0001 xxxx */
- DECODE_SIMULATEX(0xfff000e0, 0xe8d00000, t32_simulate_table_branch,
- REGS(NOSP, 0, 0, 0, NOSPPC)),
-
- /* STREX 1110 1000 0100 xxxx xxxx xxxx xxxx xxxx */
- /* LDREX 1110 1000 0101 xxxx xxxx xxxx xxxx xxxx */
- /* STREXB 1110 1000 1100 xxxx xxxx xxxx 0100 xxxx */
- /* STREXH 1110 1000 1100 xxxx xxxx xxxx 0101 xxxx */
- /* STREXD 1110 1000 1100 xxxx xxxx xxxx 0111 xxxx */
- /* LDREXB 1110 1000 1101 xxxx xxxx xxxx 0100 xxxx */
- /* LDREXH 1110 1000 1101 xxxx xxxx xxxx 0101 xxxx */
- /* LDREXD 1110 1000 1101 xxxx xxxx xxxx 0111 xxxx */
- /* And unallocated instructions... */
- DECODE_END
-};
-
-static const union decode_item t32_table_1110_101x[] = {
- /* Data-processing (shifted register) */
-
- /* TST 1110 1010 0001 xxxx xxxx 1111 xxxx xxxx */
- /* TEQ 1110 1010 1001 xxxx xxxx 1111 xxxx xxxx */
- DECODE_EMULATEX (0xff700f00, 0xea100f00, t32_emulate_rd8rn16rm0_rwflags,
- REGS(NOSPPC, 0, 0, 0, NOSPPC)),
-
- /* CMN 1110 1011 0001 xxxx xxxx 1111 xxxx xxxx */
- DECODE_OR (0xfff00f00, 0xeb100f00),
- /* CMP 1110 1011 1011 xxxx xxxx 1111 xxxx xxxx */
- DECODE_EMULATEX (0xfff00f00, 0xebb00f00, t32_emulate_rd8rn16rm0_rwflags,
- REGS(NOPC, 0, 0, 0, NOSPPC)),
-
- /* MOV 1110 1010 010x 1111 xxxx xxxx xxxx xxxx */
- /* MVN 1110 1010 011x 1111 xxxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xffcf0000, 0xea4f0000, t32_emulate_rd8rn16rm0_rwflags,
- REGS(0, 0, NOSPPC, 0, NOSPPC)),
-
- /* ??? 1110 1010 101x xxxx xxxx xxxx xxxx xxxx */
- /* ??? 1110 1010 111x xxxx xxxx xxxx xxxx xxxx */
- DECODE_REJECT (0xffa00000, 0xeaa00000),
- /* ??? 1110 1011 001x xxxx xxxx xxxx xxxx xxxx */
- DECODE_REJECT (0xffe00000, 0xeb200000),
- /* ??? 1110 1011 100x xxxx xxxx xxxx xxxx xxxx */
- DECODE_REJECT (0xffe00000, 0xeb800000),
- /* ??? 1110 1011 111x xxxx xxxx xxxx xxxx xxxx */
- DECODE_REJECT (0xffe00000, 0xebe00000),
-
- /* ADD/SUB SP, SP, Rm, LSL #0..3 */
- /* 1110 1011 x0xx 1101 x000 1101 xx00 xxxx */
- DECODE_EMULATEX (0xff4f7f30, 0xeb0d0d00, t32_emulate_rd8rn16rm0_rwflags,
- REGS(SP, 0, SP, 0, NOSPPC)),
-
- /* ADD/SUB SP, SP, Rm, shift */
- /* 1110 1011 x0xx 1101 xxxx 1101 xxxx xxxx */
- DECODE_REJECT (0xff4f0f00, 0xeb0d0d00),
-
- /* ADD/SUB Rd, SP, Rm, shift */
- /* 1110 1011 x0xx 1101 xxxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xff4f0000, 0xeb0d0000, t32_emulate_rd8rn16rm0_rwflags,
- REGS(SP, 0, NOPC, 0, NOSPPC)),
-
- /* AND 1110 1010 000x xxxx xxxx xxxx xxxx xxxx */
- /* BIC 1110 1010 001x xxxx xxxx xxxx xxxx xxxx */
- /* ORR 1110 1010 010x xxxx xxxx xxxx xxxx xxxx */
- /* ORN 1110 1010 011x xxxx xxxx xxxx xxxx xxxx */
- /* EOR 1110 1010 100x xxxx xxxx xxxx xxxx xxxx */
- /* PKH 1110 1010 110x xxxx xxxx xxxx xxxx xxxx */
- /* ADD 1110 1011 000x xxxx xxxx xxxx xxxx xxxx */
- /* ADC 1110 1011 010x xxxx xxxx xxxx xxxx xxxx */
- /* SBC 1110 1011 011x xxxx xxxx xxxx xxxx xxxx */
- /* SUB 1110 1011 101x xxxx xxxx xxxx xxxx xxxx */
- /* RSB 1110 1011 110x xxxx xxxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xfe000000, 0xea000000, t32_emulate_rd8rn16rm0_rwflags,
- REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)),
-
- DECODE_END
-};
-
-static const union decode_item t32_table_1111_0x0x___0[] = {
- /* Data-processing (modified immediate) */
-
- /* TST 1111 0x00 0001 xxxx 0xxx 1111 xxxx xxxx */
- /* TEQ 1111 0x00 1001 xxxx 0xxx 1111 xxxx xxxx */
- DECODE_EMULATEX (0xfb708f00, 0xf0100f00, t32_emulate_rd8rn16rm0_rwflags,
- REGS(NOSPPC, 0, 0, 0, 0)),
-
- /* CMN 1111 0x01 0001 xxxx 0xxx 1111 xxxx xxxx */
- DECODE_OR (0xfbf08f00, 0xf1100f00),
- /* CMP 1111 0x01 1011 xxxx 0xxx 1111 xxxx xxxx */
- DECODE_EMULATEX (0xfbf08f00, 0xf1b00f00, t32_emulate_rd8rn16rm0_rwflags,
- REGS(NOPC, 0, 0, 0, 0)),
-
- /* MOV 1111 0x00 010x 1111 0xxx xxxx xxxx xxxx */
- /* MVN 1111 0x00 011x 1111 0xxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xfbcf8000, 0xf04f0000, t32_emulate_rd8rn16rm0_rwflags,
- REGS(0, 0, NOSPPC, 0, 0)),
-
- /* ??? 1111 0x00 101x xxxx 0xxx xxxx xxxx xxxx */
- DECODE_REJECT (0xfbe08000, 0xf0a00000),
- /* ??? 1111 0x00 110x xxxx 0xxx xxxx xxxx xxxx */
- /* ??? 1111 0x00 111x xxxx 0xxx xxxx xxxx xxxx */
- DECODE_REJECT (0xfbc08000, 0xf0c00000),
- /* ??? 1111 0x01 001x xxxx 0xxx xxxx xxxx xxxx */
- DECODE_REJECT (0xfbe08000, 0xf1200000),
- /* ??? 1111 0x01 100x xxxx 0xxx xxxx xxxx xxxx */
- DECODE_REJECT (0xfbe08000, 0xf1800000),
- /* ??? 1111 0x01 111x xxxx 0xxx xxxx xxxx xxxx */
- DECODE_REJECT (0xfbe08000, 0xf1e00000),
-
- /* ADD Rd, SP, #imm 1111 0x01 000x 1101 0xxx xxxx xxxx xxxx */
- /* SUB Rd, SP, #imm 1111 0x01 101x 1101 0xxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xfb4f8000, 0xf10d0000, t32_emulate_rd8rn16rm0_rwflags,
- REGS(SP, 0, NOPC, 0, 0)),
-
- /* AND 1111 0x00 000x xxxx 0xxx xxxx xxxx xxxx */
- /* BIC 1111 0x00 001x xxxx 0xxx xxxx xxxx xxxx */
- /* ORR 1111 0x00 010x xxxx 0xxx xxxx xxxx xxxx */
- /* ORN 1111 0x00 011x xxxx 0xxx xxxx xxxx xxxx */
- /* EOR 1111 0x00 100x xxxx 0xxx xxxx xxxx xxxx */
- /* ADD 1111 0x01 000x xxxx 0xxx xxxx xxxx xxxx */
- /* ADC 1111 0x01 010x xxxx 0xxx xxxx xxxx xxxx */
- /* SBC 1111 0x01 011x xxxx 0xxx xxxx xxxx xxxx */
- /* SUB 1111 0x01 101x xxxx 0xxx xxxx xxxx xxxx */
- /* RSB 1111 0x01 110x xxxx 0xxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xfa008000, 0xf0000000, t32_emulate_rd8rn16rm0_rwflags,
- REGS(NOSPPC, 0, NOSPPC, 0, 0)),
-
- DECODE_END
-};
-
-static const union decode_item t32_table_1111_0x1x___0[] = {
- /* Data-processing (plain binary immediate) */
-
- /* ADDW Rd, PC, #imm 1111 0x10 0000 1111 0xxx xxxx xxxx xxxx */
- DECODE_OR (0xfbff8000, 0xf20f0000),
- /* SUBW Rd, PC, #imm 1111 0x10 1010 1111 0xxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xfbff8000, 0xf2af0000, t32_emulate_rd8pc16_noflags,
- REGS(PC, 0, NOSPPC, 0, 0)),
-
- /* ADDW SP, SP, #imm 1111 0x10 0000 1101 0xxx 1101 xxxx xxxx */
- DECODE_OR (0xfbff8f00, 0xf20d0d00),
- /* SUBW SP, SP, #imm 1111 0x10 1010 1101 0xxx 1101 xxxx xxxx */
- DECODE_EMULATEX (0xfbff8f00, 0xf2ad0d00, t32_emulate_rd8rn16_noflags,
- REGS(SP, 0, SP, 0, 0)),
-
- /* ADDW 1111 0x10 0000 xxxx 0xxx xxxx xxxx xxxx */
- DECODE_OR (0xfbf08000, 0xf2000000),
- /* SUBW 1111 0x10 1010 xxxx 0xxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xfbf08000, 0xf2a00000, t32_emulate_rd8rn16_noflags,
- REGS(NOPCX, 0, NOSPPC, 0, 0)),
-
- /* MOVW 1111 0x10 0100 xxxx 0xxx xxxx xxxx xxxx */
- /* MOVT 1111 0x10 1100 xxxx 0xxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xfb708000, 0xf2400000, t32_emulate_rd8rn16_noflags,
- REGS(0, 0, NOSPPC, 0, 0)),
-
- /* SSAT16 1111 0x11 0010 xxxx 0000 xxxx 00xx xxxx */
- /* SSAT 1111 0x11 00x0 xxxx 0xxx xxxx xxxx xxxx */
- /* USAT16 1111 0x11 1010 xxxx 0000 xxxx 00xx xxxx */
- /* USAT 1111 0x11 10x0 xxxx 0xxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xfb508000, 0xf3000000, t32_emulate_rd8rn16rm0_rwflags,
- REGS(NOSPPC, 0, NOSPPC, 0, 0)),
-
- /* SFBX 1111 0x11 0100 xxxx 0xxx xxxx xxxx xxxx */
- /* UFBX 1111 0x11 1100 xxxx 0xxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xfb708000, 0xf3400000, t32_emulate_rd8rn16_noflags,
- REGS(NOSPPC, 0, NOSPPC, 0, 0)),
-
- /* BFC 1111 0x11 0110 1111 0xxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xfbff8000, 0xf36f0000, t32_emulate_rd8rn16_noflags,
- REGS(0, 0, NOSPPC, 0, 0)),
-
- /* BFI 1111 0x11 0110 xxxx 0xxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xfbf08000, 0xf3600000, t32_emulate_rd8rn16_noflags,
- REGS(NOSPPCX, 0, NOSPPC, 0, 0)),
-
- DECODE_END
-};
-
-static const union decode_item t32_table_1111_0xxx___1[] = {
- /* Branches and miscellaneous control */
-
- /* YIELD 1111 0011 1010 xxxx 10x0 x000 0000 0001 */
- DECODE_OR (0xfff0d7ff, 0xf3a08001),
- /* SEV 1111 0011 1010 xxxx 10x0 x000 0000 0100 */
- DECODE_EMULATE (0xfff0d7ff, 0xf3a08004, kprobe_emulate_none),
- /* NOP 1111 0011 1010 xxxx 10x0 x000 0000 0000 */
- /* WFE 1111 0011 1010 xxxx 10x0 x000 0000 0010 */
- /* WFI 1111 0011 1010 xxxx 10x0 x000 0000 0011 */
- DECODE_SIMULATE (0xfff0d7fc, 0xf3a08000, kprobe_simulate_nop),
-
- /* MRS Rd, CPSR 1111 0011 1110 xxxx 10x0 xxxx xxxx xxxx */
- DECODE_SIMULATEX(0xfff0d000, 0xf3e08000, t32_simulate_mrs,
- REGS(0, 0, NOSPPC, 0, 0)),
-
- /*
- * Unsupported instructions
- * 1111 0x11 1xxx xxxx 10x0 xxxx xxxx xxxx
- *
- * MSR 1111 0011 100x xxxx 10x0 xxxx xxxx xxxx
- * DBG hint 1111 0011 1010 xxxx 10x0 x000 1111 xxxx
- * Unallocated hints 1111 0011 1010 xxxx 10x0 x000 xxxx xxxx
- * CPS 1111 0011 1010 xxxx 10x0 xxxx xxxx xxxx
- * CLREX/DSB/DMB/ISB 1111 0011 1011 xxxx 10x0 xxxx xxxx xxxx
- * BXJ 1111 0011 1100 xxxx 10x0 xxxx xxxx xxxx
- * SUBS PC,LR,#<imm8> 1111 0011 1101 xxxx 10x0 xxxx xxxx xxxx
- * MRS Rd, SPSR 1111 0011 1111 xxxx 10x0 xxxx xxxx xxxx
- * SMC 1111 0111 1111 xxxx 1000 xxxx xxxx xxxx
- * UNDEFINED 1111 0111 1111 xxxx 1010 xxxx xxxx xxxx
- * ??? 1111 0111 1xxx xxxx 1010 xxxx xxxx xxxx
- */
- DECODE_REJECT (0xfb80d000, 0xf3808000),
-
- /* Bcc 1111 0xxx xxxx xxxx 10x0 xxxx xxxx xxxx */
- DECODE_CUSTOM (0xf800d000, 0xf0008000, t32_decode_cond_branch),
-
- /* BLX 1111 0xxx xxxx xxxx 11x0 xxxx xxxx xxx0 */
- DECODE_OR (0xf800d001, 0xf000c000),
- /* B 1111 0xxx xxxx xxxx 10x1 xxxx xxxx xxxx */
- /* BL 1111 0xxx xxxx xxxx 11x1 xxxx xxxx xxxx */
- DECODE_SIMULATE (0xf8009000, 0xf0009000, t32_simulate_branch),
-
- DECODE_END
-};
-
-static const union decode_item t32_table_1111_100x_x0x1__1111[] = {
- /* Memory hints */
-
- /* PLD (literal) 1111 1000 x001 1111 1111 xxxx xxxx xxxx */
- /* PLI (literal) 1111 1001 x001 1111 1111 xxxx xxxx xxxx */
- DECODE_SIMULATE (0xfe7ff000, 0xf81ff000, kprobe_simulate_nop),
-
- /* PLD{W} (immediate) 1111 1000 10x1 xxxx 1111 xxxx xxxx xxxx */
- DECODE_OR (0xffd0f000, 0xf890f000),
- /* PLD{W} (immediate) 1111 1000 00x1 xxxx 1111 1100 xxxx xxxx */
- DECODE_OR (0xffd0ff00, 0xf810fc00),
- /* PLI (immediate) 1111 1001 1001 xxxx 1111 xxxx xxxx xxxx */
- DECODE_OR (0xfff0f000, 0xf990f000),
- /* PLI (immediate) 1111 1001 0001 xxxx 1111 1100 xxxx xxxx */
- DECODE_SIMULATEX(0xfff0ff00, 0xf910fc00, kprobe_simulate_nop,
- REGS(NOPCX, 0, 0, 0, 0)),
-
- /* PLD{W} (register) 1111 1000 00x1 xxxx 1111 0000 00xx xxxx */
- DECODE_OR (0xffd0ffc0, 0xf810f000),
- /* PLI (register) 1111 1001 0001 xxxx 1111 0000 00xx xxxx */
- DECODE_SIMULATEX(0xfff0ffc0, 0xf910f000, kprobe_simulate_nop,
- REGS(NOPCX, 0, 0, 0, NOSPPC)),
-
- /* Other unallocated instructions... */
- DECODE_END
-};
-
-static const union decode_item t32_table_1111_100x[] = {
- /* Store/Load single data item */
-
- /* ??? 1111 100x x11x xxxx xxxx xxxx xxxx xxxx */
- DECODE_REJECT (0xfe600000, 0xf8600000),
-
- /* ??? 1111 1001 0101 xxxx xxxx xxxx xxxx xxxx */
- DECODE_REJECT (0xfff00000, 0xf9500000),
-
- /* ??? 1111 100x 0xxx xxxx xxxx 10x0 xxxx xxxx */
- DECODE_REJECT (0xfe800d00, 0xf8000800),
-
- /* STRBT 1111 1000 0000 xxxx xxxx 1110 xxxx xxxx */
- /* STRHT 1111 1000 0010 xxxx xxxx 1110 xxxx xxxx */
- /* STRT 1111 1000 0100 xxxx xxxx 1110 xxxx xxxx */
- /* LDRBT 1111 1000 0001 xxxx xxxx 1110 xxxx xxxx */
- /* LDRSBT 1111 1001 0001 xxxx xxxx 1110 xxxx xxxx */
- /* LDRHT 1111 1000 0011 xxxx xxxx 1110 xxxx xxxx */
- /* LDRSHT 1111 1001 0011 xxxx xxxx 1110 xxxx xxxx */
- /* LDRT 1111 1000 0101 xxxx xxxx 1110 xxxx xxxx */
- DECODE_REJECT (0xfe800f00, 0xf8000e00),
-
- /* STR{,B,H} Rn,[PC...] 1111 1000 xxx0 1111 xxxx xxxx xxxx xxxx */
- DECODE_REJECT (0xff1f0000, 0xf80f0000),
-
- /* STR{,B,H} PC,[Rn...] 1111 1000 xxx0 xxxx 1111 xxxx xxxx xxxx */
- DECODE_REJECT (0xff10f000, 0xf800f000),
-
- /* LDR (literal) 1111 1000 x101 1111 xxxx xxxx xxxx xxxx */
- DECODE_SIMULATEX(0xff7f0000, 0xf85f0000, t32_simulate_ldr_literal,
- REGS(PC, ANY, 0, 0, 0)),
-
- /* STR (immediate) 1111 1000 0100 xxxx xxxx 1xxx xxxx xxxx */
- /* LDR (immediate) 1111 1000 0101 xxxx xxxx 1xxx xxxx xxxx */
- DECODE_OR (0xffe00800, 0xf8400800),
- /* STR (immediate) 1111 1000 1100 xxxx xxxx xxxx xxxx xxxx */
- /* LDR (immediate) 1111 1000 1101 xxxx xxxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xffe00000, 0xf8c00000, t32_emulate_ldrstr,
- REGS(NOPCX, ANY, 0, 0, 0)),
-
- /* STR (register) 1111 1000 0100 xxxx xxxx 0000 00xx xxxx */
- /* LDR (register) 1111 1000 0101 xxxx xxxx 0000 00xx xxxx */
- DECODE_EMULATEX (0xffe00fc0, 0xf8400000, t32_emulate_ldrstr,
- REGS(NOPCX, ANY, 0, 0, NOSPPC)),
-
- /* LDRB (literal) 1111 1000 x001 1111 xxxx xxxx xxxx xxxx */
- /* LDRSB (literal) 1111 1001 x001 1111 xxxx xxxx xxxx xxxx */
- /* LDRH (literal) 1111 1000 x011 1111 xxxx xxxx xxxx xxxx */
- /* LDRSH (literal) 1111 1001 x011 1111 xxxx xxxx xxxx xxxx */
- DECODE_SIMULATEX(0xfe5f0000, 0xf81f0000, t32_simulate_ldr_literal,
- REGS(PC, NOSPPCX, 0, 0, 0)),
-
- /* STRB (immediate) 1111 1000 0000 xxxx xxxx 1xxx xxxx xxxx */
- /* STRH (immediate) 1111 1000 0010 xxxx xxxx 1xxx xxxx xxxx */
- /* LDRB (immediate) 1111 1000 0001 xxxx xxxx 1xxx xxxx xxxx */
- /* LDRSB (immediate) 1111 1001 0001 xxxx xxxx 1xxx xxxx xxxx */
- /* LDRH (immediate) 1111 1000 0011 xxxx xxxx 1xxx xxxx xxxx */
- /* LDRSH (immediate) 1111 1001 0011 xxxx xxxx 1xxx xxxx xxxx */
- DECODE_OR (0xfec00800, 0xf8000800),
- /* STRB (immediate) 1111 1000 1000 xxxx xxxx xxxx xxxx xxxx */
- /* STRH (immediate) 1111 1000 1010 xxxx xxxx xxxx xxxx xxxx */
- /* LDRB (immediate) 1111 1000 1001 xxxx xxxx xxxx xxxx xxxx */
- /* LDRSB (immediate) 1111 1001 1001 xxxx xxxx xxxx xxxx xxxx */
- /* LDRH (immediate) 1111 1000 1011 xxxx xxxx xxxx xxxx xxxx */
- /* LDRSH (immediate) 1111 1001 1011 xxxx xxxx xxxx xxxx xxxx */
- DECODE_EMULATEX (0xfec00000, 0xf8800000, t32_emulate_ldrstr,
- REGS(NOPCX, NOSPPCX, 0, 0, 0)),
-
- /* STRB (register) 1111 1000 0000 xxxx xxxx 0000 00xx xxxx */
- /* STRH (register) 1111 1000 0010 xxxx xxxx 0000 00xx xxxx */
- /* LDRB (register) 1111 1000 0001 xxxx xxxx 0000 00xx xxxx */
- /* LDRSB (register) 1111 1001 0001 xxxx xxxx 0000 00xx xxxx */
- /* LDRH (register) 1111 1000 0011 xxxx xxxx 0000 00xx xxxx */
- /* LDRSH (register) 1111 1001 0011 xxxx xxxx 0000 00xx xxxx */
- DECODE_EMULATEX (0xfe800fc0, 0xf8000000, t32_emulate_ldrstr,
- REGS(NOPCX, NOSPPCX, 0, 0, NOSPPC)),
-
- /* Other unallocated instructions... */
- DECODE_END
-};
-
-static const union decode_item t32_table_1111_1010___1111[] = {
- /* Data-processing (register) */
-
- /* ??? 1111 1010 011x xxxx 1111 xxxx 1xxx xxxx */
- DECODE_REJECT (0xffe0f080, 0xfa60f080),
-
- /* SXTH 1111 1010 0000 1111 1111 xxxx 1xxx xxxx */
- /* UXTH 1111 1010 0001 1111 1111 xxxx 1xxx xxxx */
- /* SXTB16 1111 1010 0010 1111 1111 xxxx 1xxx xxxx */
- /* UXTB16 1111 1010 0011 1111 1111 xxxx 1xxx xxxx */
- /* SXTB 1111 1010 0100 1111 1111 xxxx 1xxx xxxx */
- /* UXTB 1111 1010 0101 1111 1111 xxxx 1xxx xxxx */
- DECODE_EMULATEX (0xff8ff080, 0xfa0ff080, t32_emulate_rd8rn16rm0_rwflags,
- REGS(0, 0, NOSPPC, 0, NOSPPC)),
-
-
- /* ??? 1111 1010 1xxx xxxx 1111 xxxx 0x11 xxxx */
- DECODE_REJECT (0xff80f0b0, 0xfa80f030),
- /* ??? 1111 1010 1x11 xxxx 1111 xxxx 0xxx xxxx */
- DECODE_REJECT (0xffb0f080, 0xfab0f000),
-
- /* SADD16 1111 1010 1001 xxxx 1111 xxxx 0000 xxxx */
- /* SASX 1111 1010 1010 xxxx 1111 xxxx 0000 xxxx */
- /* SSAX 1111 1010 1110 xxxx 1111 xxxx 0000 xxxx */
- /* SSUB16 1111 1010 1101 xxxx 1111 xxxx 0000 xxxx */
- /* SADD8 1111 1010 1000 xxxx 1111 xxxx 0000 xxxx */
- /* SSUB8 1111 1010 1100 xxxx 1111 xxxx 0000 xxxx */
-
- /* QADD16 1111 1010 1001 xxxx 1111 xxxx 0001 xxxx */
- /* QASX 1111 1010 1010 xxxx 1111 xxxx 0001 xxxx */
- /* QSAX 1111 1010 1110 xxxx 1111 xxxx 0001 xxxx */
- /* QSUB16 1111 1010 1101 xxxx 1111 xxxx 0001 xxxx */
- /* QADD8 1111 1010 1000 xxxx 1111 xxxx 0001 xxxx */
- /* QSUB8 1111 1010 1100 xxxx 1111 xxxx 0001 xxxx */
-
- /* SHADD16 1111 1010 1001 xxxx 1111 xxxx 0010 xxxx */
- /* SHASX 1111 1010 1010 xxxx 1111 xxxx 0010 xxxx */
- /* SHSAX 1111 1010 1110 xxxx 1111 xxxx 0010 xxxx */
- /* SHSUB16 1111 1010 1101 xxxx 1111 xxxx 0010 xxxx */
- /* SHADD8 1111 1010 1000 xxxx 1111 xxxx 0010 xxxx */
- /* SHSUB8 1111 1010 1100 xxxx 1111 xxxx 0010 xxxx */
-
- /* UADD16 1111 1010 1001 xxxx 1111 xxxx 0100 xxxx */
- /* UASX 1111 1010 1010 xxxx 1111 xxxx 0100 xxxx */
- /* USAX 1111 1010 1110 xxxx 1111 xxxx 0100 xxxx */
- /* USUB16 1111 1010 1101 xxxx 1111 xxxx 0100 xxxx */
- /* UADD8 1111 1010 1000 xxxx 1111 xxxx 0100 xxxx */
- /* USUB8 1111 1010 1100 xxxx 1111 xxxx 0100 xxxx */
-
- /* UQADD16 1111 1010 1001 xxxx 1111 xxxx 0101 xxxx */
- /* UQASX 1111 1010 1010 xxxx 1111 xxxx 0101 xxxx */
- /* UQSAX 1111 1010 1110 xxxx 1111 xxxx 0101 xxxx */
- /* UQSUB16 1111 1010 1101 xxxx 1111 xxxx 0101 xxxx */
- /* UQADD8 1111 1010 1000 xxxx 1111 xxxx 0101 xxxx */
- /* UQSUB8 1111 1010 1100 xxxx 1111 xxxx 0101 xxxx */
-
- /* UHADD16 1111 1010 1001 xxxx 1111 xxxx 0110 xxxx */
- /* UHASX 1111 1010 1010 xxxx 1111 xxxx 0110 xxxx */
- /* UHSAX 1111 1010 1110 xxxx 1111 xxxx 0110 xxxx */
- /* UHSUB16 1111 1010 1101 xxxx 1111 xxxx 0110 xxxx */
- /* UHADD8 1111 1010 1000 xxxx 1111 xxxx 0110 xxxx */
- /* UHSUB8 1111 1010 1100 xxxx 1111 xxxx 0110 xxxx */
- DECODE_OR (0xff80f080, 0xfa80f000),
-
- /* SXTAH 1111 1010 0000 xxxx 1111 xxxx 1xxx xxxx */
- /* UXTAH 1111 1010 0001 xxxx 1111 xxxx 1xxx xxxx */
- /* SXTAB16 1111 1010 0010 xxxx 1111 xxxx 1xxx xxxx */
- /* UXTAB16 1111 1010 0011 xxxx 1111 xxxx 1xxx xxxx */
- /* SXTAB 1111 1010 0100 xxxx 1111 xxxx 1xxx xxxx */
- /* UXTAB 1111 1010 0101 xxxx 1111 xxxx 1xxx xxxx */
- DECODE_OR (0xff80f080, 0xfa00f080),
-
- /* QADD 1111 1010 1000 xxxx 1111 xxxx 1000 xxxx */
- /* QDADD 1111 1010 1000 xxxx 1111 xxxx 1001 xxxx */
- /* QSUB 1111 1010 1000 xxxx 1111 xxxx 1010 xxxx */
- /* QDSUB 1111 1010 1000 xxxx 1111 xxxx 1011 xxxx */
- DECODE_OR (0xfff0f0c0, 0xfa80f080),
-
- /* SEL 1111 1010 1010 xxxx 1111 xxxx 1000 xxxx */
- DECODE_OR (0xfff0f0f0, 0xfaa0f080),
-
- /* LSL 1111 1010 000x xxxx 1111 xxxx 0000 xxxx */
- /* LSR 1111 1010 001x xxxx 1111 xxxx 0000 xxxx */
- /* ASR 1111 1010 010x xxxx 1111 xxxx 0000 xxxx */
- /* ROR 1111 1010 011x xxxx 1111 xxxx 0000 xxxx */
- DECODE_EMULATEX (0xff80f0f0, 0xfa00f000, t32_emulate_rd8rn16rm0_rwflags,
- REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)),
-
- /* CLZ 1111 1010 1010 xxxx 1111 xxxx 1000 xxxx */
- DECODE_OR (0xfff0f0f0, 0xfab0f080),
-
- /* REV 1111 1010 1001 xxxx 1111 xxxx 1000 xxxx */
- /* REV16 1111 1010 1001 xxxx 1111 xxxx 1001 xxxx */
- /* RBIT 1111 1010 1001 xxxx 1111 xxxx 1010 xxxx */
- /* REVSH 1111 1010 1001 xxxx 1111 xxxx 1011 xxxx */
- DECODE_EMULATEX (0xfff0f0c0, 0xfa90f080, t32_emulate_rd8rn16_noflags,
- REGS(NOSPPC, 0, NOSPPC, 0, SAMEAS16)),
-
- /* Other unallocated instructions... */
- DECODE_END
-};
-
-static const union decode_item t32_table_1111_1011_0[] = {
- /* Multiply, multiply accumulate, and absolute difference */
-
- /* ??? 1111 1011 0000 xxxx 1111 xxxx 0001 xxxx */
- DECODE_REJECT (0xfff0f0f0, 0xfb00f010),
- /* ??? 1111 1011 0111 xxxx 1111 xxxx 0001 xxxx */
- DECODE_REJECT (0xfff0f0f0, 0xfb70f010),
-
- /* SMULxy 1111 1011 0001 xxxx 1111 xxxx 00xx xxxx */
- DECODE_OR (0xfff0f0c0, 0xfb10f000),
- /* MUL 1111 1011 0000 xxxx 1111 xxxx 0000 xxxx */
- /* SMUAD{X} 1111 1011 0010 xxxx 1111 xxxx 000x xxxx */
- /* SMULWy 1111 1011 0011 xxxx 1111 xxxx 000x xxxx */
- /* SMUSD{X} 1111 1011 0100 xxxx 1111 xxxx 000x xxxx */
- /* SMMUL{R} 1111 1011 0101 xxxx 1111 xxxx 000x xxxx */
- /* USAD8 1111 1011 0111 xxxx 1111 xxxx 0000 xxxx */
- DECODE_EMULATEX (0xff80f0e0, 0xfb00f000, t32_emulate_rd8rn16rm0_rwflags,
- REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)),
-
- /* ??? 1111 1011 0111 xxxx xxxx xxxx 0001 xxxx */
- DECODE_REJECT (0xfff000f0, 0xfb700010),
-
- /* SMLAxy 1111 1011 0001 xxxx xxxx xxxx 00xx xxxx */
- DECODE_OR (0xfff000c0, 0xfb100000),
- /* MLA 1111 1011 0000 xxxx xxxx xxxx 0000 xxxx */
- /* MLS 1111 1011 0000 xxxx xxxx xxxx 0001 xxxx */
- /* SMLAD{X} 1111 1011 0010 xxxx xxxx xxxx 000x xxxx */
- /* SMLAWy 1111 1011 0011 xxxx xxxx xxxx 000x xxxx */
- /* SMLSD{X} 1111 1011 0100 xxxx xxxx xxxx 000x xxxx */
- /* SMMLA{R} 1111 1011 0101 xxxx xxxx xxxx 000x xxxx */
- /* SMMLS{R} 1111 1011 0110 xxxx xxxx xxxx 000x xxxx */
- /* USADA8 1111 1011 0111 xxxx xxxx xxxx 0000 xxxx */
- DECODE_EMULATEX (0xff8000c0, 0xfb000000, t32_emulate_rd8rn16rm0ra12_noflags,
- REGS(NOSPPC, NOSPPCX, NOSPPC, 0, NOSPPC)),
-
- /* Other unallocated instructions... */
- DECODE_END
-};
-
-static const union decode_item t32_table_1111_1011_1[] = {
- /* Long multiply, long multiply accumulate, and divide */
-
- /* UMAAL 1111 1011 1110 xxxx xxxx xxxx 0110 xxxx */
- DECODE_OR (0xfff000f0, 0xfbe00060),
- /* SMLALxy 1111 1011 1100 xxxx xxxx xxxx 10xx xxxx */
- DECODE_OR (0xfff000c0, 0xfbc00080),
- /* SMLALD{X} 1111 1011 1100 xxxx xxxx xxxx 110x xxxx */
- /* SMLSLD{X} 1111 1011 1101 xxxx xxxx xxxx 110x xxxx */
- DECODE_OR (0xffe000e0, 0xfbc000c0),
- /* SMULL 1111 1011 1000 xxxx xxxx xxxx 0000 xxxx */
- /* UMULL 1111 1011 1010 xxxx xxxx xxxx 0000 xxxx */
- /* SMLAL 1111 1011 1100 xxxx xxxx xxxx 0000 xxxx */
- /* UMLAL 1111 1011 1110 xxxx xxxx xxxx 0000 xxxx */
- DECODE_EMULATEX (0xff9000f0, 0xfb800000, t32_emulate_rdlo12rdhi8rn16rm0_noflags,
- REGS(NOSPPC, NOSPPC, NOSPPC, 0, NOSPPC)),
-
- /* SDIV 1111 1011 1001 xxxx xxxx xxxx 1111 xxxx */
- /* UDIV 1111 1011 1011 xxxx xxxx xxxx 1111 xxxx */
- /* Other unallocated instructions... */
- DECODE_END
-};
-
-const union decode_item kprobe_decode_thumb32_table[] = {
-
- /*
- * Load/store multiple instructions
- * 1110 100x x0xx xxxx xxxx xxxx xxxx xxxx
- */
- DECODE_TABLE (0xfe400000, 0xe8000000, t32_table_1110_100x_x0xx),
-
- /*
- * Load/store dual, load/store exclusive, table branch
- * 1110 100x x1xx xxxx xxxx xxxx xxxx xxxx
- */
- DECODE_TABLE (0xfe400000, 0xe8400000, t32_table_1110_100x_x1xx),
-
- /*
- * Data-processing (shifted register)
- * 1110 101x xxxx xxxx xxxx xxxx xxxx xxxx
- */
- DECODE_TABLE (0xfe000000, 0xea000000, t32_table_1110_101x),
-
- /*
- * Coprocessor instructions
- * 1110 11xx xxxx xxxx xxxx xxxx xxxx xxxx
- */
- DECODE_REJECT (0xfc000000, 0xec000000),
-
- /*
- * Data-processing (modified immediate)
- * 1111 0x0x xxxx xxxx 0xxx xxxx xxxx xxxx
- */
- DECODE_TABLE (0xfa008000, 0xf0000000, t32_table_1111_0x0x___0),
-
- /*
- * Data-processing (plain binary immediate)
- * 1111 0x1x xxxx xxxx 0xxx xxxx xxxx xxxx
- */
- DECODE_TABLE (0xfa008000, 0xf2000000, t32_table_1111_0x1x___0),
-
- /*
- * Branches and miscellaneous control
- * 1111 0xxx xxxx xxxx 1xxx xxxx xxxx xxxx
- */
- DECODE_TABLE (0xf8008000, 0xf0008000, t32_table_1111_0xxx___1),
-
- /*
- * Advanced SIMD element or structure load/store instructions
- * 1111 1001 xxx0 xxxx xxxx xxxx xxxx xxxx
- */
- DECODE_REJECT (0xff100000, 0xf9000000),
-
- /*
- * Memory hints
- * 1111 100x x0x1 xxxx 1111 xxxx xxxx xxxx
- */
- DECODE_TABLE (0xfe50f000, 0xf810f000, t32_table_1111_100x_x0x1__1111),
-
- /*
- * Store single data item
- * 1111 1000 xxx0 xxxx xxxx xxxx xxxx xxxx
- * Load single data items
- * 1111 100x xxx1 xxxx xxxx xxxx xxxx xxxx
- */
- DECODE_TABLE (0xfe000000, 0xf8000000, t32_table_1111_100x),
-
- /*
- * Data-processing (register)
- * 1111 1010 xxxx xxxx 1111 xxxx xxxx xxxx
- */
- DECODE_TABLE (0xff00f000, 0xfa00f000, t32_table_1111_1010___1111),
-
- /*
- * Multiply, multiply accumulate, and absolute difference
- * 1111 1011 0xxx xxxx xxxx xxxx xxxx xxxx
- */
- DECODE_TABLE (0xff800000, 0xfb000000, t32_table_1111_1011_0),
-
- /*
- * Long multiply, long multiply accumulate, and divide
- * 1111 1011 1xxx xxxx xxxx xxxx xxxx xxxx
- */
- DECODE_TABLE (0xff800000, 0xfb800000, t32_table_1111_1011_1),
-
- /*
- * Coprocessor instructions
- * 1111 11xx xxxx xxxx xxxx xxxx xxxx xxxx
- */
- DECODE_END
-};
-#ifdef CONFIG_ARM_KPROBES_TEST_MODULE
-EXPORT_SYMBOL_GPL(kprobe_decode_thumb32_table);
-#endif
+/* t16 thumb actions */
static void __kprobes
-t16_simulate_bxblx(struct kprobe *p, struct pt_regs *regs)
+t16_simulate_bxblx(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long pc = thumb_probe_pc(p);
+ unsigned long pc = regs->ARM_pc + 2;
int rm = (insn >> 3) & 0xf;
unsigned long rmv = (rm == 15) ? pc : regs->uregs[rm];
if (insn & (1 << 7)) /* BLX ? */
- regs->ARM_lr = (unsigned long)p->addr + 2;
+ regs->ARM_lr = regs->ARM_pc | 1;
bx_write_pc(rmv, regs);
}
static void __kprobes
-t16_simulate_ldr_literal(struct kprobe *p, struct pt_regs *regs)
+t16_simulate_ldr_literal(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long* base = (unsigned long *)(thumb_probe_pc(p) & ~3);
+ unsigned long *base = (unsigned long *)((regs->ARM_pc + 2) & ~3);
long index = insn & 0xff;
int rt = (insn >> 8) & 0x7;
regs->uregs[rt] = base[index];
}
static void __kprobes
-t16_simulate_ldrstr_sp_relative(struct kprobe *p, struct pt_regs *regs)
+t16_simulate_ldrstr_sp_relative(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
unsigned long* base = (unsigned long *)regs->ARM_sp;
long index = insn & 0xff;
int rt = (insn >> 8) & 0x7;
@@ -986,20 +342,20 @@ t16_simulate_ldrstr_sp_relative(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-t16_simulate_reladr(struct kprobe *p, struct pt_regs *regs)
+t16_simulate_reladr(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
unsigned long base = (insn & 0x800) ? regs->ARM_sp
- : (thumb_probe_pc(p) & ~3);
+ : ((regs->ARM_pc + 2) & ~3);
long offset = insn & 0xff;
int rt = (insn >> 8) & 0x7;
regs->uregs[rt] = base + offset * 4;
}
static void __kprobes
-t16_simulate_add_sp_imm(struct kprobe *p, struct pt_regs *regs)
+t16_simulate_add_sp_imm(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
long imm = insn & 0x7f;
if (insn & 0x80) /* SUB */
regs->ARM_sp -= imm * 4;
@@ -1008,21 +364,22 @@ t16_simulate_add_sp_imm(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-t16_simulate_cbz(struct kprobe *p, struct pt_regs *regs)
+t16_simulate_cbz(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
int rn = insn & 0x7;
- kprobe_opcode_t nonzero = regs->uregs[rn] ? insn : ~insn;
+ probes_opcode_t nonzero = regs->uregs[rn] ? insn : ~insn;
if (nonzero & 0x800) {
long i = insn & 0x200;
long imm5 = insn & 0xf8;
- unsigned long pc = thumb_probe_pc(p);
+ unsigned long pc = regs->ARM_pc + 2;
regs->ARM_pc = pc + (i >> 3) + (imm5 >> 2);
}
}
static void __kprobes
-t16_simulate_it(struct kprobe *p, struct pt_regs *regs)
+t16_simulate_it(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
/*
* The 8 IT state bits are split into two parts in CPSR:
@@ -1030,7 +387,6 @@ t16_simulate_it(struct kprobe *p, struct pt_regs *regs)
* ITSTATE<7:2> are in CPSR<15:10>
* The new IT state is in the lower byte of insn.
*/
- kprobe_opcode_t insn = p->opcode;
unsigned long cpsr = regs->ARM_cpsr;
cpsr &= ~PSR_IT_MASK;
cpsr |= (insn & 0xfc) << 8;
@@ -1039,50 +395,54 @@ t16_simulate_it(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-t16_singlestep_it(struct kprobe *p, struct pt_regs *regs)
+t16_singlestep_it(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
regs->ARM_pc += 2;
- t16_simulate_it(p, regs);
+ t16_simulate_it(insn, asi, regs);
}
-static enum kprobe_insn __kprobes
-t16_decode_it(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+static enum probes_insn __kprobes
+t16_decode_it(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *d)
{
asi->insn_singlestep = t16_singlestep_it;
return INSN_GOOD_NO_SLOT;
}
static void __kprobes
-t16_simulate_cond_branch(struct kprobe *p, struct pt_regs *regs)
+t16_simulate_cond_branch(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long pc = thumb_probe_pc(p);
+ unsigned long pc = regs->ARM_pc + 2;
long offset = insn & 0x7f;
offset -= insn & 0x80; /* Apply sign bit */
regs->ARM_pc = pc + (offset * 2);
}
-static enum kprobe_insn __kprobes
-t16_decode_cond_branch(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+static enum probes_insn __kprobes
+t16_decode_cond_branch(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *d)
{
int cc = (insn >> 8) & 0xf;
- asi->insn_check_cc = kprobe_condition_checks[cc];
+ asi->insn_check_cc = probes_condition_checks[cc];
asi->insn_handler = t16_simulate_cond_branch;
return INSN_GOOD_NO_SLOT;
}
static void __kprobes
-t16_simulate_branch(struct kprobe *p, struct pt_regs *regs)
+t16_simulate_branch(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long pc = thumb_probe_pc(p);
+ unsigned long pc = regs->ARM_pc + 2;
long offset = insn & 0x3ff;
offset -= insn & 0x400; /* Apply sign bit */
regs->ARM_pc = pc + (offset * 2);
}
static unsigned long __kprobes
-t16_emulate_loregs(struct kprobe *p, struct pt_regs *regs)
+t16_emulate_loregs(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
unsigned long oldcpsr = regs->ARM_cpsr;
unsigned long newcpsr;
@@ -1095,7 +455,7 @@ t16_emulate_loregs(struct kprobe *p, struct pt_regs *regs)
"mrs %[newcpsr], cpsr \n\t"
: [newcpsr] "=r" (newcpsr)
: [oldcpsr] "r" (oldcpsr), [regs] "r" (regs),
- [fn] "r" (p->ainsn.insn_fn)
+ [fn] "r" (asi->insn_fn)
: "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
"lr", "memory", "cc"
);
@@ -1104,24 +464,26 @@ t16_emulate_loregs(struct kprobe *p, struct pt_regs *regs)
}
static void __kprobes
-t16_emulate_loregs_rwflags(struct kprobe *p, struct pt_regs *regs)
+t16_emulate_loregs_rwflags(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- regs->ARM_cpsr = t16_emulate_loregs(p, regs);
+ regs->ARM_cpsr = t16_emulate_loregs(insn, asi, regs);
}
static void __kprobes
-t16_emulate_loregs_noitrwflags(struct kprobe *p, struct pt_regs *regs)
+t16_emulate_loregs_noitrwflags(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- unsigned long cpsr = t16_emulate_loregs(p, regs);
+ unsigned long cpsr = t16_emulate_loregs(insn, asi, regs);
if (!in_it_block(cpsr))
regs->ARM_cpsr = cpsr;
}
static void __kprobes
-t16_emulate_hiregs(struct kprobe *p, struct pt_regs *regs)
+t16_emulate_hiregs(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
- kprobe_opcode_t insn = p->opcode;
- unsigned long pc = thumb_probe_pc(p);
+ unsigned long pc = regs->ARM_pc + 2;
int rdn = (insn & 0x7) | ((insn & 0x80) >> 4);
int rm = (insn >> 3) & 0xf;
@@ -1137,7 +499,7 @@ t16_emulate_hiregs(struct kprobe *p, struct pt_regs *regs)
"blx %[fn] \n\t"
"mrs %[cpsr], cpsr \n\t"
: "=r" (rdnv), [cpsr] "=r" (cpsr)
- : "0" (rdnv), "r" (rmv), "1" (cpsr), [fn] "r" (p->ainsn.insn_fn)
+ : "0" (rdnv), "r" (rmv), "1" (cpsr), [fn] "r" (asi->insn_fn)
: "lr", "memory", "cc"
);
@@ -1148,18 +510,20 @@ t16_emulate_hiregs(struct kprobe *p, struct pt_regs *regs)
regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK);
}
-static enum kprobe_insn __kprobes
-t16_decode_hiregs(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+static enum probes_insn __kprobes
+t16_decode_hiregs(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *d)
{
insn &= ~0x00ff;
insn |= 0x001; /* Set Rdn = R1 and Rm = R0 */
- ((u16 *)asi->insn)[0] = insn;
+ ((u16 *)asi->insn)[0] = __opcode_to_mem_thumb16(insn);
asi->insn_handler = t16_emulate_hiregs;
return INSN_GOOD;
}
static void __kprobes
-t16_emulate_push(struct kprobe *p, struct pt_regs *regs)
+t16_emulate_push(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
__asm__ __volatile__ (
"ldr r9, [%[regs], #13*4] \n\t"
@@ -1168,28 +532,32 @@ t16_emulate_push(struct kprobe *p, struct pt_regs *regs)
"blx %[fn] \n\t"
"str r9, [%[regs], #13*4] \n\t"
:
- : [regs] "r" (regs), [fn] "r" (p->ainsn.insn_fn)
+ : [regs] "r" (regs), [fn] "r" (asi->insn_fn)
: "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9",
"lr", "memory", "cc"
);
}
-static enum kprobe_insn __kprobes
-t16_decode_push(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+static enum probes_insn __kprobes
+t16_decode_push(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *d)
{
/*
* To simulate a PUSH we use a Thumb-2 "STMDB R9!, {registers}"
* and call it with R9=SP and LR in the register list represented
* by R8.
*/
- ((u16 *)asi->insn)[0] = 0xe929; /* 1st half STMDB R9!,{} */
- ((u16 *)asi->insn)[1] = insn & 0x1ff; /* 2nd half (register list) */
+ /* 1st half STMDB R9!,{} */
+ ((u16 *)asi->insn)[0] = __opcode_to_mem_thumb16(0xe929);
+ /* 2nd half (register list) */
+ ((u16 *)asi->insn)[1] = __opcode_to_mem_thumb16(insn & 0x1ff);
asi->insn_handler = t16_emulate_push;
return INSN_GOOD;
}
static void __kprobes
-t16_emulate_pop_nopc(struct kprobe *p, struct pt_regs *regs)
+t16_emulate_pop_nopc(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
__asm__ __volatile__ (
"ldr r9, [%[regs], #13*4] \n\t"
@@ -1198,14 +566,15 @@ t16_emulate_pop_nopc(struct kprobe *p, struct pt_regs *regs)
"stmia %[regs], {r0-r7} \n\t"
"str r9, [%[regs], #13*4] \n\t"
:
- : [regs] "r" (regs), [fn] "r" (p->ainsn.insn_fn)
+ : [regs] "r" (regs), [fn] "r" (asi->insn_fn)
: "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r9",
"lr", "memory", "cc"
);
}
static void __kprobes
-t16_emulate_pop_pc(struct kprobe *p, struct pt_regs *regs)
+t16_emulate_pop_pc(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
{
register unsigned long pc asm("r8");
@@ -1216,7 +585,7 @@ t16_emulate_pop_pc(struct kprobe *p, struct pt_regs *regs)
"stmia %[regs], {r0-r7} \n\t"
"str r9, [%[regs], #13*4] \n\t"
: "=r" (pc)
- : [regs] "r" (regs), [fn] "r" (p->ainsn.insn_fn)
+ : [regs] "r" (regs), [fn] "r" (asi->insn_fn)
: "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r9",
"lr", "memory", "cc"
);
@@ -1224,246 +593,74 @@ t16_emulate_pop_pc(struct kprobe *p, struct pt_regs *regs)
bx_write_pc(pc, regs);
}
-static enum kprobe_insn __kprobes
-t16_decode_pop(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+static enum probes_insn __kprobes
+t16_decode_pop(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *d)
{
/*
	 * To simulate a POP we use a Thumb-2 "LDMIA R9!, {registers}"
* and call it with R9=SP and PC in the register list represented
* by R8.
*/
- ((u16 *)asi->insn)[0] = 0xe8b9; /* 1st half LDMIA R9!,{} */
- ((u16 *)asi->insn)[1] = insn & 0x1ff; /* 2nd half (register list) */
+ /* 1st half LDMIA R9!,{} */
+ ((u16 *)asi->insn)[0] = __opcode_to_mem_thumb16(0xe8b9);
+ /* 2nd half (register list) */
+ ((u16 *)asi->insn)[1] = __opcode_to_mem_thumb16(insn & 0x1ff);
asi->insn_handler = insn & 0x100 ? t16_emulate_pop_pc
: t16_emulate_pop_nopc;
return INSN_GOOD;
}
-static const union decode_item t16_table_1011[] = {
- /* Miscellaneous 16-bit instructions */
-
- /* ADD (SP plus immediate) 1011 0000 0xxx xxxx */
- /* SUB (SP minus immediate) 1011 0000 1xxx xxxx */
- DECODE_SIMULATE (0xff00, 0xb000, t16_simulate_add_sp_imm),
-
- /* CBZ 1011 00x1 xxxx xxxx */
- /* CBNZ 1011 10x1 xxxx xxxx */
- DECODE_SIMULATE (0xf500, 0xb100, t16_simulate_cbz),
-
- /* SXTH 1011 0010 00xx xxxx */
- /* SXTB 1011 0010 01xx xxxx */
- /* UXTH 1011 0010 10xx xxxx */
- /* UXTB 1011 0010 11xx xxxx */
- /* REV 1011 1010 00xx xxxx */
- /* REV16 1011 1010 01xx xxxx */
- /* ??? 1011 1010 10xx xxxx */
- /* REVSH 1011 1010 11xx xxxx */
- DECODE_REJECT (0xffc0, 0xba80),
- DECODE_EMULATE (0xf500, 0xb000, t16_emulate_loregs_rwflags),
-
- /* PUSH 1011 010x xxxx xxxx */
- DECODE_CUSTOM (0xfe00, 0xb400, t16_decode_push),
- /* POP 1011 110x xxxx xxxx */
- DECODE_CUSTOM (0xfe00, 0xbc00, t16_decode_pop),
-
- /*
- * If-Then, and hints
- * 1011 1111 xxxx xxxx
- */
-
- /* YIELD 1011 1111 0001 0000 */
- DECODE_OR (0xffff, 0xbf10),
- /* SEV 1011 1111 0100 0000 */
- DECODE_EMULATE (0xffff, 0xbf40, kprobe_emulate_none),
- /* NOP 1011 1111 0000 0000 */
- /* WFE 1011 1111 0010 0000 */
- /* WFI 1011 1111 0011 0000 */
- DECODE_SIMULATE (0xffcf, 0xbf00, kprobe_simulate_nop),
- /* Unassigned hints 1011 1111 xxxx 0000 */
- DECODE_REJECT (0xff0f, 0xbf00),
- /* IT 1011 1111 xxxx xxxx */
- DECODE_CUSTOM (0xff00, 0xbf00, t16_decode_it),
-
- /* SETEND 1011 0110 010x xxxx */
- /* CPS 1011 0110 011x xxxx */
- /* BKPT 1011 1110 xxxx xxxx */
- /* And unallocated instructions... */
- DECODE_END
+const union decode_action kprobes_t16_actions[NUM_PROBES_T16_ACTIONS] = {
+ [PROBES_T16_ADD_SP] = {.handler = t16_simulate_add_sp_imm},
+ [PROBES_T16_CBZ] = {.handler = t16_simulate_cbz},
+ [PROBES_T16_SIGN_EXTEND] = {.handler = t16_emulate_loregs_rwflags},
+ [PROBES_T16_PUSH] = {.decoder = t16_decode_push},
+ [PROBES_T16_POP] = {.decoder = t16_decode_pop},
+ [PROBES_T16_SEV] = {.handler = probes_emulate_none},
+ [PROBES_T16_WFE] = {.handler = probes_simulate_nop},
+ [PROBES_T16_IT] = {.decoder = t16_decode_it},
+ [PROBES_T16_CMP] = {.handler = t16_emulate_loregs_rwflags},
+ [PROBES_T16_ADDSUB] = {.handler = t16_emulate_loregs_noitrwflags},
+ [PROBES_T16_LOGICAL] = {.handler = t16_emulate_loregs_noitrwflags},
+ [PROBES_T16_LDR_LIT] = {.handler = t16_simulate_ldr_literal},
+ [PROBES_T16_BLX] = {.handler = t16_simulate_bxblx},
+ [PROBES_T16_HIREGOPS] = {.decoder = t16_decode_hiregs},
+ [PROBES_T16_LDRHSTRH] = {.handler = t16_emulate_loregs_rwflags},
+ [PROBES_T16_LDRSTR] = {.handler = t16_simulate_ldrstr_sp_relative},
+ [PROBES_T16_ADR] = {.handler = t16_simulate_reladr},
+ [PROBES_T16_LDMSTM] = {.handler = t16_emulate_loregs_rwflags},
+ [PROBES_T16_BRANCH_COND] = {.decoder = t16_decode_cond_branch},
+ [PROBES_T16_BRANCH] = {.handler = t16_simulate_branch},
};
-const union decode_item kprobe_decode_thumb16_table[] = {
-
- /*
- * Shift (immediate), add, subtract, move, and compare
- * 00xx xxxx xxxx xxxx
- */
-
- /* CMP (immediate) 0010 1xxx xxxx xxxx */
- DECODE_EMULATE (0xf800, 0x2800, t16_emulate_loregs_rwflags),
-
- /* ADD (register) 0001 100x xxxx xxxx */
- /* SUB (register) 0001 101x xxxx xxxx */
- /* LSL (immediate) 0000 0xxx xxxx xxxx */
- /* LSR (immediate) 0000 1xxx xxxx xxxx */
- /* ASR (immediate) 0001 0xxx xxxx xxxx */
- /* ADD (immediate, Thumb) 0001 110x xxxx xxxx */
- /* SUB (immediate, Thumb) 0001 111x xxxx xxxx */
- /* MOV (immediate) 0010 0xxx xxxx xxxx */
- /* ADD (immediate, Thumb) 0011 0xxx xxxx xxxx */
- /* SUB (immediate, Thumb) 0011 1xxx xxxx xxxx */
- DECODE_EMULATE (0xc000, 0x0000, t16_emulate_loregs_noitrwflags),
-
- /*
- * 16-bit Thumb data-processing instructions
- * 0100 00xx xxxx xxxx
- */
-
- /* TST (register) 0100 0010 00xx xxxx */
- DECODE_EMULATE (0xffc0, 0x4200, t16_emulate_loregs_rwflags),
- /* CMP (register) 0100 0010 10xx xxxx */
- /* CMN (register) 0100 0010 11xx xxxx */
- DECODE_EMULATE (0xff80, 0x4280, t16_emulate_loregs_rwflags),
- /* AND (register) 0100 0000 00xx xxxx */
- /* EOR (register) 0100 0000 01xx xxxx */
- /* LSL (register) 0100 0000 10xx xxxx */
- /* LSR (register) 0100 0000 11xx xxxx */
- /* ASR (register) 0100 0001 00xx xxxx */
- /* ADC (register) 0100 0001 01xx xxxx */
- /* SBC (register) 0100 0001 10xx xxxx */
- /* ROR (register) 0100 0001 11xx xxxx */
- /* RSB (immediate) 0100 0010 01xx xxxx */
- /* ORR (register) 0100 0011 00xx xxxx */
- /* MUL 0100 0011 00xx xxxx */
- /* BIC (register) 0100 0011 10xx xxxx */
- /* MVN (register) 0100 0011 10xx xxxx */
- DECODE_EMULATE (0xfc00, 0x4000, t16_emulate_loregs_noitrwflags),
-
- /*
- * Special data instructions and branch and exchange
- * 0100 01xx xxxx xxxx
- */
-
- /* BLX pc 0100 0111 1111 1xxx */
- DECODE_REJECT (0xfff8, 0x47f8),
-
- /* BX (register) 0100 0111 0xxx xxxx */
- /* BLX (register) 0100 0111 1xxx xxxx */
- DECODE_SIMULATE (0xff00, 0x4700, t16_simulate_bxblx),
-
- /* ADD pc, pc 0100 0100 1111 1111 */
- DECODE_REJECT (0xffff, 0x44ff),
-
- /* ADD (register) 0100 0100 xxxx xxxx */
- /* CMP (register) 0100 0101 xxxx xxxx */
- /* MOV (register) 0100 0110 xxxx xxxx */
- DECODE_CUSTOM (0xfc00, 0x4400, t16_decode_hiregs),
-
- /*
- * Load from Literal Pool
- * LDR (literal) 0100 1xxx xxxx xxxx
- */
- DECODE_SIMULATE (0xf800, 0x4800, t16_simulate_ldr_literal),
-
- /*
- * 16-bit Thumb Load/store instructions
- * 0101 xxxx xxxx xxxx
- * 011x xxxx xxxx xxxx
- * 100x xxxx xxxx xxxx
- */
-
- /* STR (register) 0101 000x xxxx xxxx */
- /* STRH (register) 0101 001x xxxx xxxx */
- /* STRB (register) 0101 010x xxxx xxxx */
- /* LDRSB (register) 0101 011x xxxx xxxx */
- /* LDR (register) 0101 100x xxxx xxxx */
- /* LDRH (register) 0101 101x xxxx xxxx */
- /* LDRB (register) 0101 110x xxxx xxxx */
- /* LDRSH (register) 0101 111x xxxx xxxx */
- /* STR (immediate, Thumb) 0110 0xxx xxxx xxxx */
- /* LDR (immediate, Thumb) 0110 1xxx xxxx xxxx */
- /* STRB (immediate, Thumb) 0111 0xxx xxxx xxxx */
- /* LDRB (immediate, Thumb) 0111 1xxx xxxx xxxx */
- DECODE_EMULATE (0xc000, 0x4000, t16_emulate_loregs_rwflags),
- /* STRH (immediate, Thumb) 1000 0xxx xxxx xxxx */
- /* LDRH (immediate, Thumb) 1000 1xxx xxxx xxxx */
- DECODE_EMULATE (0xf000, 0x8000, t16_emulate_loregs_rwflags),
- /* STR (immediate, Thumb) 1001 0xxx xxxx xxxx */
- /* LDR (immediate, Thumb) 1001 1xxx xxxx xxxx */
- DECODE_SIMULATE (0xf000, 0x9000, t16_simulate_ldrstr_sp_relative),
-
- /*
- * Generate PC-/SP-relative address
- * ADR (literal) 1010 0xxx xxxx xxxx
- * ADD (SP plus immediate) 1010 1xxx xxxx xxxx
- */
- DECODE_SIMULATE (0xf000, 0xa000, t16_simulate_reladr),
-
- /*
- * Miscellaneous 16-bit instructions
- * 1011 xxxx xxxx xxxx
- */
- DECODE_TABLE (0xf000, 0xb000, t16_table_1011),
-
- /* STM 1100 0xxx xxxx xxxx */
- /* LDM 1100 1xxx xxxx xxxx */
- DECODE_EMULATE (0xf000, 0xc000, t16_emulate_loregs_rwflags),
-
- /*
- * Conditional branch, and Supervisor Call
- */
-
- /* Permanently UNDEFINED 1101 1110 xxxx xxxx */
- /* SVC 1101 1111 xxxx xxxx */
- DECODE_REJECT (0xfe00, 0xde00),
-
- /* Conditional branch 1101 xxxx xxxx xxxx */
- DECODE_CUSTOM (0xf000, 0xd000, t16_decode_cond_branch),
-
- /*
- * Unconditional branch
- * B 1110 0xxx xxxx xxxx
- */
- DECODE_SIMULATE (0xf800, 0xe000, t16_simulate_branch),
-
- DECODE_END
+const union decode_action kprobes_t32_actions[NUM_PROBES_T32_ACTIONS] = {
+ [PROBES_T32_LDMSTM] = {.decoder = t32_decode_ldmstm},
+ [PROBES_T32_LDRDSTRD] = {.handler = t32_emulate_ldrdstrd},
+ [PROBES_T32_TABLE_BRANCH] = {.handler = t32_simulate_table_branch},
+ [PROBES_T32_TST] = {.handler = t32_emulate_rd8rn16rm0_rwflags},
+ [PROBES_T32_MOV] = {.handler = t32_emulate_rd8rn16rm0_rwflags},
+ [PROBES_T32_ADDSUB] = {.handler = t32_emulate_rd8rn16rm0_rwflags},
+ [PROBES_T32_LOGICAL] = {.handler = t32_emulate_rd8rn16rm0_rwflags},
+ [PROBES_T32_CMP] = {.handler = t32_emulate_rd8rn16rm0_rwflags},
+ [PROBES_T32_ADDWSUBW_PC] = {.handler = t32_emulate_rd8pc16_noflags,},
+ [PROBES_T32_ADDWSUBW] = {.handler = t32_emulate_rd8rn16_noflags},
+ [PROBES_T32_MOVW] = {.handler = t32_emulate_rd8rn16_noflags},
+ [PROBES_T32_SAT] = {.handler = t32_emulate_rd8rn16rm0_rwflags},
+ [PROBES_T32_BITFIELD] = {.handler = t32_emulate_rd8rn16_noflags},
+ [PROBES_T32_SEV] = {.handler = probes_emulate_none},
+ [PROBES_T32_WFE] = {.handler = probes_simulate_nop},
+ [PROBES_T32_MRS] = {.handler = t32_simulate_mrs},
+ [PROBES_T32_BRANCH_COND] = {.decoder = t32_decode_cond_branch},
+ [PROBES_T32_BRANCH] = {.handler = t32_simulate_branch},
+ [PROBES_T32_PLDI] = {.handler = probes_simulate_nop},
+ [PROBES_T32_LDR_LIT] = {.handler = t32_simulate_ldr_literal},
+ [PROBES_T32_LDRSTR] = {.handler = t32_emulate_ldrstr},
+ [PROBES_T32_SIGN_EXTEND] = {.handler = t32_emulate_rd8rn16rm0_rwflags},
+ [PROBES_T32_MEDIA] = {.handler = t32_emulate_rd8rn16rm0_rwflags},
+ [PROBES_T32_REVERSE] = {.handler = t32_emulate_rd8rn16_noflags},
+ [PROBES_T32_MUL_ADD] = {.handler = t32_emulate_rd8rn16rm0_rwflags},
+ [PROBES_T32_MUL_ADD2] = {.handler = t32_emulate_rd8rn16rm0ra12_noflags},
+ [PROBES_T32_MUL_ADD_LONG] = {
+ .handler = t32_emulate_rdlo12rdhi8rn16rm0_noflags},
};
-#ifdef CONFIG_ARM_KPROBES_TEST_MODULE
-EXPORT_SYMBOL_GPL(kprobe_decode_thumb16_table);
-#endif
-
-static unsigned long __kprobes thumb_check_cc(unsigned long cpsr)
-{
- if (unlikely(in_it_block(cpsr)))
- return kprobe_condition_checks[current_cond(cpsr)](cpsr);
- return true;
-}
-
-static void __kprobes thumb16_singlestep(struct kprobe *p, struct pt_regs *regs)
-{
- regs->ARM_pc += 2;
- p->ainsn.insn_handler(p, regs);
- regs->ARM_cpsr = it_advance(regs->ARM_cpsr);
-}
-
-static void __kprobes thumb32_singlestep(struct kprobe *p, struct pt_regs *regs)
-{
- regs->ARM_pc += 4;
- p->ainsn.insn_handler(p, regs);
- regs->ARM_cpsr = it_advance(regs->ARM_cpsr);
-}
-
-enum kprobe_insn __kprobes
-thumb16_kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- asi->insn_singlestep = thumb16_singlestep;
- asi->insn_check_cc = thumb_check_cc;
- return kprobe_decode_insn(insn, asi, kprobe_decode_thumb16_table, true);
-}
-
-enum kprobe_insn __kprobes
-thumb32_kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- asi->insn_singlestep = thumb32_singlestep;
- asi->insn_check_cc = thumb_check_cc;
- return kprobe_decode_insn(insn, asi, kprobe_decode_thumb32_table, true);
-}
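
The conversion above leaves the Thumb decode tables producing indices into flat action arrays, where each union decode_action entry carries either a .handler run when the probe fires or a .decoder that finishes instruction setup. As a minimal sketch (not part of the patch; the index lookup and helper name are hypothetical, and the types are assumed to come from the local probes.h), dispatching through such a table looks like:

	#include <linux/ptrace.h>
	#include "probes.h"	/* union decode_action, probes_opcode_t, arch_probes_insn */

	/*
	 * Hypothetical dispatch helper: 'idx' would come from matching the
	 * instruction against the decode tables; only the union member layout
	 * ({.handler = ...} vs {.decoder = ...}) is taken from the arrays above.
	 */
	static void example_dispatch(probes_opcode_t insn,
				     struct arch_probes_insn *asi,
				     struct pt_regs *regs,
				     const union decode_action *actions,
				     int idx)
	{
		actions[idx].handler(insn, asi, regs);	/* e.g. t16_simulate_branch */
	}
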
diff --git a/arch/arm/kernel/kprobes.c b/arch/arm/kernel/kprobes.c
index 170e9f34003..6d644202c8d 100644
--- a/arch/arm/kernel/kprobes.c
+++ b/arch/arm/kernel/kprobes.c
@@ -26,9 +26,14 @@
#include <linux/stop_machine.h>
#include <linux/stringify.h>
#include <asm/traps.h>
+#include <asm/opcodes.h>
#include <asm/cacheflush.h>
+#include <linux/percpu.h>
+#include <linux/bug.h>
#include "kprobes.h"
+#include "probes-arm.h"
+#include "probes-thumb.h"
#include "patch.h"
#define MIN_STACK_SIZE(addr) \
@@ -54,6 +59,7 @@ int __kprobes arch_prepare_kprobe(struct kprobe *p)
unsigned long addr = (unsigned long)p->addr;
bool thumb;
kprobe_decode_insn_t *decode_insn;
+ const union decode_action *actions;
int is;
if (in_exception_text(addr))
@@ -62,25 +68,29 @@ int __kprobes arch_prepare_kprobe(struct kprobe *p)
#ifdef CONFIG_THUMB2_KERNEL
thumb = true;
addr &= ~1; /* Bit 0 would normally be set to indicate Thumb code */
- insn = ((u16 *)addr)[0];
+ insn = __mem_to_opcode_thumb16(((u16 *)addr)[0]);
if (is_wide_instruction(insn)) {
- insn <<= 16;
- insn |= ((u16 *)addr)[1];
- decode_insn = thumb32_kprobe_decode_insn;
- } else
- decode_insn = thumb16_kprobe_decode_insn;
+ u16 inst2 = __mem_to_opcode_thumb16(((u16 *)addr)[1]);
+ insn = __opcode_thumb32_compose(insn, inst2);
+ decode_insn = thumb32_probes_decode_insn;
+ actions = kprobes_t32_actions;
+ } else {
+ decode_insn = thumb16_probes_decode_insn;
+ actions = kprobes_t16_actions;
+ }
#else /* !CONFIG_THUMB2_KERNEL */
thumb = false;
if (addr & 0x3)
return -EINVAL;
- insn = *p->addr;
- decode_insn = arm_kprobe_decode_insn;
+ insn = __mem_to_opcode_arm(*p->addr);
+ decode_insn = arm_probes_decode_insn;
+ actions = kprobes_arm_actions;
#endif
p->opcode = insn;
p->ainsn.insn = tmp_insn;
- switch ((*decode_insn)(insn, &p->ainsn)) {
+ switch ((*decode_insn)(insn, &p->ainsn, true, actions)) {
case INSN_REJECTED: /* not supported */
return -EINVAL;
@@ -92,7 +102,7 @@ int __kprobes arch_prepare_kprobe(struct kprobe *p)
p->ainsn.insn[is] = tmp_insn[is];
flush_insns(p->ainsn.insn,
sizeof(p->ainsn.insn[0]) * MAX_INSN_SIZE);
- p->ainsn.insn_fn = (kprobe_insn_fn_t *)
+ p->ainsn.insn_fn = (probes_insn_fn_t *)
((uintptr_t)p->ainsn.insn | thumb);
break;
@@ -171,13 +181,13 @@ static void __kprobes save_previous_kprobe(struct kprobe_ctlblk *kcb)
static void __kprobes restore_previous_kprobe(struct kprobe_ctlblk *kcb)
{
- __get_cpu_var(current_kprobe) = kcb->prev_kprobe.kp;
+ __this_cpu_write(current_kprobe, kcb->prev_kprobe.kp);
kcb->kprobe_status = kcb->prev_kprobe.status;
}
static void __kprobes set_current_kprobe(struct kprobe *p)
{
- __get_cpu_var(current_kprobe) = p;
+ __this_cpu_write(current_kprobe, p);
}
static void __kprobes
@@ -197,7 +207,7 @@ singlestep_skip(struct kprobe *p, struct pt_regs *regs)
static inline void __kprobes
singlestep(struct kprobe *p, struct pt_regs *regs, struct kprobe_ctlblk *kcb)
{
- p->ainsn.insn_singlestep(p, regs);
+ p->ainsn.insn_singlestep(p->opcode, &p->ainsn, regs);
}
/*
@@ -421,10 +431,10 @@ static __used __kprobes void *trampoline_handler(struct pt_regs *regs)
continue;
if (ri->rp && ri->rp->handler) {
- __get_cpu_var(current_kprobe) = &ri->rp->kp;
+ __this_cpu_write(current_kprobe, &ri->rp->kp);
get_kprobe_ctlblk()->kprobe_status = KPROBE_HIT_ACTIVE;
ri->rp->handler(ri, regs);
- __get_cpu_var(current_kprobe) = NULL;
+ __this_cpu_write(current_kprobe, NULL);
}
orig_ret_address = (unsigned long)ri->ret_addr;
@@ -607,7 +617,7 @@ static struct undef_hook kprobes_arm_break_hook = {
int __init arch_init_kprobes()
{
- arm_kprobe_decode_init();
+ arm_probes_decode_init();
#ifdef CONFIG_THUMB2_KERNEL
register_undef_hook(&kprobes_thumb16_break_hook);
register_undef_hook(&kprobes_thumb32_break_hook);
diff --git a/arch/arm/kernel/kprobes.h b/arch/arm/kernel/kprobes.h
index 38945f78f9f..9a2712ecefc 100644
--- a/arch/arm/kernel/kprobes.h
+++ b/arch/arm/kernel/kprobes.h
@@ -19,6 +19,8 @@
#ifndef _ARM_KERNEL_KPROBES_H
#define _ARM_KERNEL_KPROBES_H
+#include "probes.h"
+
/*
* These undefined instructions must be unique and
* reserved solely for kprobes' use.
@@ -27,402 +29,24 @@
#define KPROBE_THUMB16_BREAKPOINT_INSTRUCTION 0xde18
#define KPROBE_THUMB32_BREAKPOINT_INSTRUCTION 0xf7f0a018
+enum probes_insn __kprobes
+kprobe_decode_ldmstm(kprobe_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *h);
-enum kprobe_insn {
- INSN_REJECTED,
- INSN_GOOD,
- INSN_GOOD_NO_SLOT
-};
-
-typedef enum kprobe_insn (kprobe_decode_insn_t)(kprobe_opcode_t,
- struct arch_specific_insn *);
+typedef enum probes_insn (kprobe_decode_insn_t)(probes_opcode_t,
+ struct arch_probes_insn *,
+ bool,
+ const union decode_action *);
#ifdef CONFIG_THUMB2_KERNEL
-enum kprobe_insn thumb16_kprobe_decode_insn(kprobe_opcode_t,
- struct arch_specific_insn *);
-enum kprobe_insn thumb32_kprobe_decode_insn(kprobe_opcode_t,
- struct arch_specific_insn *);
+extern const union decode_action kprobes_t32_actions[];
+extern const union decode_action kprobes_t16_actions[];
#else /* !CONFIG_THUMB2_KERNEL */
-enum kprobe_insn arm_kprobe_decode_insn(kprobe_opcode_t,
- struct arch_specific_insn *);
-#endif
-
-void __init arm_kprobe_decode_init(void);
-
-extern kprobe_check_cc * const kprobe_condition_checks[16];
-
-
-#if __LINUX_ARM_ARCH__ >= 7
-
-/* str_pc_offset is architecturally defined from ARMv7 onwards */
-#define str_pc_offset 8
-#define find_str_pc_offset()
-
-#else /* __LINUX_ARM_ARCH__ < 7 */
-
-/* We need a run-time check to determine str_pc_offset */
-extern int str_pc_offset;
-void __init find_str_pc_offset(void);
+extern const union decode_action kprobes_arm_actions[];
#endif
-
-/*
- * Update ITSTATE after normal execution of an IT block instruction.
- *
- * The 8 IT state bits are split into two parts in CPSR:
- * ITSTATE<1:0> are in CPSR<26:25>
- * ITSTATE<7:2> are in CPSR<15:10>
- */
-static inline unsigned long it_advance(unsigned long cpsr)
- {
- if ((cpsr & 0x06000400) == 0) {
- /* ITSTATE<2:0> == 0 means end of IT block, so clear IT state */
- cpsr &= ~PSR_IT_MASK;
- } else {
- /* We need to shift left ITSTATE<4:0> */
- const unsigned long mask = 0x06001c00; /* Mask ITSTATE<4:0> */
- unsigned long it = cpsr & mask;
- it <<= 1;
- it |= it >> (27 - 10); /* Carry ITSTATE<2> to correct place */
- it &= mask;
- cpsr &= ~mask;
- cpsr |= it;
- }
- return cpsr;
-}
-
-static inline void __kprobes bx_write_pc(long pcv, struct pt_regs *regs)
-{
- long cpsr = regs->ARM_cpsr;
- if (pcv & 0x1) {
- cpsr |= PSR_T_BIT;
- pcv &= ~0x1;
- } else {
- cpsr &= ~PSR_T_BIT;
- pcv &= ~0x2; /* Avoid UNPREDICTABLE address alignment */
- }
- regs->ARM_cpsr = cpsr;
- regs->ARM_pc = pcv;
-}
-
-
-#if __LINUX_ARM_ARCH__ >= 6
-
-/* Kernels built for >= ARMv6 should never run on <= ARMv5 hardware, so... */
-#define load_write_pc_interworks true
-#define test_load_write_pc_interworking()
-
-#else /* __LINUX_ARM_ARCH__ < 6 */
-
-/* We need run-time testing to determine if load_write_pc() should interwork. */
-extern bool load_write_pc_interworks;
-void __init test_load_write_pc_interworking(void);
-
-#endif
-
-static inline void __kprobes load_write_pc(long pcv, struct pt_regs *regs)
-{
- if (load_write_pc_interworks)
- bx_write_pc(pcv, regs);
- else
- regs->ARM_pc = pcv;
-}
-
-
-#if __LINUX_ARM_ARCH__ >= 7
-
-#define alu_write_pc_interworks true
-#define test_alu_write_pc_interworking()
-
-#elif __LINUX_ARM_ARCH__ <= 5
-
-/* Kernels built for <= ARMv5 should never run on >= ARMv6 hardware, so... */
-#define alu_write_pc_interworks false
-#define test_alu_write_pc_interworking()
-
-#else /* __LINUX_ARM_ARCH__ == 6 */
-
-/* We could be an ARMv6 binary on ARMv7 hardware so we need a run-time check. */
-extern bool alu_write_pc_interworks;
-void __init test_alu_write_pc_interworking(void);
-
-#endif /* __LINUX_ARM_ARCH__ == 6 */
-
-static inline void __kprobes alu_write_pc(long pcv, struct pt_regs *regs)
-{
- if (alu_write_pc_interworks)
- bx_write_pc(pcv, regs);
- else
- regs->ARM_pc = pcv;
-}
-
-
-void __kprobes kprobe_simulate_nop(struct kprobe *p, struct pt_regs *regs);
-void __kprobes kprobe_emulate_none(struct kprobe *p, struct pt_regs *regs);
-
-enum kprobe_insn __kprobes
-kprobe_decode_ldmstm(kprobe_opcode_t insn, struct arch_specific_insn *asi);
-
-/*
- * Test if load/store instructions writeback the address register.
- * if P (bit 24) == 0 or W (bit 21) == 1
- */
-#define is_writeback(insn) ((insn ^ 0x01000000) & 0x01200000)
-
-/*
- * The following definitions and macros are used to build instruction
- * decoding tables for use by kprobe_decode_insn.
- *
- * These tables are a concatenation of entries each of which consist of one of
- * the decode_* structs. All of the fields in every type of decode structure
- * are of the union type decode_item, therefore the entire decode table can be
- * viewed as an array of these and declared like:
- *
- * static const union decode_item table_name[] = {};
- *
- * In order to construct each entry in the table, macros are used to
- * initialise a number of sequential decode_item values in a layout which
- * matches the relevant struct. E.g. DECODE_SIMULATE initialises a struct
- * decode_simulate by initialising four decode_item objects like this...
- *
- * {.bits = _type},
- * {.bits = _mask},
- * {.bits = _value},
- * {.handler = _handler},
- *
- * Initialising a specified member of the union means that the compiler
- * will produce a warning if the argument is of an incorrect type.
- *
- * Below is a list of each of the macros used to initialise entries and a
- * description of the action performed when that entry is matched to an
- * instruction. A match is found when (instruction & mask) == value.
- *
- * DECODE_TABLE(mask, value, table)
- * Instruction decoding jumps to parsing the new sub-table 'table'.
- *
- * DECODE_CUSTOM(mask, value, decoder)
- * The custom function 'decoder' is called to complete the decoding
- * of an instruction.
- *
- * DECODE_SIMULATE(mask, value, handler)
- * Set the probes instruction handler to 'handler', this will be used
- * to simulate the instruction when the probe is hit. Decoding returns
- * with INSN_GOOD_NO_SLOT.
- *
- * DECODE_EMULATE(mask, value, handler)
- * Set the probes instruction handler to 'handler', this will be used
- * to emulate the instruction when the probe is hit. The modified
- * instruction (see below) is placed in the probes instruction slot so it
- * may be called by the emulation code. Decoding returns with INSN_GOOD.
- *
- * DECODE_REJECT(mask, value)
- * Instruction decoding fails with INSN_REJECTED
- *
- * DECODE_OR(mask, value)
- * This allows the mask/value test of multiple table entries to be
- * logically ORed. Once an 'or' entry is matched the decoding action to
- * be performed is that of the next entry which isn't an 'or'. E.g.
- *
- * DECODE_OR (mask1, value1)
- * DECODE_OR (mask2, value2)
- * DECODE_SIMULATE (mask3, value3, simulation_handler)
- *
- * This means that if any of the three mask/value pairs match the
- * instruction being decoded, then 'simulation_handler' will be used
- * for it.
- *
- * Both the SIMULATE and EMULATE macros have a second form which take an
- * additional 'regs' argument.
- *
- * DECODE_SIMULATEX(mask, value, handler, regs)
- * DECODE_EMULATEX (mask, value, handler, regs)
- *
- * These are used to specify what kind of CPU register is encoded in each of the
- * least significant 5 nibbles of the instruction being decoded. The regs value
- * is specified using the REGS macro, this takes any of the REG_TYPE_* values
- * from enum decode_reg_type as arguments; only the '*' part of the name is
- * given. E.g.
- *
- * REGS(0, ANY, NOPC, 0, ANY)
- *
- * This indicates an instruction is encoded like:
- *
- * bits 19..16 ignore
- * bits 15..12 any register allowed here
- * bits 11.. 8 any register except PC allowed here
- * bits 7.. 4 ignore
- * bits 3.. 0 any register allowed here
- *
- * This register specification is checked after a decode table entry is found to
- * match an instruction (through the mask/value test). Any invalid register then
- * found in the instruction will cause decoding to fail with INSN_REJECTED. In
- * the above example this would happen if bits 11..8 of the instruction were
- * 1111, indicating R15 or PC.
- *
- * As well as checking for legal combinations of registers, this data is also
- * used to modify the registers encoded in the instructions so that an
- * emulation routine can use it. (See decode_regs() and INSN_NEW_BITS.)
- *
- * Here is a real example which matches ARM instructions of the form
- * "AND <Rd>,<Rn>,<Rm>,<shift> <Rs>"
- *
- * DECODE_EMULATEX (0x0e000090, 0x00000010, emulate_rd12rn16rm0rs8_rwflags,
- * REGS(ANY, ANY, NOPC, 0, ANY)),
- * ^ ^ ^ ^
- * Rn Rd Rs Rm
- *
- * Decoding the instruction "AND R4, R5, R6, ASL R15" will be rejected because
- * Rs == R15
- *
- * Decoding the instruction "AND R4, R5, R6, ASL R7" will be accepted and the
- * instruction will be modified to "AND R0, R2, R3, ASL R1" and then placed into
- * the kprobes instruction slot. This can then be called later by the handler
- * function emulate_rd12rn16rm0rs8_rwflags in order to simulate the instruction.
- */
-
-enum decode_type {
- DECODE_TYPE_END,
- DECODE_TYPE_TABLE,
- DECODE_TYPE_CUSTOM,
- DECODE_TYPE_SIMULATE,
- DECODE_TYPE_EMULATE,
- DECODE_TYPE_OR,
- DECODE_TYPE_REJECT,
- NUM_DECODE_TYPES /* Must be last enum */
-};
-
-#define DECODE_TYPE_BITS 4
-#define DECODE_TYPE_MASK ((1 << DECODE_TYPE_BITS) - 1)
-
-enum decode_reg_type {
- REG_TYPE_NONE = 0, /* Not a register, ignore */
- REG_TYPE_ANY, /* Any register allowed */
- REG_TYPE_SAMEAS16, /* Register should be same as that at bits 19..16 */
- REG_TYPE_SP, /* Register must be SP */
- REG_TYPE_PC, /* Register must be PC */
- REG_TYPE_NOSP, /* Register must not be SP */
- REG_TYPE_NOSPPC, /* Register must not be SP or PC */
- REG_TYPE_NOPC, /* Register must not be PC */
- REG_TYPE_NOPCWB, /* No PC if load/store write-back flag also set */
-
- /* The following types are used when the encoding for PC indicates
- * another instruction form. This distinction only matters for test
- * case coverage checks.
- */
- REG_TYPE_NOPCX, /* Register must not be PC */
- REG_TYPE_NOSPPCX, /* Register must not be SP or PC */
-
- /* Alias to allow '0' arg to be used in REGS macro. */
- REG_TYPE_0 = REG_TYPE_NONE
-};
-
-#define REGS(r16, r12, r8, r4, r0) \
- ((REG_TYPE_##r16) << 16) + \
- ((REG_TYPE_##r12) << 12) + \
- ((REG_TYPE_##r8) << 8) + \
- ((REG_TYPE_##r4) << 4) + \
- (REG_TYPE_##r0)
-
-union decode_item {
- u32 bits;
- const union decode_item *table;
- kprobe_insn_handler_t *handler;
- kprobe_decode_insn_t *decoder;
-};
-
-
-#define DECODE_END \
- {.bits = DECODE_TYPE_END}
-
-
-struct decode_header {
- union decode_item type_regs;
- union decode_item mask;
- union decode_item value;
-};
-
-#define DECODE_HEADER(_type, _mask, _value, _regs) \
- {.bits = (_type) | ((_regs) << DECODE_TYPE_BITS)}, \
- {.bits = (_mask)}, \
- {.bits = (_value)}
-
-
-struct decode_table {
- struct decode_header header;
- union decode_item table;
-};
-
-#define DECODE_TABLE(_mask, _value, _table) \
- DECODE_HEADER(DECODE_TYPE_TABLE, _mask, _value, 0), \
- {.table = (_table)}
-
-
-struct decode_custom {
- struct decode_header header;
- union decode_item decoder;
-};
-
-#define DECODE_CUSTOM(_mask, _value, _decoder) \
- DECODE_HEADER(DECODE_TYPE_CUSTOM, _mask, _value, 0), \
- {.decoder = (_decoder)}
-
-
-struct decode_simulate {
- struct decode_header header;
- union decode_item handler;
-};
-
-#define DECODE_SIMULATEX(_mask, _value, _handler, _regs) \
- DECODE_HEADER(DECODE_TYPE_SIMULATE, _mask, _value, _regs), \
- {.handler = (_handler)}
-
-#define DECODE_SIMULATE(_mask, _value, _handler) \
- DECODE_SIMULATEX(_mask, _value, _handler, 0)
-
-
-struct decode_emulate {
- struct decode_header header;
- union decode_item handler;
-};
-
-#define DECODE_EMULATEX(_mask, _value, _handler, _regs) \
- DECODE_HEADER(DECODE_TYPE_EMULATE, _mask, _value, _regs), \
- {.handler = (_handler)}
-
-#define DECODE_EMULATE(_mask, _value, _handler) \
- DECODE_EMULATEX(_mask, _value, _handler, 0)
-
-
-struct decode_or {
- struct decode_header header;
-};
-
-#define DECODE_OR(_mask, _value) \
- DECODE_HEADER(DECODE_TYPE_OR, _mask, _value, 0)
-
-
-struct decode_reject {
- struct decode_header header;
-};
-
-#define DECODE_REJECT(_mask, _value) \
- DECODE_HEADER(DECODE_TYPE_REJECT, _mask, _value, 0)
-
-
-#ifdef CONFIG_THUMB2_KERNEL
-extern const union decode_item kprobe_decode_thumb16_table[];
-extern const union decode_item kprobe_decode_thumb32_table[];
-#else
-extern const union decode_item kprobe_decode_arm_table[];
-#endif
-
-
-int kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi,
- const union decode_item *table, bool thumb16);
-
-
#endif /* _ARM_KERNEL_KPROBES_H */
diff --git a/arch/arm/kernel/machine_kexec.c b/arch/arm/kernel/machine_kexec.c
index 57221e349a7..8cf0996aa1a 100644
--- a/arch/arm/kernel/machine_kexec.c
+++ b/arch/arm/kernel/machine_kexec.c
@@ -14,11 +14,12 @@
#include <asm/pgalloc.h>
#include <asm/mmu_context.h>
#include <asm/cacheflush.h>
+#include <asm/fncpy.h>
#include <asm/mach-types.h>
#include <asm/smp_plat.h>
#include <asm/system_misc.h>
-extern const unsigned char relocate_new_kernel[];
+extern void relocate_new_kernel(void);
extern const unsigned int relocate_new_kernel_size;
extern unsigned long kexec_start_address;
@@ -142,6 +143,8 @@ void machine_kexec(struct kimage *image)
{
unsigned long page_list;
unsigned long reboot_code_buffer_phys;
+ unsigned long reboot_entry = (unsigned long)relocate_new_kernel;
+ unsigned long reboot_entry_phys;
void *reboot_code_buffer;
/*
@@ -168,16 +171,23 @@ void machine_kexec(struct kimage *image)
/* copy our kernel relocation code to the control code page */
- memcpy(reboot_code_buffer,
- relocate_new_kernel, relocate_new_kernel_size);
+ reboot_entry = fncpy(reboot_code_buffer,
+ reboot_entry,
+ relocate_new_kernel_size);
+ reboot_entry_phys = (unsigned long)reboot_entry +
+ (reboot_code_buffer_phys - (unsigned long)reboot_code_buffer);
-
- flush_icache_range((unsigned long) reboot_code_buffer,
- (unsigned long) reboot_code_buffer + KEXEC_CONTROL_PAGE_SIZE);
printk(KERN_INFO "Bye!\n");
if (kexec_reinit)
kexec_reinit();
- soft_restart(reboot_code_buffer_phys);
+ soft_restart(reboot_entry_phys);
+}
+
+void arch_crash_save_vmcoreinfo(void)
+{
+#ifdef CONFIG_ARM_LPAE
+ VMCOREINFO_CONFIG(ARM_LPAE);
+#endif
}
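
fncpy() returns the virtual address at which the copied relocate_new_kernel code now lives inside the control page, and the entry point passed to soft_restart() is obtained by applying the same virtual-to-physical delta as the control page itself. A hedged sketch of that fix-up in isolation (assuming, as fncpy() requires, that the copied code is position independent):

	/*
	 * Sketch of the address translation used above: the copied entry point
	 * shares the control page's virtual-to-physical offset.
	 */
	static unsigned long reloc_entry_phys(unsigned long entry_virt,
					      void *buf_virt,
					      unsigned long buf_phys)
	{
		return entry_virt + (buf_phys - (unsigned long)buf_virt);
	}
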
diff --git a/arch/arm/kernel/module.c b/arch/arm/kernel/module.c
index 084dc889698..45e47815727 100644
--- a/arch/arm/kernel/module.c
+++ b/arch/arm/kernel/module.c
@@ -24,6 +24,7 @@
#include <asm/sections.h>
#include <asm/smp_plat.h>
#include <asm/unwind.h>
+#include <asm/opcodes.h>
#ifdef CONFIG_XIP_KERNEL
/*
@@ -40,7 +41,7 @@
void *module_alloc(unsigned long size)
{
return __vmalloc_node_range(size, 1, MODULES_VADDR, MODULES_END,
- GFP_KERNEL, PAGE_KERNEL_EXEC, -1,
+ GFP_KERNEL, PAGE_KERNEL_EXEC, NUMA_NO_NODE,
__builtin_return_address(0));
}
#endif
@@ -60,6 +61,7 @@ apply_relocate(Elf32_Shdr *sechdrs, const char *strtab, unsigned int symindex,
Elf32_Sym *sym;
const char *symname;
s32 offset;
+ u32 tmp;
#ifdef CONFIG_THUMB2_KERNEL
u32 upper, lower, sign, j1, j2;
#endif
@@ -95,7 +97,8 @@ apply_relocate(Elf32_Shdr *sechdrs, const char *strtab, unsigned int symindex,
case R_ARM_PC24:
case R_ARM_CALL:
case R_ARM_JUMP24:
- offset = (*(u32 *)loc & 0x00ffffff) << 2;
+ offset = __mem_to_opcode_arm(*(u32 *)loc);
+ offset = (offset & 0x00ffffff) << 2;
if (offset & 0x02000000)
offset -= 0x04000000;
@@ -111,9 +114,10 @@ apply_relocate(Elf32_Shdr *sechdrs, const char *strtab, unsigned int symindex,
}
offset >>= 2;
+ offset &= 0x00ffffff;
- *(u32 *)loc &= 0xff000000;
- *(u32 *)loc |= offset & 0x00ffffff;
+ *(u32 *)loc &= __opcode_to_mem_arm(0xff000000);
+ *(u32 *)loc |= __opcode_to_mem_arm(offset);
break;
case R_ARM_V4BX:
@@ -121,8 +125,8 @@ apply_relocate(Elf32_Shdr *sechdrs, const char *strtab, unsigned int symindex,
* other bits to re-code instruction as
* MOV PC,Rm.
*/
- *(u32 *)loc &= 0xf000000f;
- *(u32 *)loc |= 0x01a0f000;
+ *(u32 *)loc &= __opcode_to_mem_arm(0xf000000f);
+ *(u32 *)loc |= __opcode_to_mem_arm(0x01a0f000);
break;
case R_ARM_PREL31:
@@ -132,7 +136,7 @@ apply_relocate(Elf32_Shdr *sechdrs, const char *strtab, unsigned int symindex,
case R_ARM_MOVW_ABS_NC:
case R_ARM_MOVT_ABS:
- offset = *(u32 *)loc;
+ offset = tmp = __mem_to_opcode_arm(*(u32 *)loc);
offset = ((offset & 0xf0000) >> 4) | (offset & 0xfff);
offset = (offset ^ 0x8000) - 0x8000;
@@ -140,16 +144,18 @@ apply_relocate(Elf32_Shdr *sechdrs, const char *strtab, unsigned int symindex,
if (ELF32_R_TYPE(rel->r_info) == R_ARM_MOVT_ABS)
offset >>= 16;
- *(u32 *)loc &= 0xfff0f000;
- *(u32 *)loc |= ((offset & 0xf000) << 4) |
- (offset & 0x0fff);
+ tmp &= 0xfff0f000;
+ tmp |= ((offset & 0xf000) << 4) |
+ (offset & 0x0fff);
+
+ *(u32 *)loc = __opcode_to_mem_arm(tmp);
break;
#ifdef CONFIG_THUMB2_KERNEL
case R_ARM_THM_CALL:
case R_ARM_THM_JUMP24:
- upper = *(u16 *)loc;
- lower = *(u16 *)(loc + 2);
+ upper = __mem_to_opcode_thumb16(*(u16 *)loc);
+ lower = __mem_to_opcode_thumb16(*(u16 *)(loc + 2));
/*
* 25 bit signed address range (Thumb-2 BL and B.W
@@ -198,17 +204,20 @@ apply_relocate(Elf32_Shdr *sechdrs, const char *strtab, unsigned int symindex,
sign = (offset >> 24) & 1;
j1 = sign ^ (~(offset >> 23) & 1);
j2 = sign ^ (~(offset >> 22) & 1);
- *(u16 *)loc = (u16)((upper & 0xf800) | (sign << 10) |
+ upper = (u16)((upper & 0xf800) | (sign << 10) |
((offset >> 12) & 0x03ff));
- *(u16 *)(loc + 2) = (u16)((lower & 0xd000) |
- (j1 << 13) | (j2 << 11) |
- ((offset >> 1) & 0x07ff));
+ lower = (u16)((lower & 0xd000) |
+ (j1 << 13) | (j2 << 11) |
+ ((offset >> 1) & 0x07ff));
+
+ *(u16 *)loc = __opcode_to_mem_thumb16(upper);
+ *(u16 *)(loc + 2) = __opcode_to_mem_thumb16(lower);
break;
case R_ARM_THM_MOVW_ABS_NC:
case R_ARM_THM_MOVT_ABS:
- upper = *(u16 *)loc;
- lower = *(u16 *)(loc + 2);
+ upper = __mem_to_opcode_thumb16(*(u16 *)loc);
+ lower = __mem_to_opcode_thumb16(*(u16 *)(loc + 2));
/*
* MOVT/MOVW instructions encoding in Thumb-2:
@@ -229,12 +238,14 @@ apply_relocate(Elf32_Shdr *sechdrs, const char *strtab, unsigned int symindex,
if (ELF32_R_TYPE(rel->r_info) == R_ARM_THM_MOVT_ABS)
offset >>= 16;
- *(u16 *)loc = (u16)((upper & 0xfbf0) |
- ((offset & 0xf000) >> 12) |
- ((offset & 0x0800) >> 1));
- *(u16 *)(loc + 2) = (u16)((lower & 0x8f00) |
- ((offset & 0x0700) << 4) |
- (offset & 0x00ff));
+ upper = (u16)((upper & 0xfbf0) |
+ ((offset & 0xf000) >> 12) |
+ ((offset & 0x0800) >> 1));
+ lower = (u16)((lower & 0x8f00) |
+ ((offset & 0x0700) << 4) |
+ (offset & 0x00ff));
+ *(u16 *)loc = __opcode_to_mem_thumb16(upper);
+ *(u16 *)(loc + 2) = __opcode_to_mem_thumb16(lower);
break;
#endif
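
The __mem_to_opcode_arm()/__opcode_to_mem_arm() pairs added above matter on BE8 big-endian kernels, where instructions are stored little-endian while data accesses are big-endian: a relocation must convert to canonical opcode order before touching bit fields and convert back before storing (on little-endian kernels both helpers are no-ops). A sketch of the same read-modify-write pattern for the 24-bit branch offset, functionally equivalent to the masked in-place stores in apply_relocate() but not copied from it:

	#include <asm/opcodes.h>

	/* Patch the signed 24-bit offset field of an ARM B/BL instruction. */
	static void patch_arm_branch(u32 *loc, s32 byte_offset)
	{
		u32 insn = __mem_to_opcode_arm(*loc);	  /* memory -> canonical */

		insn &= 0xff000000;			  /* keep cond and opcode */
		insn |= (byte_offset >> 2) & 0x00ffffff;  /* word offset, 24 bits */
		*loc = __opcode_to_mem_arm(insn);	  /* canonical -> memory  */
	}
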
diff --git a/arch/arm/kernel/perf_event.c b/arch/arm/kernel/perf_event.c
index e186ee1e63f..4238bcba9d6 100644
--- a/arch/arm/kernel/perf_event.c
+++ b/arch/arm/kernel/perf_event.c
@@ -16,6 +16,8 @@
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/uaccess.h>
+#include <linux/irq.h>
+#include <linux/irqdesc.h>
#include <asm/irq_regs.h>
#include <asm/pmu.h>
@@ -99,10 +101,6 @@ int armpmu_event_set_period(struct perf_event *event)
s64 period = hwc->sample_period;
int ret = 0;
- /* The period may have been changed by PERF_EVENT_IOC_PERIOD */
- if (unlikely(period != hwc->last_period))
- left = period - (hwc->last_period - left);
-
if (unlikely(left <= -period)) {
left = period;
local64_set(&hwc->period_left, left);
@@ -209,6 +207,8 @@ armpmu_del(struct perf_event *event, int flags)
armpmu_stop(event, PERF_EF_UPDATE);
hw_events->events[idx] = NULL;
clear_bit(idx, hw_events->used_mask);
+ if (armpmu->clear_event_idx)
+ armpmu->clear_event_idx(hw_events, event);
perf_event_update_userpage(event);
}
@@ -256,12 +256,11 @@ validate_event(struct pmu_hw_events *hw_events,
struct perf_event *event)
{
struct arm_pmu *armpmu = to_arm_pmu(event->pmu);
- struct pmu *leader_pmu = event->group_leader->pmu;
if (is_software_event(event))
return 1;
- if (event->pmu != leader_pmu || event->state < PERF_EVENT_STATE_OFF)
+ if (event->state < PERF_EVENT_STATE_OFF)
return 1;
if (event->state == PERF_EVENT_STATE_OFF && !event->attr.enable_on_exec)
@@ -300,14 +299,27 @@ validate_group(struct perf_event *event)
static irqreturn_t armpmu_dispatch_irq(int irq, void *dev)
{
- struct arm_pmu *armpmu = (struct arm_pmu *) dev;
- struct platform_device *plat_device = armpmu->plat_device;
- struct arm_pmu_platdata *plat = dev_get_platdata(&plat_device->dev);
+ struct arm_pmu *armpmu;
+ struct platform_device *plat_device;
+ struct arm_pmu_platdata *plat;
+ int ret;
+ u64 start_clock, finish_clock;
+
+ if (irq_is_percpu(irq))
+ dev = *(void **)dev;
+ armpmu = dev;
+ plat_device = armpmu->plat_device;
+ plat = dev_get_platdata(&plat_device->dev);
+ start_clock = sched_clock();
if (plat && plat->handle_irq)
- return plat->handle_irq(irq, dev, armpmu->handle_irq);
+ ret = plat->handle_irq(irq, dev, armpmu->handle_irq);
else
- return armpmu->handle_irq(irq, dev);
+ ret = armpmu->handle_irq(irq, dev);
+ finish_clock = sched_clock();
+
+ perf_sample_event_took(finish_clock - start_clock);
+ return ret;
}
static void
@@ -398,7 +410,7 @@ __hw_perf_event_init(struct perf_event *event)
*/
hwc->config_base |= (unsigned long)mapping;
- if (!hwc->sample_period) {
+ if (!is_sampling_event(event)) {
/*
* For non-sampling runs, limit the sample_period to half
* of the counter width. That way, the new counter value
diff --git a/arch/arm/kernel/perf_event_cpu.c b/arch/arm/kernel/perf_event_cpu.c
index 8d6147b2001..af9e35e8836 100644
--- a/arch/arm/kernel/perf_event_cpu.c
+++ b/arch/arm/kernel/perf_event_cpu.c
@@ -25,6 +25,8 @@
#include <linux/platform_device.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
+#include <linux/irq.h>
+#include <linux/irqdesc.h>
#include <asm/cputype.h>
#include <asm/irq_regs.h>
@@ -33,6 +35,7 @@
/* Set at runtime when we know what CPU type we are. */
static struct arm_pmu *cpu_pmu;
+static DEFINE_PER_CPU(struct arm_pmu *, percpu_pmu);
static DEFINE_PER_CPU(struct perf_event * [ARMPMU_MAX_HWEVENTS], hw_events);
static DEFINE_PER_CPU(unsigned long [BITS_TO_LONGS(ARMPMU_MAX_HWEVENTS)], used_mask);
static DEFINE_PER_CPU(struct pmu_hw_events, cpu_hw_events);
@@ -68,7 +71,27 @@ EXPORT_SYMBOL_GPL(perf_num_counters);
static struct pmu_hw_events *cpu_pmu_get_cpu_events(void)
{
- return &__get_cpu_var(cpu_hw_events);
+ return this_cpu_ptr(&cpu_hw_events);
+}
+
+static void cpu_pmu_enable_percpu_irq(void *data)
+{
+ struct arm_pmu *cpu_pmu = data;
+ struct platform_device *pmu_device = cpu_pmu->plat_device;
+ int irq = platform_get_irq(pmu_device, 0);
+
+ enable_percpu_irq(irq, IRQ_TYPE_NONE);
+ cpumask_set_cpu(smp_processor_id(), &cpu_pmu->active_irqs);
+}
+
+static void cpu_pmu_disable_percpu_irq(void *data)
+{
+ struct arm_pmu *cpu_pmu = data;
+ struct platform_device *pmu_device = cpu_pmu->plat_device;
+ int irq = platform_get_irq(pmu_device, 0);
+
+ cpumask_clear_cpu(smp_processor_id(), &cpu_pmu->active_irqs);
+ disable_percpu_irq(irq);
}
static void cpu_pmu_free_irq(struct arm_pmu *cpu_pmu)
@@ -78,12 +101,18 @@ static void cpu_pmu_free_irq(struct arm_pmu *cpu_pmu)
irqs = min(pmu_device->num_resources, num_possible_cpus());
- for (i = 0; i < irqs; ++i) {
- if (!cpumask_test_and_clear_cpu(i, &cpu_pmu->active_irqs))
- continue;
- irq = platform_get_irq(pmu_device, i);
- if (irq >= 0)
- free_irq(irq, cpu_pmu);
+ irq = platform_get_irq(pmu_device, 0);
+ if (irq >= 0 && irq_is_percpu(irq)) {
+ on_each_cpu(cpu_pmu_disable_percpu_irq, cpu_pmu, 1);
+ free_percpu_irq(irq, &percpu_pmu);
+ } else {
+ for (i = 0; i < irqs; ++i) {
+ if (!cpumask_test_and_clear_cpu(i, &cpu_pmu->active_irqs))
+ continue;
+ irq = platform_get_irq(pmu_device, i);
+ if (irq >= 0)
+ free_irq(irq, cpu_pmu);
+ }
}
}
@@ -97,37 +126,48 @@ static int cpu_pmu_request_irq(struct arm_pmu *cpu_pmu, irq_handler_t handler)
irqs = min(pmu_device->num_resources, num_possible_cpus());
if (irqs < 1) {
- pr_err("no irqs for PMUs defined\n");
- return -ENODEV;
+ printk_once("perf/ARM: No irqs for PMU defined, sampling events not supported\n");
+ return 0;
}
- for (i = 0; i < irqs; ++i) {
- err = 0;
- irq = platform_get_irq(pmu_device, i);
- if (irq < 0)
- continue;
-
- /*
- * If we have a single PMU interrupt that we can't shift,
- * assume that we're running on a uniprocessor machine and
- * continue. Otherwise, continue without this interrupt.
- */
- if (irq_set_affinity(irq, cpumask_of(i)) && irqs > 1) {
- pr_warning("unable to set irq affinity (irq=%d, cpu=%u)\n",
- irq, i);
- continue;
- }
-
- err = request_irq(irq, handler,
- IRQF_NOBALANCING | IRQF_NO_THREAD, "arm-pmu",
- cpu_pmu);
+ irq = platform_get_irq(pmu_device, 0);
+ if (irq >= 0 && irq_is_percpu(irq)) {
+ err = request_percpu_irq(irq, handler, "arm-pmu", &percpu_pmu);
if (err) {
pr_err("unable to request IRQ%d for ARM PMU counters\n",
irq);
return err;
}
-
- cpumask_set_cpu(i, &cpu_pmu->active_irqs);
+ on_each_cpu(cpu_pmu_enable_percpu_irq, cpu_pmu, 1);
+ } else {
+ for (i = 0; i < irqs; ++i) {
+ err = 0;
+ irq = platform_get_irq(pmu_device, i);
+ if (irq < 0)
+ continue;
+
+ /*
+ * If we have a single PMU interrupt that we can't shift,
+ * assume that we're running on a uniprocessor machine and
+ * continue. Otherwise, continue without this interrupt.
+ */
+ if (irq_set_affinity(irq, cpumask_of(i)) && irqs > 1) {
+ pr_warning("unable to set irq affinity (irq=%d, cpu=%u)\n",
+ irq, i);
+ continue;
+ }
+
+ err = request_irq(irq, handler,
+ IRQF_NOBALANCING | IRQF_NO_THREAD, "arm-pmu",
+ cpu_pmu);
+ if (err) {
+ pr_err("unable to request IRQ%d for ARM PMU counters\n",
+ irq);
+ return err;
+ }
+
+ cpumask_set_cpu(i, &cpu_pmu->active_irqs);
+ }
}
return 0;
@@ -141,6 +181,7 @@ static void cpu_pmu_init(struct arm_pmu *cpu_pmu)
events->events = per_cpu(hw_events, cpu);
events->used_mask = per_cpu(used_mask, cpu);
raw_spin_lock_init(&events->pmu_lock);
+ per_cpu(percpu_pmu, cpu) = cpu_pmu;
}
cpu_pmu->get_hw_events = cpu_pmu_get_cpu_events;
@@ -150,6 +191,10 @@ static void cpu_pmu_init(struct arm_pmu *cpu_pmu)
/* Ensure the PMU has sane values out of reset. */
if (cpu_pmu->reset)
on_each_cpu(cpu_pmu->reset, cpu_pmu, 1);
+
+ /* If no interrupts available, set the corresponding capability flag */
+ if (!platform_get_irq(cpu_pmu->plat_device, 0))
+ cpu_pmu->pmu.capabilities |= PERF_PMU_CAP_NO_INTERRUPT;
}
/*
@@ -180,7 +225,9 @@ static struct notifier_block cpu_pmu_hotplug_notifier = {
* PMU platform driver and devicetree bindings.
*/
static struct of_device_id cpu_pmu_of_device_ids[] = {
+ {.compatible = "arm,cortex-a17-pmu", .data = armv7_a17_pmu_init},
{.compatible = "arm,cortex-a15-pmu", .data = armv7_a15_pmu_init},
+ {.compatible = "arm,cortex-a12-pmu", .data = armv7_a12_pmu_init},
{.compatible = "arm,cortex-a9-pmu", .data = armv7_a9_pmu_init},
{.compatible = "arm,cortex-a8-pmu", .data = armv7_a8_pmu_init},
{.compatible = "arm,cortex-a7-pmu", .data = armv7_a7_pmu_init},
@@ -188,6 +235,7 @@ static struct of_device_id cpu_pmu_of_device_ids[] = {
{.compatible = "arm,arm11mpcore-pmu", .data = armv6mpcore_pmu_init},
{.compatible = "arm,arm1176-pmu", .data = armv6pmu_init},
{.compatible = "arm,arm1136-pmu", .data = armv6pmu_init},
+ {.compatible = "qcom,krait-pmu", .data = krait_pmu_init},
{},
};
@@ -225,15 +273,6 @@ static int probe_current_pmu(struct arm_pmu *pmu)
case ARM_CPU_PART_CORTEX_A9:
ret = armv7_a9_pmu_init(pmu);
break;
- case ARM_CPU_PART_CORTEX_A5:
- ret = armv7_a5_pmu_init(pmu);
- break;
- case ARM_CPU_PART_CORTEX_A15:
- ret = armv7_a15_pmu_init(pmu);
- break;
- case ARM_CPU_PART_CORTEX_A7:
- ret = armv7_a7_pmu_init(pmu);
- break;
}
/* Intel CPUs [xscale]. */
} else if (implementor == ARM_CPU_IMP_INTEL) {
@@ -254,7 +293,7 @@ static int probe_current_pmu(struct arm_pmu *pmu)
static int cpu_pmu_device_probe(struct platform_device *pdev)
{
const struct of_device_id *of_id;
- int (*init_fn)(struct arm_pmu *);
+ const int (*init_fn)(struct arm_pmu *);
struct device_node *node = pdev->dev.of_node;
struct arm_pmu *pmu;
int ret = -ENODEV;
@@ -270,6 +309,9 @@ static int cpu_pmu_device_probe(struct platform_device *pdev)
return -ENOMEM;
}
+ cpu_pmu = pmu;
+ cpu_pmu->plat_device = pdev;
+
if (node && (of_id = of_match_node(cpu_pmu_of_device_ids, pdev->dev.of_node))) {
init_fn = of_id->data;
ret = init_fn(pmu);
@@ -282,8 +324,6 @@ static int cpu_pmu_device_probe(struct platform_device *pdev)
goto out_free;
}
- cpu_pmu = pmu;
- cpu_pmu->plat_device = pdev;
cpu_pmu_init(cpu_pmu);
ret = armpmu_register(cpu_pmu, PERF_TYPE_RAW);
diff --git a/arch/arm/kernel/perf_event_v7.c b/arch/arm/kernel/perf_event_v7.c
index 039cffb053a..1d37568c547 100644
--- a/arch/arm/kernel/perf_event_v7.c
+++ b/arch/arm/kernel/perf_event_v7.c
@@ -18,6 +18,10 @@
#ifdef CONFIG_CPU_V7
+#include <asm/cp15.h>
+#include <asm/vfp.h>
+#include "../vfp/vfpinstr.h"
+
/*
* Common ARMv7 event types
*
@@ -109,6 +113,33 @@ enum armv7_a15_perf_types {
ARMV7_A15_PERFCTR_PC_WRITE_SPEC = 0x76,
};
+/* ARMv7 Cortex-A12 specific event types */
+enum armv7_a12_perf_types {
+ ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_READ = 0x40,
+ ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_WRITE = 0x41,
+
+ ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_READ = 0x50,
+ ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_WRITE = 0x51,
+
+ ARMV7_A12_PERFCTR_PC_WRITE_SPEC = 0x76,
+
+ ARMV7_A12_PERFCTR_PF_TLB_REFILL = 0xe7,
+};
+
+/* ARMv7 Krait specific event types */
+enum krait_perf_types {
+ KRAIT_PMRESR0_GROUP0 = 0xcc,
+ KRAIT_PMRESR1_GROUP0 = 0xd0,
+ KRAIT_PMRESR2_GROUP0 = 0xd4,
+ KRAIT_VPMRESR0_GROUP0 = 0xd8,
+
+ KRAIT_PERFCTR_L1_ICACHE_ACCESS = 0x10011,
+ KRAIT_PERFCTR_L1_ICACHE_MISS = 0x10010,
+
+ KRAIT_PERFCTR_L1_ITLB_ACCESS = 0x12222,
+ KRAIT_PERFCTR_L1_DTLB_ACCESS = 0x12210,
+};
+
/*
* Cortex-A8 HW events mapping
*
@@ -732,6 +763,262 @@ static const unsigned armv7_a7_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
};
/*
+ * Cortex-A12 HW events mapping
+ */
+static const unsigned armv7_a12_perf_map[PERF_COUNT_HW_MAX] = {
+ [PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
+ [PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
+ [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
+ [PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
+ [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_A12_PERFCTR_PC_WRITE_SPEC,
+ [PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
+ [PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_BUS_CYCLES,
+ [PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = HW_OP_UNSUPPORTED,
+ [PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = HW_OP_UNSUPPORTED,
+};
+
+static const unsigned armv7_a12_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
+ [PERF_COUNT_HW_CACHE_OP_MAX]
+ [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
+ [C(L1D)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_READ,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_WRITE,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(L1I)] = {
+ /*
+ * Not all performance counters differentiate between read
+ * and write accesses/misses so we're not always strictly
+ * correct, but it's the best we can do. Writes and reads get
+ * combined in these cases.
+ */
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(LL)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_READ,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_L2_CACHE_REFILL,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_WRITE,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_L2_CACHE_REFILL,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(DTLB)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = ARMV7_A12_PERFCTR_PF_TLB_REFILL,
+ },
+ },
+ [C(ITLB)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(BPU)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(NODE)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+};
+
+/*
+ * Krait HW events mapping
+ */
+static const unsigned krait_perf_map[PERF_COUNT_HW_MAX] = {
+ [PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
+ [PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
+ [PERF_COUNT_HW_CACHE_REFERENCES] = HW_OP_UNSUPPORTED,
+ [PERF_COUNT_HW_CACHE_MISSES] = HW_OP_UNSUPPORTED,
+ [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
+ [PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
+ [PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_CLOCK_CYCLES,
+};
+
+static const unsigned krait_perf_map_no_branch[PERF_COUNT_HW_MAX] = {
+ [PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
+ [PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
+ [PERF_COUNT_HW_CACHE_REFERENCES] = HW_OP_UNSUPPORTED,
+ [PERF_COUNT_HW_CACHE_MISSES] = HW_OP_UNSUPPORTED,
+ [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = HW_OP_UNSUPPORTED,
+ [PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
+ [PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_CLOCK_CYCLES,
+};
+
+static const unsigned krait_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
+ [PERF_COUNT_HW_CACHE_OP_MAX]
+ [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
+ [C(L1D)] = {
+ /*
+ * The performance counters don't differentiate between read
+ * and write accesses/misses so this isn't strictly correct,
+ * but it's the best we can do. Writes and reads get
+ * combined.
+ */
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(L1I)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_ICACHE_ACCESS,
+ [C(RESULT_MISS)] = KRAIT_PERFCTR_L1_ICACHE_MISS,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(LL)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(DTLB)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_DTLB_ACCESS,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_DTLB_ACCESS,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(ITLB)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_ITLB_ACCESS,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_ITLB_ACCESS,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(BPU)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(NODE)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+};
+
+/*
* Perf Events' indices
*/
#define ARMV7_IDX_CYCLE_COUNTER 0
@@ -1212,6 +1499,24 @@ static int armv7_a7_map_event(struct perf_event *event)
&armv7_a7_perf_cache_map, 0xFF);
}
+static int armv7_a12_map_event(struct perf_event *event)
+{
+ return armpmu_map_event(event, &armv7_a12_perf_map,
+ &armv7_a12_perf_cache_map, 0xFF);
+}
+
+static int krait_map_event(struct perf_event *event)
+{
+ return armpmu_map_event(event, &krait_perf_map,
+ &krait_perf_cache_map, 0xFFFFF);
+}
+
+static int krait_map_event_no_branch(struct perf_event *event)
+{
+ return armpmu_map_event(event, &krait_perf_map_no_branch,
+ &krait_perf_cache_map, 0xFFFFF);
+}
+
static void armv7pmu_init(struct arm_pmu *cpu_pmu)
{
cpu_pmu->handle_irq = armv7pmu_handle_irq;
@@ -1283,6 +1588,415 @@ static int armv7_a7_pmu_init(struct arm_pmu *cpu_pmu)
cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
return 0;
}
+
+static int armv7_a12_pmu_init(struct arm_pmu *cpu_pmu)
+{
+ armv7pmu_init(cpu_pmu);
+ cpu_pmu->name = "ARMv7 Cortex-A12";
+ cpu_pmu->map_event = armv7_a12_map_event;
+ cpu_pmu->num_events = armv7_read_num_pmnc_events();
+ cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
+ return 0;
+}
+
+static int armv7_a17_pmu_init(struct arm_pmu *cpu_pmu)
+{
+ armv7_a12_pmu_init(cpu_pmu);
+ cpu_pmu->name = "ARMv7 Cortex-A17";
+ return 0;
+}
+
+/*
+ * Krait Performance Monitor Region Event Selection Register (PMRESRn)
+ *
+ * 31 30 24 16 8 0
+ * +--------------------------------+
+ * PMRESR0 | EN | CC | CC | CC | CC | N = 1, R = 0
+ * +--------------------------------+
+ * PMRESR1 | EN | CC | CC | CC | CC | N = 1, R = 1
+ * +--------------------------------+
+ * PMRESR2 | EN | CC | CC | CC | CC | N = 1, R = 2
+ * +--------------------------------+
+ * VPMRESR0 | EN | CC | CC | CC | CC | N = 2, R = ?
+ * +--------------------------------+
+ * EN | G=3 | G=2 | G=1 | G=0
+ *
+ * Event Encoding:
+ *
+ * hwc->config_base = 0xNRCCG
+ *
+ * N = prefix, 1 for Krait CPU (PMRESRn), 2 for Venum VFP (VPMRESR)
+ * R = region register
+ * CC = class of events the group G is choosing from
+ * G = group or particular event
+ *
+ * Example: 0x12021 is a Krait CPU event in PMRESR2's group 1 with code 2
+ *
+ * A region (R) corresponds to a piece of the CPU (execution unit, instruction
+ * unit, etc.) while the event code (CC) corresponds to a particular class of
+ * events (interrupts for example). An event code is broken down into
+ * groups (G) that can be mapped into the PMU (irq, fiqs, and irq+fiqs for
+ * example).
+ */
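Illustration only (not part of the patch): the standalone sketch below unpacks the worked example 0x12021 from the comment above using the same shifts that krait_evt_setup() and krait_pmu_get_event_idx() apply further down; the 2-bit prefix field at bit 16 corresponds to the KRAIT_EVENT/VENUM_EVENT defines that follow.

#include <stdio.h>

int main(void)
{
        unsigned int config_base = 0x12021;                /* example from the comment above */
        unsigned int prefix = (config_base >> 16) & 0x3;   /* N: 1 = Krait CPU, 2 = Venum VFP */
        unsigned int region = (config_base >> 12) & 0xf;   /* R: PMRESR2 */
        unsigned int code   = (config_base >> 4) & 0xff;   /* CC: event class 0x02 */
        unsigned int group  = (config_base >> 0) & 0xf;    /* G: group 1 */

        printf("N=%u R=%u CC=0x%02x G=%u\n", prefix, region, code, group);
        return 0;                                          /* prints N=1 R=2 CC=0x02 G=1 */
}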
+
+#define KRAIT_EVENT (1 << 16)
+#define VENUM_EVENT (2 << 16)
+#define KRAIT_EVENT_MASK (KRAIT_EVENT | VENUM_EVENT)
+#define PMRESRn_EN BIT(31)
+
+static u32 krait_read_pmresrn(int n)
+{
+ u32 val;
+
+ switch (n) {
+ case 0:
+ asm volatile("mrc p15, 1, %0, c9, c15, 0" : "=r" (val));
+ break;
+ case 1:
+ asm volatile("mrc p15, 1, %0, c9, c15, 1" : "=r" (val));
+ break;
+ case 2:
+ asm volatile("mrc p15, 1, %0, c9, c15, 2" : "=r" (val));
+ break;
+ default:
+ BUG(); /* Should be validated in krait_pmu_get_event_idx() */
+ }
+
+ return val;
+}
+
+static void krait_write_pmresrn(int n, u32 val)
+{
+ switch (n) {
+ case 0:
+ asm volatile("mcr p15, 1, %0, c9, c15, 0" : : "r" (val));
+ break;
+ case 1:
+ asm volatile("mcr p15, 1, %0, c9, c15, 1" : : "r" (val));
+ break;
+ case 2:
+ asm volatile("mcr p15, 1, %0, c9, c15, 2" : : "r" (val));
+ break;
+ default:
+ BUG(); /* Should be validated in krait_pmu_get_event_idx() */
+ }
+}
+
+static u32 krait_read_vpmresr0(void)
+{
+ u32 val;
+ asm volatile("mrc p10, 7, %0, c11, c0, 0" : "=r" (val));
+ return val;
+}
+
+static void krait_write_vpmresr0(u32 val)
+{
+ asm volatile("mcr p10, 7, %0, c11, c0, 0" : : "r" (val));
+}
+
+static void krait_pre_vpmresr0(u32 *venum_orig_val, u32 *fp_orig_val)
+{
+ u32 venum_new_val;
+ u32 fp_new_val;
+
+ BUG_ON(preemptible());
+ /* CPACR Enable CP10 and CP11 access */
+ *venum_orig_val = get_copro_access();
+ venum_new_val = *venum_orig_val | CPACC_SVC(10) | CPACC_SVC(11);
+ set_copro_access(venum_new_val);
+
+ /* Enable FPEXC */
+ *fp_orig_val = fmrx(FPEXC);
+ fp_new_val = *fp_orig_val | FPEXC_EN;
+ fmxr(FPEXC, fp_new_val);
+}
+
+static void krait_post_vpmresr0(u32 venum_orig_val, u32 fp_orig_val)
+{
+ BUG_ON(preemptible());
+ /* Restore FPEXC */
+ fmxr(FPEXC, fp_orig_val);
+ isb();
+ /* Restore CPACR */
+ set_copro_access(venum_orig_val);
+}
+
+static u32 krait_get_pmresrn_event(unsigned int region)
+{
+ static const u32 pmresrn_table[] = { KRAIT_PMRESR0_GROUP0,
+ KRAIT_PMRESR1_GROUP0,
+ KRAIT_PMRESR2_GROUP0 };
+ return pmresrn_table[region];
+}
+
+static void krait_evt_setup(int idx, u32 config_base)
+{
+ u32 val;
+ u32 mask;
+ u32 vval, fval;
+ unsigned int region;
+ unsigned int group;
+ unsigned int code;
+ unsigned int group_shift;
+ bool venum_event;
+
+ venum_event = !!(config_base & VENUM_EVENT);
+ region = (config_base >> 12) & 0xf;
+ code = (config_base >> 4) & 0xff;
+ group = (config_base >> 0) & 0xf;
+
+ group_shift = group * 8;
+ mask = 0xff << group_shift;
+
+ /* Configure evtsel for the region and group */
+ if (venum_event)
+ val = KRAIT_VPMRESR0_GROUP0;
+ else
+ val = krait_get_pmresrn_event(region);
+ val += group;
+ /* Mix in mode-exclusion bits */
+ val |= config_base & (ARMV7_EXCLUDE_USER | ARMV7_EXCLUDE_PL1);
+ armv7_pmnc_write_evtsel(idx, val);
+
+ asm volatile("mcr p15, 0, %0, c9, c15, 0" : : "r" (0));
+
+ if (venum_event) {
+ krait_pre_vpmresr0(&vval, &fval);
+ val = krait_read_vpmresr0();
+ val &= ~mask;
+ val |= code << group_shift;
+ val |= PMRESRn_EN;
+ krait_write_vpmresr0(val);
+ krait_post_vpmresr0(vval, fval);
+ } else {
+ val = krait_read_pmresrn(region);
+ val &= ~mask;
+ val |= code << group_shift;
+ val |= PMRESRn_EN;
+ krait_write_pmresrn(region, val);
+ }
+}
+
+static u32 krait_clear_pmresrn_group(u32 val, int group)
+{
+ u32 mask;
+ int group_shift;
+
+ group_shift = group * 8;
+ mask = 0xff << group_shift;
+ val &= ~mask;
+
+ /* Don't clear enable bit if entire region isn't disabled */
+ if (val & ~PMRESRn_EN)
+ return val |= PMRESRn_EN;
+
+ return 0;
+}
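Illustration only (not part of the patch): a standalone sketch of the enable-bit handling in krait_clear_pmresrn_group() above. Clearing one group keeps PMRESRn_EN set while another group is still programmed; only the final clear releases the whole region.

#include <stdio.h>
#include <stdint.h>

#define PMRESRn_EN (1u << 31)

/* Same logic as krait_clear_pmresrn_group() above */
static uint32_t clear_group(uint32_t val, int group)
{
        val &= ~(0xffu << (group * 8));
        /* Keep the enable bit while any other group is still programmed */
        if (val & ~PMRESRn_EN)
                return val | PMRESRn_EN;
        return 0;
}

int main(void)
{
        uint32_t val = PMRESRn_EN | 0x0502;     /* groups 0 and 1 in use */

        val = clear_group(val, 0);
        printf("0x%08x\n", val);                /* 0x80000500: EN kept, group 1 remains */
        val = clear_group(val, 1);
        printf("0x%08x\n", val);                /* 0x00000000: region fully released */
        return 0;
}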
+
+static void krait_clearpmu(u32 config_base)
+{
+ u32 val;
+ u32 vval, fval;
+ unsigned int region;
+ unsigned int group;
+ bool venum_event;
+
+ venum_event = !!(config_base & VENUM_EVENT);
+ region = (config_base >> 12) & 0xf;
+ group = (config_base >> 0) & 0xf;
+
+ if (venum_event) {
+ krait_pre_vpmresr0(&vval, &fval);
+ val = krait_read_vpmresr0();
+ val = krait_clear_pmresrn_group(val, group);
+ krait_write_vpmresr0(val);
+ krait_post_vpmresr0(vval, fval);
+ } else {
+ val = krait_read_pmresrn(region);
+ val = krait_clear_pmresrn_group(val, group);
+ krait_write_pmresrn(region, val);
+ }
+}
+
+static void krait_pmu_disable_event(struct perf_event *event)
+{
+ unsigned long flags;
+ struct hw_perf_event *hwc = &event->hw;
+ int idx = hwc->idx;
+ struct pmu_hw_events *events = cpu_pmu->get_hw_events();
+
+ /* Disable counter and interrupt */
+ raw_spin_lock_irqsave(&events->pmu_lock, flags);
+
+ /* Disable counter */
+ armv7_pmnc_disable_counter(idx);
+
+ /*
+ * Clear pmresr code (if destined for PMNx counters)
+ */
+ if (hwc->config_base & KRAIT_EVENT_MASK)
+ krait_clearpmu(hwc->config_base);
+
+ /* Disable interrupt for this counter */
+ armv7_pmnc_disable_intens(idx);
+
+ raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
+}
+
+static void krait_pmu_enable_event(struct perf_event *event)
+{
+ unsigned long flags;
+ struct hw_perf_event *hwc = &event->hw;
+ int idx = hwc->idx;
+ struct pmu_hw_events *events = cpu_pmu->get_hw_events();
+
+ /*
+ * Enable counter and interrupt, and set the counter to count
+ * the event that we're interested in.
+ */
+ raw_spin_lock_irqsave(&events->pmu_lock, flags);
+
+ /* Disable counter */
+ armv7_pmnc_disable_counter(idx);
+
+ /*
+ * Set event (if destined for PMNx counters)
+ * We set the event for the cycle counter because we
+ * have the ability to perform event filtering.
+ */
+ if (hwc->config_base & KRAIT_EVENT_MASK)
+ krait_evt_setup(idx, hwc->config_base);
+ else
+ armv7_pmnc_write_evtsel(idx, hwc->config_base);
+
+ /* Enable interrupt for this counter */
+ armv7_pmnc_enable_intens(idx);
+
+ /* Enable counter */
+ armv7_pmnc_enable_counter(idx);
+
+ raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
+}
+
+static void krait_pmu_reset(void *info)
+{
+ u32 vval, fval;
+
+ armv7pmu_reset(info);
+
+ /* Clear all pmresrs */
+ krait_write_pmresrn(0, 0);
+ krait_write_pmresrn(1, 0);
+ krait_write_pmresrn(2, 0);
+
+ krait_pre_vpmresr0(&vval, &fval);
+ krait_write_vpmresr0(0);
+ krait_post_vpmresr0(vval, fval);
+}
+
+static int krait_event_to_bit(struct perf_event *event, unsigned int region,
+ unsigned int group)
+{
+ int bit;
+ struct hw_perf_event *hwc = &event->hw;
+ struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
+
+ if (hwc->config_base & VENUM_EVENT)
+ bit = KRAIT_VPMRESR0_GROUP0;
+ else
+ bit = krait_get_pmresrn_event(region);
+ bit -= krait_get_pmresrn_event(0);
+ bit += group;
+ /*
+ * Lower bits are reserved for use by the counters (see
+ * armv7pmu_get_event_idx() for more info)
+ */
+ bit += ARMV7_IDX_COUNTER_LAST(cpu_pmu) + 1;
+
+ return bit;
+}
+
+/*
+ * We check for column exclusion constraints here.
+ * Two events can't use the same group within a PMRESR register.
+ */
+static int krait_pmu_get_event_idx(struct pmu_hw_events *cpuc,
+ struct perf_event *event)
+{
+ int idx;
+ int bit = -1;
+ unsigned int prefix;
+ unsigned int region;
+ unsigned int code;
+ unsigned int group;
+ bool krait_event;
+ struct hw_perf_event *hwc = &event->hw;
+
+ region = (hwc->config_base >> 12) & 0xf;
+ code = (hwc->config_base >> 4) & 0xff;
+ group = (hwc->config_base >> 0) & 0xf;
+ krait_event = !!(hwc->config_base & KRAIT_EVENT_MASK);
+
+ if (krait_event) {
+ /* Ignore invalid events */
+ if (group > 3 || region > 2)
+ return -EINVAL;
+ prefix = hwc->config_base & KRAIT_EVENT_MASK;
+ if (prefix != KRAIT_EVENT && prefix != VENUM_EVENT)
+ return -EINVAL;
+ if (prefix == VENUM_EVENT && (code & 0xe0))
+ return -EINVAL;
+
+ bit = krait_event_to_bit(event, region, group);
+ if (test_and_set_bit(bit, cpuc->used_mask))
+ return -EAGAIN;
+ }
+
+ idx = armv7pmu_get_event_idx(cpuc, event);
+ if (idx < 0 && bit >= 0)
+ clear_bit(bit, cpuc->used_mask);
+
+ return idx;
+}
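Illustration only (not part of the patch): a standalone sketch of the column-exclusion check above. The bit arithmetic mirrors krait_event_to_bit(), with the counter-index offset and the Venum path omitted for brevity; two events that share a region and group map to the same used_mask bit, so the second test_and_set_bit() fails with -EAGAIN.

#include <stdio.h>

#define KRAIT_PMRESR0_GROUP0    0xcc
#define KRAIT_PMRESR1_GROUP0    0xd0
#define KRAIT_PMRESR2_GROUP0    0xd4

static const unsigned int pmresrn_table[] = {
        KRAIT_PMRESR0_GROUP0, KRAIT_PMRESR1_GROUP0, KRAIT_PMRESR2_GROUP0,
};

/* Same arithmetic as krait_event_to_bit(), minus the counter offset */
static int event_to_bit(unsigned int config_base)
{
        unsigned int region = (config_base >> 12) & 0xf;
        unsigned int group = config_base & 0xf;

        return pmresrn_table[region] - KRAIT_PMRESR0_GROUP0 + group;
}

int main(void)
{
        /* Same region (2) and group (1), different event codes */
        printf("%d %d\n", event_to_bit(0x12021), event_to_bit(0x12051));
        /* Both print 9, so the second test_and_set_bit() would return -EAGAIN */
        return 0;
}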
+
+static void krait_pmu_clear_event_idx(struct pmu_hw_events *cpuc,
+ struct perf_event *event)
+{
+ int bit;
+ struct hw_perf_event *hwc = &event->hw;
+ unsigned int region;
+ unsigned int group;
+ bool krait_event;
+
+ region = (hwc->config_base >> 12) & 0xf;
+ group = (hwc->config_base >> 0) & 0xf;
+ krait_event = !!(hwc->config_base & KRAIT_EVENT_MASK);
+
+ if (krait_event) {
+ bit = krait_event_to_bit(event, region, group);
+ clear_bit(bit, cpuc->used_mask);
+ }
+}
+
+static int krait_pmu_init(struct arm_pmu *cpu_pmu)
+{
+ armv7pmu_init(cpu_pmu);
+ cpu_pmu->name = "ARMv7 Krait";
+ /* Some early versions of Krait don't support PC write events */
+ if (of_property_read_bool(cpu_pmu->plat_device->dev.of_node,
+ "qcom,no-pc-write"))
+ cpu_pmu->map_event = krait_map_event_no_branch;
+ else
+ cpu_pmu->map_event = krait_map_event;
+ cpu_pmu->num_events = armv7_read_num_pmnc_events();
+ cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
+ cpu_pmu->reset = krait_pmu_reset;
+ cpu_pmu->enable = krait_pmu_enable_event;
+ cpu_pmu->disable = krait_pmu_disable_event;
+ cpu_pmu->get_event_idx = krait_pmu_get_event_idx;
+ cpu_pmu->clear_event_idx = krait_pmu_clear_event_idx;
+ return 0;
+}
#else
static inline int armv7_a8_pmu_init(struct arm_pmu *cpu_pmu)
{
@@ -1308,4 +2022,19 @@ static inline int armv7_a7_pmu_init(struct arm_pmu *cpu_pmu)
{
return -ENODEV;
}
+
+static inline int armv7_a12_pmu_init(struct arm_pmu *cpu_pmu)
+{
+ return -ENODEV;
+}
+
+static inline int armv7_a17_pmu_init(struct arm_pmu *cpu_pmu)
+{
+ return -ENODEV;
+}
+
+static inline int krait_pmu_init(struct arm_pmu *cpu_pmu)
+{
+ return -ENODEV;
+}
#endif /* CONFIG_CPU_V7 */
diff --git a/arch/arm/kernel/perf_regs.c b/arch/arm/kernel/perf_regs.c
new file mode 100644
index 00000000000..6e4379c67cb
--- /dev/null
+++ b/arch/arm/kernel/perf_regs.c
@@ -0,0 +1,30 @@
+
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/perf_event.h>
+#include <linux/bug.h>
+#include <asm/perf_regs.h>
+#include <asm/ptrace.h>
+
+u64 perf_reg_value(struct pt_regs *regs, int idx)
+{
+ if (WARN_ON_ONCE((u32)idx >= PERF_REG_ARM_MAX))
+ return 0;
+
+ return regs->uregs[idx];
+}
+
+#define REG_RESERVED (~((1ULL << PERF_REG_ARM_MAX) - 1))
+
+int perf_reg_validate(u64 mask)
+{
+ if (!mask || mask & REG_RESERVED)
+ return -EINVAL;
+
+ return 0;
+}
+
+u64 perf_reg_abi(struct task_struct *task)
+{
+ return PERF_SAMPLE_REGS_ABI_32;
+}
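Illustration only (not part of the patch): a user-space sketch of the mask check that perf_reg_validate() above performs on perf_event_attr.sample_regs_user. The value of PERF_REG_ARM_MAX (16) and the r0..pc bit ordering are assumptions based on the ARM perf_regs ABI, not taken from this diff.

#include <stdint.h>
#include <stdio.h>

#define PERF_REG_ARM_MAX        16      /* assumed: r0-r10, fp, ip, sp, lr, pc */
#define REG_RESERVED            (~((1ULL << PERF_REG_ARM_MAX) - 1))

/* Mirrors perf_reg_validate(); the kernel returns -EINVAL instead of -1 */
static int reg_validate(uint64_t mask)
{
        if (!mask || (mask & REG_RESERVED))
                return -1;
        return 0;
}

int main(void)
{
        uint64_t good = (1ULL << 15) | (1ULL << 14) | (1ULL << 13); /* pc, lr, sp (assumed indices) */
        uint64_t bad = 1ULL << 20;      /* bit at or above PERF_REG_ARM_MAX */

        printf("%d %d %d\n", reg_validate(good), reg_validate(bad), reg_validate(0));
        /* prints 0 -1 -1 */
        return 0;
}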
diff --git a/arch/arm/kernel/pj4-cp0.c b/arch/arm/kernel/pj4-cp0.c
index 679cf4d18c0..8153e36b249 100644
--- a/arch/arm/kernel/pj4-cp0.c
+++ b/arch/arm/kernel/pj4-cp0.c
@@ -17,6 +17,7 @@
#include <linux/init.h>
#include <linux/io.h>
#include <asm/thread_notify.h>
+#include <asm/cputype.h>
static int iwmmxt_do(struct notifier_block *self, unsigned long cmd, void *t)
{
@@ -44,7 +45,7 @@ static int iwmmxt_do(struct notifier_block *self, unsigned long cmd, void *t)
return NOTIFY_DONE;
}
-static struct notifier_block iwmmxt_notifier_block = {
+static struct notifier_block __maybe_unused iwmmxt_notifier_block = {
.notifier_call = iwmmxt_do,
};
@@ -71,6 +72,33 @@ static void __init pj4_cp_access_write(u32 value)
: "=r" (temp) : "r" (value));
}
+static int __init pj4_get_iwmmxt_version(void)
+{
+ u32 cp_access, wcid;
+
+ cp_access = pj4_cp_access_read();
+ pj4_cp_access_write(cp_access | 0xf);
+
+	/* check whether coprocessors 0 and 1 are available */
+ if ((pj4_cp_access_read() & 0xf) != 0xf) {
+ pj4_cp_access_write(cp_access);
+ return -ENODEV;
+ }
+
+ /* read iWMMXt coprocessor id register p1, c0 */
+ __asm__ __volatile__ ("mrc p1, 0, %0, c0, c0, 0\n" : "=r" (wcid));
+
+ pj4_cp_access_write(cp_access);
+
+ /* iWMMXt v1 */
+ if ((wcid & 0xffffff00) == 0x56051000)
+ return 1;
+ /* iWMMXt v2 */
+ if ((wcid & 0xffffff00) == 0x56052000)
+ return 2;
+
+ return -EINVAL;
+}
/*
* Disable CP0/CP1 on boot, and let call_fpe() and the iWMMXt lazy
@@ -78,14 +106,26 @@ static void __init pj4_cp_access_write(u32 value)
*/
static int __init pj4_cp0_init(void)
{
- u32 cp_access;
+ u32 __maybe_unused cp_access;
+ int vers;
+
+ if (!cpu_is_pj4())
+ return 0;
+
+ vers = pj4_get_iwmmxt_version();
+ if (vers < 0)
+ return 0;
+#ifndef CONFIG_IWMMXT
+ pr_info("PJ4 iWMMXt coprocessor detected, but kernel support is missing.\n");
+#else
cp_access = pj4_cp_access_read() & ~0xf;
pj4_cp_access_write(cp_access);
- printk(KERN_INFO "PJ4 iWMMXt coprocessor enabled.\n");
+ pr_info("PJ4 iWMMXt v%d coprocessor enabled.\n", vers);
elf_hwcap |= HWCAP_IWMMXT;
thread_register_notifier(&iwmmxt_notifier_block);
+#endif
return 0;
}
diff --git a/arch/arm/kernel/probes-arm.c b/arch/arm/kernel/probes-arm.c
new file mode 100644
index 00000000000..8eaef81d834
--- /dev/null
+++ b/arch/arm/kernel/probes-arm.c
@@ -0,0 +1,734 @@
+/*
+ * arch/arm/kernel/probes-arm.c
+ *
+ * Some code moved here from arch/arm/kernel/kprobes-arm.c
+ *
+ * Copyright (C) 2006, 2007 Motorola Inc.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ */
+
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/stddef.h>
+#include <linux/ptrace.h>
+
+#include "probes.h"
+#include "probes-arm.h"
+
+#define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit)))))
+
+#define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25)
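Illustration only (not part of the patch): a standalone sketch exercising the two macros above on the encoding of "b ." (0xeafffffe). Its 24-bit immediate 0xfffffe, shifted left by two and sign-extended at bit 25, gives -8, so the simulated target is (pc + 8) - 8, i.e. the branch instruction itself.

#include <stdio.h>

#define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit)))))
#define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25)

int main(void)
{
        unsigned int insn = 0xeafffffe;         /* "b ." in ARM encoding */
        int disp = branch_displacement(insn);   /* relies on two's-complement conversion, as the kernel does */

        printf("%d\n", disp);                   /* prints -8 */
        return 0;
}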
+
+/*
+ * To avoid the complications of mimicking single-stepping on a
+ * processor without a Next-PC or a single-step mode, and to
+ * avoid having to deal with the side-effects of boosting, we
+ * simulate or emulate (almost) all ARM instructions.
+ *
+ * "Simulation" is where the instruction's behavior is duplicated in
+ * C code. "Emulation" is where the original instruction is rewritten
+ * and executed, often by altering its registers.
+ *
+ * By having all behavior of the kprobe'd instruction completed before
+ * returning from the kprobe_handler(), all locks (scheduler and
+ * interrupt) can safely be released. There is no need for secondary
+ * breakpoints, no race on SMP or preemptible kernels, and no need to
+ * clean up resource counts at a later time, which would impact overall
+ * system performance. By rewriting the instruction, only the minimum
+ * set of registers needs to be loaded and saved back, optimizing performance.
+ *
+ * Calling the insnslot_*_rwflags version of a function doesn't hurt
+ * anything even when the CPSR flags aren't updated by the
+ * instruction. It's just a little slower in return for saving
+ * a little space by not having a duplicate function that doesn't
+ * update the flags. (The same applies to instructions that do or
+ * don't perform register writeback.)
+ * Also, instructions can either read the flags, only write the
+ * flags, or read and write the flags. To limit the number of
+ * combinations, rather than for sheer performance, the flag functions
+ * simply assume that the flags are both read and written.
+ */
+
+void __kprobes simulate_bbl(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
+{
+ long iaddr = (long) regs->ARM_pc - 4;
+ int disp = branch_displacement(insn);
+
+ if (insn & (1 << 24))
+ regs->ARM_lr = iaddr + 4;
+
+ regs->ARM_pc = iaddr + 8 + disp;
+}
+
+void __kprobes simulate_blx1(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
+{
+ long iaddr = (long) regs->ARM_pc - 4;
+ int disp = branch_displacement(insn);
+
+ regs->ARM_lr = iaddr + 4;
+ regs->ARM_pc = iaddr + 8 + disp + ((insn >> 23) & 0x2);
+ regs->ARM_cpsr |= PSR_T_BIT;
+}
+
+void __kprobes simulate_blx2bx(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
+{
+ int rm = insn & 0xf;
+ long rmv = regs->uregs[rm];
+
+ if (insn & (1 << 5))
+ regs->ARM_lr = (long) regs->ARM_pc;
+
+ regs->ARM_pc = rmv & ~0x1;
+ regs->ARM_cpsr &= ~PSR_T_BIT;
+ if (rmv & 0x1)
+ regs->ARM_cpsr |= PSR_T_BIT;
+}
+
+void __kprobes simulate_mrs(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
+{
+ int rd = (insn >> 12) & 0xf;
+ unsigned long mask = 0xf8ff03df; /* Mask out execution state */
+ regs->uregs[rd] = regs->ARM_cpsr & mask;
+}
+
+void __kprobes simulate_mov_ipsp(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
+{
+ regs->uregs[12] = regs->uregs[13];
+}
+
+/*
+ * For the instruction masking and comparisons in all the decode
+ * tables below, do _not_ rearrange the order of tests unless
+ * you're very, very sure of what you are doing. For the sake of
+ * efficiency, the masks for some tests assume that other tests
+ * have already been done before them, so that the match pattern
+ * for an instruction class can be as broad as possible and the
+ * total number of tests kept to a minimum.
+ */
+
+static const union decode_item arm_1111_table[] = {
+ /* Unconditional instructions */
+
+ /* memory hint 1111 0100 x001 xxxx xxxx xxxx xxxx xxxx */
+ /* PLDI (immediate) 1111 0100 x101 xxxx xxxx xxxx xxxx xxxx */
+ /* PLDW (immediate) 1111 0101 x001 xxxx xxxx xxxx xxxx xxxx */
+ /* PLD (immediate) 1111 0101 x101 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_SIMULATE (0xfe300000, 0xf4100000, PROBES_PRELOAD_IMM),
+
+ /* memory hint 1111 0110 x001 xxxx xxxx xxxx xxx0 xxxx */
+ /* PLDI (register) 1111 0110 x101 xxxx xxxx xxxx xxx0 xxxx */
+ /* PLDW (register) 1111 0111 x001 xxxx xxxx xxxx xxx0 xxxx */
+ /* PLD (register) 1111 0111 x101 xxxx xxxx xxxx xxx0 xxxx */
+ DECODE_SIMULATE (0xfe300010, 0xf6100000, PROBES_PRELOAD_REG),
+
+ /* BLX (immediate) 1111 101x xxxx xxxx xxxx xxxx xxxx xxxx */
+ DECODE_SIMULATE (0xfe000000, 0xfa000000, PROBES_BRANCH_IMM),
+
+ /* CPS 1111 0001 0000 xxx0 xxxx xxxx xx0x xxxx */
+ /* SETEND 1111 0001 0000 0001 xxxx xxxx 0000 xxxx */
+ /* SRS 1111 100x x1x0 xxxx xxxx xxxx xxxx xxxx */
+ /* RFE 1111 100x x0x1 xxxx xxxx xxxx xxxx xxxx */
+
+ /* Coprocessor instructions... */
+ /* MCRR2 1111 1100 0100 xxxx xxxx xxxx xxxx xxxx */
+ /* MRRC2 1111 1100 0101 xxxx xxxx xxxx xxxx xxxx */
+ /* LDC2 1111 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
+ /* STC2 1111 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
+ /* CDP2 1111 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
+ /* MCR2 1111 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
+ /* MRC2 1111 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */
+
+ /* Other unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_0001_0xx0____0xxx_table[] = {
+ /* Miscellaneous instructions */
+
+ /* MRS cpsr cccc 0001 0000 xxxx xxxx xxxx 0000 xxxx */
+ DECODE_SIMULATEX(0x0ff000f0, 0x01000000, PROBES_MRS,
+ REGS(0, NOPC, 0, 0, 0)),
+
+ /* BX cccc 0001 0010 xxxx xxxx xxxx 0001 xxxx */
+ DECODE_SIMULATE (0x0ff000f0, 0x01200010, PROBES_BRANCH_REG),
+
+ /* BLX (register) cccc 0001 0010 xxxx xxxx xxxx 0011 xxxx */
+ DECODE_SIMULATEX(0x0ff000f0, 0x01200030, PROBES_BRANCH_REG,
+ REGS(0, 0, 0, 0, NOPC)),
+
+ /* CLZ cccc 0001 0110 xxxx xxxx xxxx 0001 xxxx */
+ DECODE_EMULATEX (0x0ff000f0, 0x01600010, PROBES_CLZ,
+ REGS(0, NOPC, 0, 0, NOPC)),
+
+ /* QADD cccc 0001 0000 xxxx xxxx xxxx 0101 xxxx */
+ /* QSUB cccc 0001 0010 xxxx xxxx xxxx 0101 xxxx */
+ /* QDADD cccc 0001 0100 xxxx xxxx xxxx 0101 xxxx */
+ /* QDSUB cccc 0001 0110 xxxx xxxx xxxx 0101 xxxx */
+ DECODE_EMULATEX (0x0f9000f0, 0x01000050, PROBES_SATURATING_ARITHMETIC,
+ REGS(NOPC, NOPC, 0, 0, NOPC)),
+
+ /* BXJ cccc 0001 0010 xxxx xxxx xxxx 0010 xxxx */
+ /* MSR cccc 0001 0x10 xxxx xxxx xxxx 0000 xxxx */
+ /* MRS spsr cccc 0001 0100 xxxx xxxx xxxx 0000 xxxx */
+ /* BKPT 1110 0001 0010 xxxx xxxx xxxx 0111 xxxx */
+ /* SMC cccc 0001 0110 xxxx xxxx xxxx 0111 xxxx */
+ /* And unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_0001_0xx0____1xx0_table[] = {
+ /* Halfword multiply and multiply-accumulate */
+
+ /* SMLALxy cccc 0001 0100 xxxx xxxx xxxx 1xx0 xxxx */
+ DECODE_EMULATEX (0x0ff00090, 0x01400080, PROBES_MUL1,
+ REGS(NOPC, NOPC, NOPC, 0, NOPC)),
+
+ /* SMULWy cccc 0001 0010 xxxx xxxx xxxx 1x10 xxxx */
+ DECODE_OR (0x0ff000b0, 0x012000a0),
+ /* SMULxy cccc 0001 0110 xxxx xxxx xxxx 1xx0 xxxx */
+ DECODE_EMULATEX (0x0ff00090, 0x01600080, PROBES_MUL2,
+ REGS(NOPC, 0, NOPC, 0, NOPC)),
+
+ /* SMLAxy cccc 0001 0000 xxxx xxxx xxxx 1xx0 xxxx */
+ DECODE_OR (0x0ff00090, 0x01000080),
+ /* SMLAWy cccc 0001 0010 xxxx xxxx xxxx 1x00 xxxx */
+ DECODE_EMULATEX (0x0ff000b0, 0x01200080, PROBES_MUL2,
+ REGS(NOPC, NOPC, NOPC, 0, NOPC)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_0000_____1001_table[] = {
+ /* Multiply and multiply-accumulate */
+
+ /* MUL cccc 0000 0000 xxxx xxxx xxxx 1001 xxxx */
+ /* MULS cccc 0000 0001 xxxx xxxx xxxx 1001 xxxx */
+ DECODE_EMULATEX (0x0fe000f0, 0x00000090, PROBES_MUL2,
+ REGS(NOPC, 0, NOPC, 0, NOPC)),
+
+ /* MLA cccc 0000 0010 xxxx xxxx xxxx 1001 xxxx */
+ /* MLAS cccc 0000 0011 xxxx xxxx xxxx 1001 xxxx */
+ DECODE_OR (0x0fe000f0, 0x00200090),
+ /* MLS cccc 0000 0110 xxxx xxxx xxxx 1001 xxxx */
+ DECODE_EMULATEX (0x0ff000f0, 0x00600090, PROBES_MUL2,
+ REGS(NOPC, NOPC, NOPC, 0, NOPC)),
+
+ /* UMAAL cccc 0000 0100 xxxx xxxx xxxx 1001 xxxx */
+ DECODE_OR (0x0ff000f0, 0x00400090),
+ /* UMULL cccc 0000 1000 xxxx xxxx xxxx 1001 xxxx */
+ /* UMULLS cccc 0000 1001 xxxx xxxx xxxx 1001 xxxx */
+ /* UMLAL cccc 0000 1010 xxxx xxxx xxxx 1001 xxxx */
+ /* UMLALS cccc 0000 1011 xxxx xxxx xxxx 1001 xxxx */
+ /* SMULL cccc 0000 1100 xxxx xxxx xxxx 1001 xxxx */
+ /* SMULLS cccc 0000 1101 xxxx xxxx xxxx 1001 xxxx */
+ /* SMLAL cccc 0000 1110 xxxx xxxx xxxx 1001 xxxx */
+ /* SMLALS cccc 0000 1111 xxxx xxxx xxxx 1001 xxxx */
+ DECODE_EMULATEX (0x0f8000f0, 0x00800090, PROBES_MUL1,
+ REGS(NOPC, NOPC, NOPC, 0, NOPC)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_0001_____1001_table[] = {
+ /* Synchronization primitives */
+
+#if __LINUX_ARM_ARCH__ < 6
+ /* Deprecated on ARMv6 and may be UNDEFINED on v7 */
+ /* SWP/SWPB cccc 0001 0x00 xxxx xxxx xxxx 1001 xxxx */
+ DECODE_EMULATEX (0x0fb000f0, 0x01000090, PROBES_SWP,
+ REGS(NOPC, NOPC, 0, 0, NOPC)),
+#endif
+ /* LDREX/STREX{,D,B,H} cccc 0001 1xxx xxxx xxxx xxxx 1001 xxxx */
+ /* And unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_000x_____1xx1_table[] = {
+ /* Extra load/store instructions */
+
+ /* STRHT cccc 0000 xx10 xxxx xxxx xxxx 1011 xxxx */
+ /* ??? cccc 0000 xx10 xxxx xxxx xxxx 11x1 xxxx */
+ /* LDRHT cccc 0000 xx11 xxxx xxxx xxxx 1011 xxxx */
+ /* LDRSBT cccc 0000 xx11 xxxx xxxx xxxx 1101 xxxx */
+ /* LDRSHT cccc 0000 xx11 xxxx xxxx xxxx 1111 xxxx */
+ DECODE_REJECT (0x0f200090, 0x00200090),
+
+ /* LDRD/STRD lr,pc,{... cccc 000x x0x0 xxxx 111x xxxx 1101 xxxx */
+ DECODE_REJECT (0x0e10e0d0, 0x0000e0d0),
+
+ /* LDRD (register) cccc 000x x0x0 xxxx xxxx xxxx 1101 xxxx */
+ /* STRD (register) cccc 000x x0x0 xxxx xxxx xxxx 1111 xxxx */
+ DECODE_EMULATEX (0x0e5000d0, 0x000000d0, PROBES_LDRSTRD,
+ REGS(NOPCWB, NOPCX, 0, 0, NOPC)),
+
+ /* LDRD (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1101 xxxx */
+ /* STRD (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1111 xxxx */
+ DECODE_EMULATEX (0x0e5000d0, 0x004000d0, PROBES_LDRSTRD,
+ REGS(NOPCWB, NOPCX, 0, 0, 0)),
+
+ /* STRH (register) cccc 000x x0x0 xxxx xxxx xxxx 1011 xxxx */
+ DECODE_EMULATEX (0x0e5000f0, 0x000000b0, PROBES_STORE_EXTRA,
+ REGS(NOPCWB, NOPC, 0, 0, NOPC)),
+
+ /* LDRH (register) cccc 000x x0x1 xxxx xxxx xxxx 1011 xxxx */
+ /* LDRSB (register) cccc 000x x0x1 xxxx xxxx xxxx 1101 xxxx */
+ /* LDRSH (register) cccc 000x x0x1 xxxx xxxx xxxx 1111 xxxx */
+ DECODE_EMULATEX (0x0e500090, 0x00100090, PROBES_LOAD_EXTRA,
+ REGS(NOPCWB, NOPC, 0, 0, NOPC)),
+
+ /* STRH (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1011 xxxx */
+ DECODE_EMULATEX (0x0e5000f0, 0x004000b0, PROBES_STORE_EXTRA,
+ REGS(NOPCWB, NOPC, 0, 0, 0)),
+
+ /* LDRH (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1011 xxxx */
+ /* LDRSB (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1101 xxxx */
+ /* LDRSH (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1111 xxxx */
+ DECODE_EMULATEX (0x0e500090, 0x00500090, PROBES_LOAD_EXTRA,
+ REGS(NOPCWB, NOPC, 0, 0, 0)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_000x_table[] = {
+ /* Data-processing (register) */
+
+ /* <op>S PC, ... cccc 000x xxx1 xxxx 1111 xxxx xxxx xxxx */
+ DECODE_REJECT (0x0e10f000, 0x0010f000),
+
+ /* MOV IP, SP 1110 0001 1010 0000 1100 0000 0000 1101 */
+ DECODE_SIMULATE (0xffffffff, 0xe1a0c00d, PROBES_MOV_IP_SP),
+
+ /* TST (register) cccc 0001 0001 xxxx xxxx xxxx xxx0 xxxx */
+ /* TEQ (register) cccc 0001 0011 xxxx xxxx xxxx xxx0 xxxx */
+ /* CMP (register) cccc 0001 0101 xxxx xxxx xxxx xxx0 xxxx */
+ /* CMN (register) cccc 0001 0111 xxxx xxxx xxxx xxx0 xxxx */
+ DECODE_EMULATEX (0x0f900010, 0x01100000, PROBES_DATA_PROCESSING_REG,
+ REGS(ANY, 0, 0, 0, ANY)),
+
+ /* MOV (register) cccc 0001 101x xxxx xxxx xxxx xxx0 xxxx */
+ /* MVN (register) cccc 0001 111x xxxx xxxx xxxx xxx0 xxxx */
+ DECODE_EMULATEX (0x0fa00010, 0x01a00000, PROBES_DATA_PROCESSING_REG,
+ REGS(0, ANY, 0, 0, ANY)),
+
+ /* AND (register) cccc 0000 000x xxxx xxxx xxxx xxx0 xxxx */
+ /* EOR (register) cccc 0000 001x xxxx xxxx xxxx xxx0 xxxx */
+ /* SUB (register) cccc 0000 010x xxxx xxxx xxxx xxx0 xxxx */
+ /* RSB (register) cccc 0000 011x xxxx xxxx xxxx xxx0 xxxx */
+ /* ADD (register) cccc 0000 100x xxxx xxxx xxxx xxx0 xxxx */
+ /* ADC (register) cccc 0000 101x xxxx xxxx xxxx xxx0 xxxx */
+ /* SBC (register) cccc 0000 110x xxxx xxxx xxxx xxx0 xxxx */
+ /* RSC (register) cccc 0000 111x xxxx xxxx xxxx xxx0 xxxx */
+ /* ORR (register) cccc 0001 100x xxxx xxxx xxxx xxx0 xxxx */
+ /* BIC (register) cccc 0001 110x xxxx xxxx xxxx xxx0 xxxx */
+ DECODE_EMULATEX (0x0e000010, 0x00000000, PROBES_DATA_PROCESSING_REG,
+ REGS(ANY, ANY, 0, 0, ANY)),
+
+ /* TST (reg-shift reg) cccc 0001 0001 xxxx xxxx xxxx 0xx1 xxxx */
+ /* TEQ (reg-shift reg) cccc 0001 0011 xxxx xxxx xxxx 0xx1 xxxx */
+ /* CMP (reg-shift reg) cccc 0001 0101 xxxx xxxx xxxx 0xx1 xxxx */
+ /* CMN (reg-shift reg) cccc 0001 0111 xxxx xxxx xxxx 0xx1 xxxx */
+ DECODE_EMULATEX (0x0f900090, 0x01100010, PROBES_DATA_PROCESSING_REG,
+ REGS(NOPC, 0, NOPC, 0, NOPC)),
+
+ /* MOV (reg-shift reg) cccc 0001 101x xxxx xxxx xxxx 0xx1 xxxx */
+ /* MVN (reg-shift reg) cccc 0001 111x xxxx xxxx xxxx 0xx1 xxxx */
+ DECODE_EMULATEX (0x0fa00090, 0x01a00010, PROBES_DATA_PROCESSING_REG,
+ REGS(0, NOPC, NOPC, 0, NOPC)),
+
+ /* AND (reg-shift reg) cccc 0000 000x xxxx xxxx xxxx 0xx1 xxxx */
+ /* EOR (reg-shift reg) cccc 0000 001x xxxx xxxx xxxx 0xx1 xxxx */
+ /* SUB (reg-shift reg) cccc 0000 010x xxxx xxxx xxxx 0xx1 xxxx */
+ /* RSB (reg-shift reg) cccc 0000 011x xxxx xxxx xxxx 0xx1 xxxx */
+ /* ADD (reg-shift reg) cccc 0000 100x xxxx xxxx xxxx 0xx1 xxxx */
+ /* ADC (reg-shift reg) cccc 0000 101x xxxx xxxx xxxx 0xx1 xxxx */
+ /* SBC (reg-shift reg) cccc 0000 110x xxxx xxxx xxxx 0xx1 xxxx */
+ /* RSC (reg-shift reg) cccc 0000 111x xxxx xxxx xxxx 0xx1 xxxx */
+ /* ORR (reg-shift reg) cccc 0001 100x xxxx xxxx xxxx 0xx1 xxxx */
+ /* BIC (reg-shift reg) cccc 0001 110x xxxx xxxx xxxx 0xx1 xxxx */
+ DECODE_EMULATEX (0x0e000090, 0x00000010, PROBES_DATA_PROCESSING_REG,
+ REGS(NOPC, NOPC, NOPC, 0, NOPC)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_001x_table[] = {
+ /* Data-processing (immediate) */
+
+ /* MOVW cccc 0011 0000 xxxx xxxx xxxx xxxx xxxx */
+ /* MOVT cccc 0011 0100 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0fb00000, 0x03000000, PROBES_DATA_PROCESSING_IMM,
+ REGS(0, NOPC, 0, 0, 0)),
+
+ /* YIELD cccc 0011 0010 0000 xxxx xxxx 0000 0001 */
+ DECODE_OR (0x0fff00ff, 0x03200001),
+ /* SEV cccc 0011 0010 0000 xxxx xxxx 0000 0100 */
+ DECODE_EMULATE (0x0fff00ff, 0x03200004, PROBES_EMULATE_NONE),
+ /* NOP cccc 0011 0010 0000 xxxx xxxx 0000 0000 */
+ /* WFE cccc 0011 0010 0000 xxxx xxxx 0000 0010 */
+ /* WFI cccc 0011 0010 0000 xxxx xxxx 0000 0011 */
+ DECODE_SIMULATE (0x0fff00fc, 0x03200000, PROBES_SIMULATE_NOP),
+ /* DBG cccc 0011 0010 0000 xxxx xxxx ffff xxxx */
+ /* unallocated hints cccc 0011 0010 0000 xxxx xxxx xxxx xxxx */
+ /* MSR (immediate) cccc 0011 0x10 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0x0fb00000, 0x03200000),
+
+ /* <op>S PC, ... cccc 001x xxx1 xxxx 1111 xxxx xxxx xxxx */
+ DECODE_REJECT (0x0e10f000, 0x0210f000),
+
+ /* TST (immediate) cccc 0011 0001 xxxx xxxx xxxx xxxx xxxx */
+ /* TEQ (immediate) cccc 0011 0011 xxxx xxxx xxxx xxxx xxxx */
+ /* CMP (immediate) cccc 0011 0101 xxxx xxxx xxxx xxxx xxxx */
+ /* CMN (immediate) cccc 0011 0111 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0f900000, 0x03100000, PROBES_DATA_PROCESSING_IMM,
+ REGS(ANY, 0, 0, 0, 0)),
+
+ /* MOV (immediate) cccc 0011 101x xxxx xxxx xxxx xxxx xxxx */
+ /* MVN (immediate) cccc 0011 111x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0fa00000, 0x03a00000, PROBES_DATA_PROCESSING_IMM,
+ REGS(0, ANY, 0, 0, 0)),
+
+ /* AND (immediate) cccc 0010 000x xxxx xxxx xxxx xxxx xxxx */
+ /* EOR (immediate) cccc 0010 001x xxxx xxxx xxxx xxxx xxxx */
+ /* SUB (immediate) cccc 0010 010x xxxx xxxx xxxx xxxx xxxx */
+ /* RSB (immediate) cccc 0010 011x xxxx xxxx xxxx xxxx xxxx */
+ /* ADD (immediate) cccc 0010 100x xxxx xxxx xxxx xxxx xxxx */
+ /* ADC (immediate) cccc 0010 101x xxxx xxxx xxxx xxxx xxxx */
+ /* SBC (immediate) cccc 0010 110x xxxx xxxx xxxx xxxx xxxx */
+ /* RSC (immediate) cccc 0010 111x xxxx xxxx xxxx xxxx xxxx */
+ /* ORR (immediate) cccc 0011 100x xxxx xxxx xxxx xxxx xxxx */
+ /* BIC (immediate) cccc 0011 110x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0e000000, 0x02000000, PROBES_DATA_PROCESSING_IMM,
+ REGS(ANY, ANY, 0, 0, 0)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_0110_____xxx1_table[] = {
+ /* Media instructions */
+
+ /* SEL cccc 0110 1000 xxxx xxxx xxxx 1011 xxxx */
+ DECODE_EMULATEX (0x0ff000f0, 0x068000b0, PROBES_SATURATE,
+ REGS(NOPC, NOPC, 0, 0, NOPC)),
+
+ /* SSAT cccc 0110 101x xxxx xxxx xxxx xx01 xxxx */
+ /* USAT cccc 0110 111x xxxx xxxx xxxx xx01 xxxx */
+ DECODE_OR(0x0fa00030, 0x06a00010),
+ /* SSAT16 cccc 0110 1010 xxxx xxxx xxxx 0011 xxxx */
+ /* USAT16 cccc 0110 1110 xxxx xxxx xxxx 0011 xxxx */
+ DECODE_EMULATEX (0x0fb000f0, 0x06a00030, PROBES_SATURATE,
+ REGS(0, NOPC, 0, 0, NOPC)),
+
+ /* REV cccc 0110 1011 xxxx xxxx xxxx 0011 xxxx */
+ /* REV16 cccc 0110 1011 xxxx xxxx xxxx 1011 xxxx */
+ /* RBIT cccc 0110 1111 xxxx xxxx xxxx 0011 xxxx */
+ /* REVSH cccc 0110 1111 xxxx xxxx xxxx 1011 xxxx */
+ DECODE_EMULATEX (0x0fb00070, 0x06b00030, PROBES_REV,
+ REGS(0, NOPC, 0, 0, NOPC)),
+
+ /* ??? cccc 0110 0x00 xxxx xxxx xxxx xxx1 xxxx */
+ DECODE_REJECT (0x0fb00010, 0x06000010),
+ /* ??? cccc 0110 0xxx xxxx xxxx xxxx 1011 xxxx */
+ DECODE_REJECT (0x0f8000f0, 0x060000b0),
+ /* ??? cccc 0110 0xxx xxxx xxxx xxxx 1101 xxxx */
+ DECODE_REJECT (0x0f8000f0, 0x060000d0),
+ /* SADD16 cccc 0110 0001 xxxx xxxx xxxx 0001 xxxx */
+ /* SADDSUBX cccc 0110 0001 xxxx xxxx xxxx 0011 xxxx */
+ /* SSUBADDX cccc 0110 0001 xxxx xxxx xxxx 0101 xxxx */
+ /* SSUB16 cccc 0110 0001 xxxx xxxx xxxx 0111 xxxx */
+ /* SADD8 cccc 0110 0001 xxxx xxxx xxxx 1001 xxxx */
+ /* SSUB8 cccc 0110 0001 xxxx xxxx xxxx 1111 xxxx */
+ /* QADD16 cccc 0110 0010 xxxx xxxx xxxx 0001 xxxx */
+ /* QADDSUBX cccc 0110 0010 xxxx xxxx xxxx 0011 xxxx */
+ /* QSUBADDX cccc 0110 0010 xxxx xxxx xxxx 0101 xxxx */
+ /* QSUB16 cccc 0110 0010 xxxx xxxx xxxx 0111 xxxx */
+ /* QADD8 cccc 0110 0010 xxxx xxxx xxxx 1001 xxxx */
+ /* QSUB8 cccc 0110 0010 xxxx xxxx xxxx 1111 xxxx */
+ /* SHADD16 cccc 0110 0011 xxxx xxxx xxxx 0001 xxxx */
+ /* SHADDSUBX cccc 0110 0011 xxxx xxxx xxxx 0011 xxxx */
+ /* SHSUBADDX cccc 0110 0011 xxxx xxxx xxxx 0101 xxxx */
+ /* SHSUB16 cccc 0110 0011 xxxx xxxx xxxx 0111 xxxx */
+ /* SHADD8 cccc 0110 0011 xxxx xxxx xxxx 1001 xxxx */
+ /* SHSUB8 cccc 0110 0011 xxxx xxxx xxxx 1111 xxxx */
+ /* UADD16 cccc 0110 0101 xxxx xxxx xxxx 0001 xxxx */
+ /* UADDSUBX cccc 0110 0101 xxxx xxxx xxxx 0011 xxxx */
+ /* USUBADDX cccc 0110 0101 xxxx xxxx xxxx 0101 xxxx */
+ /* USUB16 cccc 0110 0101 xxxx xxxx xxxx 0111 xxxx */
+ /* UADD8 cccc 0110 0101 xxxx xxxx xxxx 1001 xxxx */
+ /* USUB8 cccc 0110 0101 xxxx xxxx xxxx 1111 xxxx */
+ /* UQADD16 cccc 0110 0110 xxxx xxxx xxxx 0001 xxxx */
+ /* UQADDSUBX cccc 0110 0110 xxxx xxxx xxxx 0011 xxxx */
+ /* UQSUBADDX cccc 0110 0110 xxxx xxxx xxxx 0101 xxxx */
+ /* UQSUB16 cccc 0110 0110 xxxx xxxx xxxx 0111 xxxx */
+ /* UQADD8 cccc 0110 0110 xxxx xxxx xxxx 1001 xxxx */
+ /* UQSUB8 cccc 0110 0110 xxxx xxxx xxxx 1111 xxxx */
+ /* UHADD16 cccc 0110 0111 xxxx xxxx xxxx 0001 xxxx */
+ /* UHADDSUBX cccc 0110 0111 xxxx xxxx xxxx 0011 xxxx */
+ /* UHSUBADDX cccc 0110 0111 xxxx xxxx xxxx 0101 xxxx */
+ /* UHSUB16 cccc 0110 0111 xxxx xxxx xxxx 0111 xxxx */
+ /* UHADD8 cccc 0110 0111 xxxx xxxx xxxx 1001 xxxx */
+ /* UHSUB8 cccc 0110 0111 xxxx xxxx xxxx 1111 xxxx */
+ DECODE_EMULATEX (0x0f800010, 0x06000010, PROBES_MMI,
+ REGS(NOPC, NOPC, 0, 0, NOPC)),
+
+ /* PKHBT cccc 0110 1000 xxxx xxxx xxxx x001 xxxx */
+ /* PKHTB cccc 0110 1000 xxxx xxxx xxxx x101 xxxx */
+ DECODE_EMULATEX (0x0ff00030, 0x06800010, PROBES_PACK,
+ REGS(NOPC, NOPC, 0, 0, NOPC)),
+
+ /* ??? cccc 0110 1001 xxxx xxxx xxxx 0111 xxxx */
+ /* ??? cccc 0110 1101 xxxx xxxx xxxx 0111 xxxx */
+ DECODE_REJECT (0x0fb000f0, 0x06900070),
+
+ /* SXTB16 cccc 0110 1000 1111 xxxx xxxx 0111 xxxx */
+ /* SXTB cccc 0110 1010 1111 xxxx xxxx 0111 xxxx */
+ /* SXTH cccc 0110 1011 1111 xxxx xxxx 0111 xxxx */
+ /* UXTB16 cccc 0110 1100 1111 xxxx xxxx 0111 xxxx */
+ /* UXTB cccc 0110 1110 1111 xxxx xxxx 0111 xxxx */
+ /* UXTH cccc 0110 1111 1111 xxxx xxxx 0111 xxxx */
+ DECODE_EMULATEX (0x0f8f00f0, 0x068f0070, PROBES_EXTEND,
+ REGS(0, NOPC, 0, 0, NOPC)),
+
+ /* SXTAB16 cccc 0110 1000 xxxx xxxx xxxx 0111 xxxx */
+ /* SXTAB cccc 0110 1010 xxxx xxxx xxxx 0111 xxxx */
+ /* SXTAH cccc 0110 1011 xxxx xxxx xxxx 0111 xxxx */
+ /* UXTAB16 cccc 0110 1100 xxxx xxxx xxxx 0111 xxxx */
+ /* UXTAB cccc 0110 1110 xxxx xxxx xxxx 0111 xxxx */
+ /* UXTAH cccc 0110 1111 xxxx xxxx xxxx 0111 xxxx */
+ DECODE_EMULATEX (0x0f8000f0, 0x06800070, PROBES_EXTEND_ADD,
+ REGS(NOPCX, NOPC, 0, 0, NOPC)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_0111_____xxx1_table[] = {
+ /* Media instructions */
+
+ /* UNDEFINED cccc 0111 1111 xxxx xxxx xxxx 1111 xxxx */
+ DECODE_REJECT (0x0ff000f0, 0x07f000f0),
+
+ /* SMLALD cccc 0111 0100 xxxx xxxx xxxx 00x1 xxxx */
+ /* SMLSLD cccc 0111 0100 xxxx xxxx xxxx 01x1 xxxx */
+ DECODE_EMULATEX (0x0ff00090, 0x07400010, PROBES_MUL_ADD_LONG,
+ REGS(NOPC, NOPC, NOPC, 0, NOPC)),
+
+ /* SMUAD cccc 0111 0000 xxxx 1111 xxxx 00x1 xxxx */
+ /* SMUSD cccc 0111 0000 xxxx 1111 xxxx 01x1 xxxx */
+ DECODE_OR (0x0ff0f090, 0x0700f010),
+ /* SMMUL cccc 0111 0101 xxxx 1111 xxxx 00x1 xxxx */
+ DECODE_OR (0x0ff0f0d0, 0x0750f010),
+ /* USAD8 cccc 0111 1000 xxxx 1111 xxxx 0001 xxxx */
+ DECODE_EMULATEX (0x0ff0f0f0, 0x0780f010, PROBES_MUL_ADD,
+ REGS(NOPC, 0, NOPC, 0, NOPC)),
+
+ /* SMLAD cccc 0111 0000 xxxx xxxx xxxx 00x1 xxxx */
+ /* SMLSD cccc 0111 0000 xxxx xxxx xxxx 01x1 xxxx */
+ DECODE_OR (0x0ff00090, 0x07000010),
+ /* SMMLA cccc 0111 0101 xxxx xxxx xxxx 00x1 xxxx */
+ DECODE_OR (0x0ff000d0, 0x07500010),
+ /* USADA8 cccc 0111 1000 xxxx xxxx xxxx 0001 xxxx */
+ DECODE_EMULATEX (0x0ff000f0, 0x07800010, PROBES_MUL_ADD,
+ REGS(NOPC, NOPCX, NOPC, 0, NOPC)),
+
+ /* SMMLS cccc 0111 0101 xxxx xxxx xxxx 11x1 xxxx */
+ DECODE_EMULATEX (0x0ff000d0, 0x075000d0, PROBES_MUL_ADD,
+ REGS(NOPC, NOPC, NOPC, 0, NOPC)),
+
+ /* SBFX cccc 0111 101x xxxx xxxx xxxx x101 xxxx */
+ /* UBFX cccc 0111 111x xxxx xxxx xxxx x101 xxxx */
+ DECODE_EMULATEX (0x0fa00070, 0x07a00050, PROBES_BITFIELD,
+ REGS(0, NOPC, 0, 0, NOPC)),
+
+ /* BFC cccc 0111 110x xxxx xxxx xxxx x001 1111 */
+ DECODE_EMULATEX (0x0fe0007f, 0x07c0001f, PROBES_BITFIELD,
+ REGS(0, NOPC, 0, 0, 0)),
+
+ /* BFI cccc 0111 110x xxxx xxxx xxxx x001 xxxx */
+ DECODE_EMULATEX (0x0fe00070, 0x07c00010, PROBES_BITFIELD,
+ REGS(0, NOPC, 0, 0, NOPCX)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_01xx_table[] = {
+ /* Load/store word and unsigned byte */
+
+ /* LDRB/STRB pc,[...] cccc 01xx x0xx xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0x0c40f000, 0x0440f000),
+
+ /* STRT cccc 01x0 x010 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRT cccc 01x0 x011 xxxx xxxx xxxx xxxx xxxx */
+ /* STRBT cccc 01x0 x110 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRBT cccc 01x0 x111 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0x0d200000, 0x04200000),
+
+ /* STR (immediate) cccc 010x x0x0 xxxx xxxx xxxx xxxx xxxx */
+ /* STRB (immediate) cccc 010x x1x0 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0e100000, 0x04000000, PROBES_STORE,
+ REGS(NOPCWB, ANY, 0, 0, 0)),
+
+ /* LDR (immediate) cccc 010x x0x1 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRB (immediate) cccc 010x x1x1 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0e100000, 0x04100000, PROBES_LOAD,
+ REGS(NOPCWB, ANY, 0, 0, 0)),
+
+ /* STR (register) cccc 011x x0x0 xxxx xxxx xxxx xxxx xxxx */
+ /* STRB (register) cccc 011x x1x0 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0e100000, 0x06000000, PROBES_STORE,
+ REGS(NOPCWB, ANY, 0, 0, NOPC)),
+
+ /* LDR (register) cccc 011x x0x1 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRB (register) cccc 011x x1x1 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0e100000, 0x06100000, PROBES_LOAD,
+ REGS(NOPCWB, ANY, 0, 0, NOPC)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_100x_table[] = {
+ /* Block data transfer instructions */
+
+ /* LDM cccc 100x x0x1 xxxx xxxx xxxx xxxx xxxx */
+ /* STM cccc 100x x0x0 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_CUSTOM (0x0e400000, 0x08000000, PROBES_LDMSTM),
+
+ /* STM (user registers) cccc 100x x1x0 xxxx xxxx xxxx xxxx xxxx */
+ /* LDM (user registers) cccc 100x x1x1 xxxx 0xxx xxxx xxxx xxxx */
+ /* LDM (exception ret) cccc 100x x1x1 xxxx 1xxx xxxx xxxx xxxx */
+ DECODE_END
+};
+
+const union decode_item probes_decode_arm_table[] = {
+ /*
+ * Unconditional instructions
+ * 1111 xxxx xxxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xf0000000, 0xf0000000, arm_1111_table),
+
+ /*
+ * Miscellaneous instructions
+ * cccc 0001 0xx0 xxxx xxxx xxxx 0xxx xxxx
+ */
+ DECODE_TABLE (0x0f900080, 0x01000000, arm_cccc_0001_0xx0____0xxx_table),
+
+ /*
+ * Halfword multiply and multiply-accumulate
+ * cccc 0001 0xx0 xxxx xxxx xxxx 1xx0 xxxx
+ */
+ DECODE_TABLE (0x0f900090, 0x01000080, arm_cccc_0001_0xx0____1xx0_table),
+
+ /*
+ * Multiply and multiply-accumulate
+ * cccc 0000 xxxx xxxx xxxx xxxx 1001 xxxx
+ */
+ DECODE_TABLE (0x0f0000f0, 0x00000090, arm_cccc_0000_____1001_table),
+
+ /*
+ * Synchronization primitives
+ * cccc 0001 xxxx xxxx xxxx xxxx 1001 xxxx
+ */
+ DECODE_TABLE (0x0f0000f0, 0x01000090, arm_cccc_0001_____1001_table),
+
+ /*
+ * Extra load/store instructions
+ * cccc 000x xxxx xxxx xxxx xxxx 1xx1 xxxx
+ */
+ DECODE_TABLE (0x0e000090, 0x00000090, arm_cccc_000x_____1xx1_table),
+
+ /*
+ * Data-processing (register)
+ * cccc 000x xxxx xxxx xxxx xxxx xxx0 xxxx
+ * Data-processing (register-shifted register)
+ * cccc 000x xxxx xxxx xxxx xxxx 0xx1 xxxx
+ */
+ DECODE_TABLE (0x0e000000, 0x00000000, arm_cccc_000x_table),
+
+ /*
+ * Data-processing (immediate)
+ * cccc 001x xxxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0x0e000000, 0x02000000, arm_cccc_001x_table),
+
+ /*
+ * Media instructions
+ * cccc 011x xxxx xxxx xxxx xxxx xxx1 xxxx
+ */
+ DECODE_TABLE (0x0f000010, 0x06000010, arm_cccc_0110_____xxx1_table),
+ DECODE_TABLE (0x0f000010, 0x07000010, arm_cccc_0111_____xxx1_table),
+
+ /*
+ * Load/store word and unsigned byte
+ * cccc 01xx xxxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0x0c000000, 0x04000000, arm_cccc_01xx_table),
+
+ /*
+ * Block data transfer instructions
+ * cccc 100x xxxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0x0e000000, 0x08000000, arm_cccc_100x_table),
+
+ /* B cccc 1010 xxxx xxxx xxxx xxxx xxxx xxxx */
+ /* BL cccc 1011 xxxx xxxx xxxx xxxx xxxx xxxx */
+ DECODE_SIMULATE (0x0e000000, 0x0a000000, PROBES_BRANCH),
+
+ /*
+ * Supervisor Call, and coprocessor instructions
+ */
+
+ /* MCRR cccc 1100 0100 xxxx xxxx xxxx xxxx xxxx */
+ /* MRRC cccc 1100 0101 xxxx xxxx xxxx xxxx xxxx */
+ /* LDC cccc 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
+ /* STC cccc 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
+ /* CDP cccc 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
+ /* MCR cccc 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
+ /* MRC cccc 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */
+ /* SVC cccc 1111 xxxx xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0x0c000000, 0x0c000000),
+
+ DECODE_END
+};
+#ifdef CONFIG_ARM_KPROBES_TEST_MODULE
+EXPORT_SYMBOL_GPL(probes_decode_arm_table);
+#endif
+
+static void __kprobes arm_singlestep(probes_opcode_t insn,
+ struct arch_probes_insn *asi, struct pt_regs *regs)
+{
+ regs->ARM_pc += 4;
+ asi->insn_handler(insn, asi, regs);
+}
+
+/* Return:
+ * INSN_REJECTED If instruction is one not allowed to kprobe,
+ * INSN_GOOD If instruction is supported and uses instruction slot,
+ * INSN_GOOD_NO_SLOT If instruction is supported but doesn't use its slot.
+ *
+ * For instructions we don't want to kprobe (INSN_REJECTED return result):
+ * These are generally instructions that modify processor state, making
+ * them "hard" to simulate, such as those that switch processor modes or
+ * make accesses in alternate modes. Any of these could be simulated
+ * if the work were put into it, but the return would be low considering
+ * they should also be very rare.
+ */
+enum probes_insn __kprobes
+arm_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi,
+ bool emulate, const union decode_action *actions)
+{
+ asi->insn_singlestep = arm_singlestep;
+ asi->insn_check_cc = probes_condition_checks[insn>>28];
+ return probes_decode_insn(insn, asi, probes_decode_arm_table, false,
+ emulate, actions);
+}
diff --git a/arch/arm/kernel/probes-arm.h b/arch/arm/kernel/probes-arm.h
new file mode 100644
index 00000000000..ace6572f6e2
--- /dev/null
+++ b/arch/arm/kernel/probes-arm.h
@@ -0,0 +1,73 @@
+/*
+ * arch/arm/kernel/probes-arm.h
+ *
+ * Copyright 2013 Linaro Ltd.
+ * Written by: David A. Long
+ *
+ * The code contained herein is licensed under the GNU General Public
+ * License. You may obtain a copy of the GNU General Public License
+ * Version 2 or later at the following locations:
+ *
+ * http://www.opensource.org/licenses/gpl-license.html
+ * http://www.gnu.org/copyleft/gpl.html
+ */
+
+#ifndef _ARM_KERNEL_PROBES_ARM_H
+#define _ARM_KERNEL_PROBES_ARM_H
+
+enum probes_arm_action {
+ PROBES_EMULATE_NONE,
+ PROBES_SIMULATE_NOP,
+ PROBES_PRELOAD_IMM,
+ PROBES_PRELOAD_REG,
+ PROBES_BRANCH_IMM,
+ PROBES_BRANCH_REG,
+ PROBES_MRS,
+ PROBES_CLZ,
+ PROBES_SATURATING_ARITHMETIC,
+ PROBES_MUL1,
+ PROBES_MUL2,
+ PROBES_SWP,
+ PROBES_LDRSTRD,
+ PROBES_LOAD,
+ PROBES_STORE,
+ PROBES_LOAD_EXTRA,
+ PROBES_STORE_EXTRA,
+ PROBES_MOV_IP_SP,
+ PROBES_DATA_PROCESSING_REG,
+ PROBES_DATA_PROCESSING_IMM,
+ PROBES_MOV_HALFWORD,
+ PROBES_SEV,
+ PROBES_WFE,
+ PROBES_SATURATE,
+ PROBES_REV,
+ PROBES_MMI,
+ PROBES_PACK,
+ PROBES_EXTEND,
+ PROBES_EXTEND_ADD,
+ PROBES_MUL_ADD_LONG,
+ PROBES_MUL_ADD,
+ PROBES_BITFIELD,
+ PROBES_BRANCH,
+ PROBES_LDMSTM,
+ NUM_PROBES_ARM_ACTIONS
+};
+
+void __kprobes simulate_bbl(probes_opcode_t opcode,
+ struct arch_probes_insn *asi, struct pt_regs *regs);
+void __kprobes simulate_blx1(probes_opcode_t opcode,
+ struct arch_probes_insn *asi, struct pt_regs *regs);
+void __kprobes simulate_blx2bx(probes_opcode_t opcode,
+ struct arch_probes_insn *asi, struct pt_regs *regs);
+void __kprobes simulate_mrs(probes_opcode_t opcode,
+ struct arch_probes_insn *asi, struct pt_regs *regs);
+void __kprobes simulate_mov_ipsp(probes_opcode_t opcode,
+ struct arch_probes_insn *asi, struct pt_regs *regs);
+
+extern const union decode_item probes_decode_arm_table[];
+
+enum probes_insn arm_probes_decode_insn(probes_opcode_t,
+ struct arch_probes_insn *, bool emulate,
+ const union decode_action *actions);
+
+#endif
diff --git a/arch/arm/kernel/probes-thumb.c b/arch/arm/kernel/probes-thumb.c
new file mode 100644
index 00000000000..4131351e812
--- /dev/null
+++ b/arch/arm/kernel/probes-thumb.c
@@ -0,0 +1,882 @@
+/*
+ * arch/arm/kernel/probes-thumb.c
+ *
+ * Copyright (C) 2011 Jon Medhurst <tixy@yxit.co.uk>.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#include <linux/stddef.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+
+#include "probes.h"
+#include "probes-thumb.h"
+
+
+static const union decode_item t32_table_1110_100x_x0xx[] = {
+ /* Load/store multiple instructions */
+
+ /* Rn is PC 1110 100x x0xx 1111 xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfe4f0000, 0xe80f0000),
+
+ /* SRS 1110 1000 00x0 xxxx xxxx xxxx xxxx xxxx */
+ /* RFE 1110 1000 00x1 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xffc00000, 0xe8000000),
+ /* SRS 1110 1001 10x0 xxxx xxxx xxxx xxxx xxxx */
+ /* RFE 1110 1001 10x1 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xffc00000, 0xe9800000),
+
+ /* STM Rn, {...pc} 1110 100x x0x0 xxxx 1xxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfe508000, 0xe8008000),
+ /* LDM Rn, {...lr,pc} 1110 100x x0x1 xxxx 11xx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfe50c000, 0xe810c000),
+ /* LDM/STM Rn, {...sp} 1110 100x x0xx xxxx xx1x xxxx xxxx xxxx */
+ DECODE_REJECT (0xfe402000, 0xe8002000),
+
+ /* STMIA 1110 1000 10x0 xxxx xxxx xxxx xxxx xxxx */
+ /* LDMIA 1110 1000 10x1 xxxx xxxx xxxx xxxx xxxx */
+ /* STMDB 1110 1001 00x0 xxxx xxxx xxxx xxxx xxxx */
+ /* LDMDB 1110 1001 00x1 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_CUSTOM (0xfe400000, 0xe8000000, PROBES_T32_LDMSTM),
+
+ DECODE_END
+};
+
+static const union decode_item t32_table_1110_100x_x1xx[] = {
+ /* Load/store dual, load/store exclusive, table branch */
+
+ /* STRD (immediate) 1110 1000 x110 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRD (immediate) 1110 1000 x111 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_OR (0xff600000, 0xe8600000),
+ /* STRD (immediate) 1110 1001 x1x0 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRD (immediate) 1110 1001 x1x1 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xff400000, 0xe9400000, PROBES_T32_LDRDSTRD,
+ REGS(NOPCWB, NOSPPC, NOSPPC, 0, 0)),
+
+ /* TBB 1110 1000 1101 xxxx xxxx xxxx 0000 xxxx */
+ /* TBH 1110 1000 1101 xxxx xxxx xxxx 0001 xxxx */
+ DECODE_SIMULATEX(0xfff000e0, 0xe8d00000, PROBES_T32_TABLE_BRANCH,
+ REGS(NOSP, 0, 0, 0, NOSPPC)),
+
+ /* STREX 1110 1000 0100 xxxx xxxx xxxx xxxx xxxx */
+ /* LDREX 1110 1000 0101 xxxx xxxx xxxx xxxx xxxx */
+ /* STREXB 1110 1000 1100 xxxx xxxx xxxx 0100 xxxx */
+ /* STREXH 1110 1000 1100 xxxx xxxx xxxx 0101 xxxx */
+ /* STREXD 1110 1000 1100 xxxx xxxx xxxx 0111 xxxx */
+ /* LDREXB 1110 1000 1101 xxxx xxxx xxxx 0100 xxxx */
+ /* LDREXH 1110 1000 1101 xxxx xxxx xxxx 0101 xxxx */
+ /* LDREXD 1110 1000 1101 xxxx xxxx xxxx 0111 xxxx */
+ /* And unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item t32_table_1110_101x[] = {
+ /* Data-processing (shifted register) */
+
+ /* TST 1110 1010 0001 xxxx xxxx 1111 xxxx xxxx */
+ /* TEQ 1110 1010 1001 xxxx xxxx 1111 xxxx xxxx */
+ DECODE_EMULATEX (0xff700f00, 0xea100f00, PROBES_T32_TST,
+ REGS(NOSPPC, 0, 0, 0, NOSPPC)),
+
+ /* CMN 1110 1011 0001 xxxx xxxx 1111 xxxx xxxx */
+ DECODE_OR (0xfff00f00, 0xeb100f00),
+ /* CMP 1110 1011 1011 xxxx xxxx 1111 xxxx xxxx */
+ DECODE_EMULATEX (0xfff00f00, 0xebb00f00, PROBES_T32_TST,
+ REGS(NOPC, 0, 0, 0, NOSPPC)),
+
+ /* MOV 1110 1010 010x 1111 xxxx xxxx xxxx xxxx */
+ /* MVN 1110 1010 011x 1111 xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xffcf0000, 0xea4f0000, PROBES_T32_MOV,
+ REGS(0, 0, NOSPPC, 0, NOSPPC)),
+
+ /* ??? 1110 1010 101x xxxx xxxx xxxx xxxx xxxx */
+ /* ??? 1110 1010 111x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xffa00000, 0xeaa00000),
+ /* ??? 1110 1011 001x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xffe00000, 0xeb200000),
+ /* ??? 1110 1011 100x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xffe00000, 0xeb800000),
+ /* ??? 1110 1011 111x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xffe00000, 0xebe00000),
+
+ /* ADD/SUB SP, SP, Rm, LSL #0..3 */
+ /* 1110 1011 x0xx 1101 x000 1101 xx00 xxxx */
+ DECODE_EMULATEX (0xff4f7f30, 0xeb0d0d00, PROBES_T32_ADDSUB,
+ REGS(SP, 0, SP, 0, NOSPPC)),
+
+ /* ADD/SUB SP, SP, Rm, shift */
+ /* 1110 1011 x0xx 1101 xxxx 1101 xxxx xxxx */
+ DECODE_REJECT (0xff4f0f00, 0xeb0d0d00),
+
+ /* ADD/SUB Rd, SP, Rm, shift */
+ /* 1110 1011 x0xx 1101 xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xff4f0000, 0xeb0d0000, PROBES_T32_ADDSUB,
+ REGS(SP, 0, NOPC, 0, NOSPPC)),
+
+ /* AND 1110 1010 000x xxxx xxxx xxxx xxxx xxxx */
+ /* BIC 1110 1010 001x xxxx xxxx xxxx xxxx xxxx */
+ /* ORR 1110 1010 010x xxxx xxxx xxxx xxxx xxxx */
+ /* ORN 1110 1010 011x xxxx xxxx xxxx xxxx xxxx */
+ /* EOR 1110 1010 100x xxxx xxxx xxxx xxxx xxxx */
+ /* PKH 1110 1010 110x xxxx xxxx xxxx xxxx xxxx */
+ /* ADD 1110 1011 000x xxxx xxxx xxxx xxxx xxxx */
+ /* ADC 1110 1011 010x xxxx xxxx xxxx xxxx xxxx */
+ /* SBC 1110 1011 011x xxxx xxxx xxxx xxxx xxxx */
+ /* SUB 1110 1011 101x xxxx xxxx xxxx xxxx xxxx */
+ /* RSB 1110 1011 110x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfe000000, 0xea000000, PROBES_T32_LOGICAL,
+ REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)),
+
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_0x0x___0[] = {
+ /* Data-processing (modified immediate) */
+
+ /* TST 1111 0x00 0001 xxxx 0xxx 1111 xxxx xxxx */
+ /* TEQ 1111 0x00 1001 xxxx 0xxx 1111 xxxx xxxx */
+ DECODE_EMULATEX (0xfb708f00, 0xf0100f00, PROBES_T32_TST,
+ REGS(NOSPPC, 0, 0, 0, 0)),
+
+ /* CMN 1111 0x01 0001 xxxx 0xxx 1111 xxxx xxxx */
+ DECODE_OR (0xfbf08f00, 0xf1100f00),
+ /* CMP 1111 0x01 1011 xxxx 0xxx 1111 xxxx xxxx */
+ DECODE_EMULATEX (0xfbf08f00, 0xf1b00f00, PROBES_T32_CMP,
+ REGS(NOPC, 0, 0, 0, 0)),
+
+ /* MOV 1111 0x00 010x 1111 0xxx xxxx xxxx xxxx */
+ /* MVN 1111 0x00 011x 1111 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfbcf8000, 0xf04f0000, PROBES_T32_MOV,
+ REGS(0, 0, NOSPPC, 0, 0)),
+
+ /* ??? 1111 0x00 101x xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfbe08000, 0xf0a00000),
+ /* ??? 1111 0x00 110x xxxx 0xxx xxxx xxxx xxxx */
+ /* ??? 1111 0x00 111x xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfbc08000, 0xf0c00000),
+ /* ??? 1111 0x01 001x xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfbe08000, 0xf1200000),
+ /* ??? 1111 0x01 100x xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfbe08000, 0xf1800000),
+ /* ??? 1111 0x01 111x xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfbe08000, 0xf1e00000),
+
+ /* ADD Rd, SP, #imm 1111 0x01 000x 1101 0xxx xxxx xxxx xxxx */
+ /* SUB Rd, SP, #imm 1111 0x01 101x 1101 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfb4f8000, 0xf10d0000, PROBES_T32_ADDSUB,
+ REGS(SP, 0, NOPC, 0, 0)),
+
+ /* AND 1111 0x00 000x xxxx 0xxx xxxx xxxx xxxx */
+ /* BIC 1111 0x00 001x xxxx 0xxx xxxx xxxx xxxx */
+ /* ORR 1111 0x00 010x xxxx 0xxx xxxx xxxx xxxx */
+ /* ORN 1111 0x00 011x xxxx 0xxx xxxx xxxx xxxx */
+ /* EOR 1111 0x00 100x xxxx 0xxx xxxx xxxx xxxx */
+ /* ADD 1111 0x01 000x xxxx 0xxx xxxx xxxx xxxx */
+ /* ADC 1111 0x01 010x xxxx 0xxx xxxx xxxx xxxx */
+ /* SBC 1111 0x01 011x xxxx 0xxx xxxx xxxx xxxx */
+ /* SUB 1111 0x01 101x xxxx 0xxx xxxx xxxx xxxx */
+ /* RSB 1111 0x01 110x xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfa008000, 0xf0000000, PROBES_T32_LOGICAL,
+ REGS(NOSPPC, 0, NOSPPC, 0, 0)),
+
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_0x1x___0[] = {
+ /* Data-processing (plain binary immediate) */
+
+ /* ADDW Rd, PC, #imm 1111 0x10 0000 1111 0xxx xxxx xxxx xxxx */
+ DECODE_OR (0xfbff8000, 0xf20f0000),
+ /* SUBW Rd, PC, #imm 1111 0x10 1010 1111 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfbff8000, 0xf2af0000, PROBES_T32_ADDWSUBW_PC,
+ REGS(PC, 0, NOSPPC, 0, 0)),
+
+ /* ADDW SP, SP, #imm 1111 0x10 0000 1101 0xxx 1101 xxxx xxxx */
+ DECODE_OR (0xfbff8f00, 0xf20d0d00),
+ /* SUBW SP, SP, #imm 1111 0x10 1010 1101 0xxx 1101 xxxx xxxx */
+ DECODE_EMULATEX (0xfbff8f00, 0xf2ad0d00, PROBES_T32_ADDWSUBW,
+ REGS(SP, 0, SP, 0, 0)),
+
+ /* ADDW 1111 0x10 0000 xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_OR (0xfbf08000, 0xf2000000),
+ /* SUBW 1111 0x10 1010 xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfbf08000, 0xf2a00000, PROBES_T32_ADDWSUBW,
+ REGS(NOPCX, 0, NOSPPC, 0, 0)),
+
+ /* MOVW 1111 0x10 0100 xxxx 0xxx xxxx xxxx xxxx */
+ /* MOVT 1111 0x10 1100 xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfb708000, 0xf2400000, PROBES_T32_MOVW,
+ REGS(0, 0, NOSPPC, 0, 0)),
+
+ /* SSAT16 1111 0x11 0010 xxxx 0000 xxxx 00xx xxxx */
+ /* SSAT 1111 0x11 00x0 xxxx 0xxx xxxx xxxx xxxx */
+ /* USAT16 1111 0x11 1010 xxxx 0000 xxxx 00xx xxxx */
+ /* USAT 1111 0x11 10x0 xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfb508000, 0xf3000000, PROBES_T32_SAT,
+ REGS(NOSPPC, 0, NOSPPC, 0, 0)),
+
+	/* SBFX			1111 0x11 0100 xxxx 0xxx xxxx xxxx xxxx */
+	/* UBFX			1111 0x11 1100 xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfb708000, 0xf3400000, PROBES_T32_BITFIELD,
+ REGS(NOSPPC, 0, NOSPPC, 0, 0)),
+
+ /* BFC 1111 0x11 0110 1111 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfbff8000, 0xf36f0000, PROBES_T32_BITFIELD,
+ REGS(0, 0, NOSPPC, 0, 0)),
+
+ /* BFI 1111 0x11 0110 xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfbf08000, 0xf3600000, PROBES_T32_BITFIELD,
+ REGS(NOSPPCX, 0, NOSPPC, 0, 0)),
+
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_0xxx___1[] = {
+ /* Branches and miscellaneous control */
+
+ /* YIELD 1111 0011 1010 xxxx 10x0 x000 0000 0001 */
+ DECODE_OR (0xfff0d7ff, 0xf3a08001),
+ /* SEV 1111 0011 1010 xxxx 10x0 x000 0000 0100 */
+ DECODE_EMULATE (0xfff0d7ff, 0xf3a08004, PROBES_T32_SEV),
+ /* NOP 1111 0011 1010 xxxx 10x0 x000 0000 0000 */
+ /* WFE 1111 0011 1010 xxxx 10x0 x000 0000 0010 */
+ /* WFI 1111 0011 1010 xxxx 10x0 x000 0000 0011 */
+ DECODE_SIMULATE (0xfff0d7fc, 0xf3a08000, PROBES_T32_WFE),
+
+ /* MRS Rd, CPSR 1111 0011 1110 xxxx 10x0 xxxx xxxx xxxx */
+ DECODE_SIMULATEX(0xfff0d000, 0xf3e08000, PROBES_T32_MRS,
+ REGS(0, 0, NOSPPC, 0, 0)),
+
+ /*
+ * Unsupported instructions
+ * 1111 0x11 1xxx xxxx 10x0 xxxx xxxx xxxx
+ *
+ * MSR 1111 0011 100x xxxx 10x0 xxxx xxxx xxxx
+ * DBG hint 1111 0011 1010 xxxx 10x0 x000 1111 xxxx
+ * Unallocated hints 1111 0011 1010 xxxx 10x0 x000 xxxx xxxx
+ * CPS 1111 0011 1010 xxxx 10x0 xxxx xxxx xxxx
+ * CLREX/DSB/DMB/ISB 1111 0011 1011 xxxx 10x0 xxxx xxxx xxxx
+ * BXJ 1111 0011 1100 xxxx 10x0 xxxx xxxx xxxx
+ * SUBS PC,LR,#<imm8> 1111 0011 1101 xxxx 10x0 xxxx xxxx xxxx
+ * MRS Rd, SPSR 1111 0011 1111 xxxx 10x0 xxxx xxxx xxxx
+ * SMC 1111 0111 1111 xxxx 1000 xxxx xxxx xxxx
+ * UNDEFINED 1111 0111 1111 xxxx 1010 xxxx xxxx xxxx
+ * ??? 1111 0111 1xxx xxxx 1010 xxxx xxxx xxxx
+ */
+ DECODE_REJECT (0xfb80d000, 0xf3808000),
+
+ /* Bcc 1111 0xxx xxxx xxxx 10x0 xxxx xxxx xxxx */
+ DECODE_CUSTOM (0xf800d000, 0xf0008000, PROBES_T32_BRANCH_COND),
+
+ /* BLX 1111 0xxx xxxx xxxx 11x0 xxxx xxxx xxx0 */
+ DECODE_OR (0xf800d001, 0xf000c000),
+ /* B 1111 0xxx xxxx xxxx 10x1 xxxx xxxx xxxx */
+ /* BL 1111 0xxx xxxx xxxx 11x1 xxxx xxxx xxxx */
+ DECODE_SIMULATE (0xf8009000, 0xf0009000, PROBES_T32_BRANCH),
+
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_100x_x0x1__1111[] = {
+ /* Memory hints */
+
+ /* PLD (literal) 1111 1000 x001 1111 1111 xxxx xxxx xxxx */
+ /* PLI (literal) 1111 1001 x001 1111 1111 xxxx xxxx xxxx */
+ DECODE_SIMULATE (0xfe7ff000, 0xf81ff000, PROBES_T32_PLDI),
+
+ /* PLD{W} (immediate) 1111 1000 10x1 xxxx 1111 xxxx xxxx xxxx */
+ DECODE_OR (0xffd0f000, 0xf890f000),
+ /* PLD{W} (immediate) 1111 1000 00x1 xxxx 1111 1100 xxxx xxxx */
+ DECODE_OR (0xffd0ff00, 0xf810fc00),
+ /* PLI (immediate) 1111 1001 1001 xxxx 1111 xxxx xxxx xxxx */
+ DECODE_OR (0xfff0f000, 0xf990f000),
+ /* PLI (immediate) 1111 1001 0001 xxxx 1111 1100 xxxx xxxx */
+ DECODE_SIMULATEX(0xfff0ff00, 0xf910fc00, PROBES_T32_PLDI,
+ REGS(NOPCX, 0, 0, 0, 0)),
+
+ /* PLD{W} (register) 1111 1000 00x1 xxxx 1111 0000 00xx xxxx */
+ DECODE_OR (0xffd0ffc0, 0xf810f000),
+ /* PLI (register) 1111 1001 0001 xxxx 1111 0000 00xx xxxx */
+ DECODE_SIMULATEX(0xfff0ffc0, 0xf910f000, PROBES_T32_PLDI,
+ REGS(NOPCX, 0, 0, 0, NOSPPC)),
+
+ /* Other unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_100x[] = {
+ /* Store/Load single data item */
+
+ /* ??? 1111 100x x11x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfe600000, 0xf8600000),
+
+ /* ??? 1111 1001 0101 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfff00000, 0xf9500000),
+
+ /* ??? 1111 100x 0xxx xxxx xxxx 10x0 xxxx xxxx */
+ DECODE_REJECT (0xfe800d00, 0xf8000800),
+
+ /* STRBT 1111 1000 0000 xxxx xxxx 1110 xxxx xxxx */
+ /* STRHT 1111 1000 0010 xxxx xxxx 1110 xxxx xxxx */
+ /* STRT 1111 1000 0100 xxxx xxxx 1110 xxxx xxxx */
+ /* LDRBT 1111 1000 0001 xxxx xxxx 1110 xxxx xxxx */
+ /* LDRSBT 1111 1001 0001 xxxx xxxx 1110 xxxx xxxx */
+ /* LDRHT 1111 1000 0011 xxxx xxxx 1110 xxxx xxxx */
+ /* LDRSHT 1111 1001 0011 xxxx xxxx 1110 xxxx xxxx */
+ /* LDRT 1111 1000 0101 xxxx xxxx 1110 xxxx xxxx */
+ DECODE_REJECT (0xfe800f00, 0xf8000e00),
+
+ /* STR{,B,H} Rn,[PC...] 1111 1000 xxx0 1111 xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xff1f0000, 0xf80f0000),
+
+ /* STR{,B,H} PC,[Rn...] 1111 1000 xxx0 xxxx 1111 xxxx xxxx xxxx */
+ DECODE_REJECT (0xff10f000, 0xf800f000),
+
+ /* LDR (literal) 1111 1000 x101 1111 xxxx xxxx xxxx xxxx */
+ DECODE_SIMULATEX(0xff7f0000, 0xf85f0000, PROBES_T32_LDR_LIT,
+ REGS(PC, ANY, 0, 0, 0)),
+
+ /* STR (immediate) 1111 1000 0100 xxxx xxxx 1xxx xxxx xxxx */
+ /* LDR (immediate) 1111 1000 0101 xxxx xxxx 1xxx xxxx xxxx */
+ DECODE_OR (0xffe00800, 0xf8400800),
+ /* STR (immediate) 1111 1000 1100 xxxx xxxx xxxx xxxx xxxx */
+ /* LDR (immediate) 1111 1000 1101 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xffe00000, 0xf8c00000, PROBES_T32_LDRSTR,
+ REGS(NOPCX, ANY, 0, 0, 0)),
+
+ /* STR (register) 1111 1000 0100 xxxx xxxx 0000 00xx xxxx */
+ /* LDR (register) 1111 1000 0101 xxxx xxxx 0000 00xx xxxx */
+ DECODE_EMULATEX (0xffe00fc0, 0xf8400000, PROBES_T32_LDRSTR,
+ REGS(NOPCX, ANY, 0, 0, NOSPPC)),
+
+ /* LDRB (literal) 1111 1000 x001 1111 xxxx xxxx xxxx xxxx */
+ /* LDRSB (literal) 1111 1001 x001 1111 xxxx xxxx xxxx xxxx */
+ /* LDRH (literal) 1111 1000 x011 1111 xxxx xxxx xxxx xxxx */
+ /* LDRSH (literal) 1111 1001 x011 1111 xxxx xxxx xxxx xxxx */
+ DECODE_SIMULATEX(0xfe5f0000, 0xf81f0000, PROBES_T32_LDR_LIT,
+ REGS(PC, NOSPPCX, 0, 0, 0)),
+
+ /* STRB (immediate) 1111 1000 0000 xxxx xxxx 1xxx xxxx xxxx */
+ /* STRH (immediate) 1111 1000 0010 xxxx xxxx 1xxx xxxx xxxx */
+ /* LDRB (immediate) 1111 1000 0001 xxxx xxxx 1xxx xxxx xxxx */
+ /* LDRSB (immediate) 1111 1001 0001 xxxx xxxx 1xxx xxxx xxxx */
+ /* LDRH (immediate) 1111 1000 0011 xxxx xxxx 1xxx xxxx xxxx */
+ /* LDRSH (immediate) 1111 1001 0011 xxxx xxxx 1xxx xxxx xxxx */
+ DECODE_OR (0xfec00800, 0xf8000800),
+ /* STRB (immediate) 1111 1000 1000 xxxx xxxx xxxx xxxx xxxx */
+ /* STRH (immediate) 1111 1000 1010 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRB (immediate) 1111 1000 1001 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRSB (immediate) 1111 1001 1001 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRH (immediate) 1111 1000 1011 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRSH (immediate) 1111 1001 1011 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfec00000, 0xf8800000, PROBES_T32_LDRSTR,
+ REGS(NOPCX, NOSPPCX, 0, 0, 0)),
+
+ /* STRB (register) 1111 1000 0000 xxxx xxxx 0000 00xx xxxx */
+ /* STRH (register) 1111 1000 0010 xxxx xxxx 0000 00xx xxxx */
+ /* LDRB (register) 1111 1000 0001 xxxx xxxx 0000 00xx xxxx */
+ /* LDRSB (register) 1111 1001 0001 xxxx xxxx 0000 00xx xxxx */
+ /* LDRH (register) 1111 1000 0011 xxxx xxxx 0000 00xx xxxx */
+ /* LDRSH (register) 1111 1001 0011 xxxx xxxx 0000 00xx xxxx */
+ DECODE_EMULATEX (0xfe800fc0, 0xf8000000, PROBES_T32_LDRSTR,
+ REGS(NOPCX, NOSPPCX, 0, 0, NOSPPC)),
+
+ /* Other unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_1010___1111[] = {
+ /* Data-processing (register) */
+
+ /* ??? 1111 1010 011x xxxx 1111 xxxx 1xxx xxxx */
+ DECODE_REJECT (0xffe0f080, 0xfa60f080),
+
+ /* SXTH 1111 1010 0000 1111 1111 xxxx 1xxx xxxx */
+ /* UXTH 1111 1010 0001 1111 1111 xxxx 1xxx xxxx */
+ /* SXTB16 1111 1010 0010 1111 1111 xxxx 1xxx xxxx */
+ /* UXTB16 1111 1010 0011 1111 1111 xxxx 1xxx xxxx */
+ /* SXTB 1111 1010 0100 1111 1111 xxxx 1xxx xxxx */
+ /* UXTB 1111 1010 0101 1111 1111 xxxx 1xxx xxxx */
+ DECODE_EMULATEX (0xff8ff080, 0xfa0ff080, PROBES_T32_SIGN_EXTEND,
+ REGS(0, 0, NOSPPC, 0, NOSPPC)),
+
+
+ /* ??? 1111 1010 1xxx xxxx 1111 xxxx 0x11 xxxx */
+ DECODE_REJECT (0xff80f0b0, 0xfa80f030),
+ /* ??? 1111 1010 1x11 xxxx 1111 xxxx 0xxx xxxx */
+ DECODE_REJECT (0xffb0f080, 0xfab0f000),
+
+ /* SADD16 1111 1010 1001 xxxx 1111 xxxx 0000 xxxx */
+ /* SASX 1111 1010 1010 xxxx 1111 xxxx 0000 xxxx */
+ /* SSAX 1111 1010 1110 xxxx 1111 xxxx 0000 xxxx */
+ /* SSUB16 1111 1010 1101 xxxx 1111 xxxx 0000 xxxx */
+ /* SADD8 1111 1010 1000 xxxx 1111 xxxx 0000 xxxx */
+ /* SSUB8 1111 1010 1100 xxxx 1111 xxxx 0000 xxxx */
+
+ /* QADD16 1111 1010 1001 xxxx 1111 xxxx 0001 xxxx */
+ /* QASX 1111 1010 1010 xxxx 1111 xxxx 0001 xxxx */
+ /* QSAX 1111 1010 1110 xxxx 1111 xxxx 0001 xxxx */
+ /* QSUB16 1111 1010 1101 xxxx 1111 xxxx 0001 xxxx */
+ /* QADD8 1111 1010 1000 xxxx 1111 xxxx 0001 xxxx */
+ /* QSUB8 1111 1010 1100 xxxx 1111 xxxx 0001 xxxx */
+
+ /* SHADD16 1111 1010 1001 xxxx 1111 xxxx 0010 xxxx */
+ /* SHASX 1111 1010 1010 xxxx 1111 xxxx 0010 xxxx */
+ /* SHSAX 1111 1010 1110 xxxx 1111 xxxx 0010 xxxx */
+ /* SHSUB16 1111 1010 1101 xxxx 1111 xxxx 0010 xxxx */
+ /* SHADD8 1111 1010 1000 xxxx 1111 xxxx 0010 xxxx */
+ /* SHSUB8 1111 1010 1100 xxxx 1111 xxxx 0010 xxxx */
+
+ /* UADD16 1111 1010 1001 xxxx 1111 xxxx 0100 xxxx */
+ /* UASX 1111 1010 1010 xxxx 1111 xxxx 0100 xxxx */
+ /* USAX 1111 1010 1110 xxxx 1111 xxxx 0100 xxxx */
+ /* USUB16 1111 1010 1101 xxxx 1111 xxxx 0100 xxxx */
+ /* UADD8 1111 1010 1000 xxxx 1111 xxxx 0100 xxxx */
+ /* USUB8 1111 1010 1100 xxxx 1111 xxxx 0100 xxxx */
+
+ /* UQADD16 1111 1010 1001 xxxx 1111 xxxx 0101 xxxx */
+ /* UQASX 1111 1010 1010 xxxx 1111 xxxx 0101 xxxx */
+ /* UQSAX 1111 1010 1110 xxxx 1111 xxxx 0101 xxxx */
+ /* UQSUB16 1111 1010 1101 xxxx 1111 xxxx 0101 xxxx */
+ /* UQADD8 1111 1010 1000 xxxx 1111 xxxx 0101 xxxx */
+ /* UQSUB8 1111 1010 1100 xxxx 1111 xxxx 0101 xxxx */
+
+ /* UHADD16 1111 1010 1001 xxxx 1111 xxxx 0110 xxxx */
+ /* UHASX 1111 1010 1010 xxxx 1111 xxxx 0110 xxxx */
+ /* UHSAX 1111 1010 1110 xxxx 1111 xxxx 0110 xxxx */
+ /* UHSUB16 1111 1010 1101 xxxx 1111 xxxx 0110 xxxx */
+ /* UHADD8 1111 1010 1000 xxxx 1111 xxxx 0110 xxxx */
+ /* UHSUB8 1111 1010 1100 xxxx 1111 xxxx 0110 xxxx */
+ DECODE_OR (0xff80f080, 0xfa80f000),
+
+ /* SXTAH 1111 1010 0000 xxxx 1111 xxxx 1xxx xxxx */
+ /* UXTAH 1111 1010 0001 xxxx 1111 xxxx 1xxx xxxx */
+ /* SXTAB16 1111 1010 0010 xxxx 1111 xxxx 1xxx xxxx */
+ /* UXTAB16 1111 1010 0011 xxxx 1111 xxxx 1xxx xxxx */
+ /* SXTAB 1111 1010 0100 xxxx 1111 xxxx 1xxx xxxx */
+ /* UXTAB 1111 1010 0101 xxxx 1111 xxxx 1xxx xxxx */
+ DECODE_OR (0xff80f080, 0xfa00f080),
+
+ /* QADD 1111 1010 1000 xxxx 1111 xxxx 1000 xxxx */
+ /* QDADD 1111 1010 1000 xxxx 1111 xxxx 1001 xxxx */
+ /* QSUB 1111 1010 1000 xxxx 1111 xxxx 1010 xxxx */
+ /* QDSUB 1111 1010 1000 xxxx 1111 xxxx 1011 xxxx */
+ DECODE_OR (0xfff0f0c0, 0xfa80f080),
+
+ /* SEL 1111 1010 1010 xxxx 1111 xxxx 1000 xxxx */
+ DECODE_OR (0xfff0f0f0, 0xfaa0f080),
+
+ /* LSL 1111 1010 000x xxxx 1111 xxxx 0000 xxxx */
+ /* LSR 1111 1010 001x xxxx 1111 xxxx 0000 xxxx */
+ /* ASR 1111 1010 010x xxxx 1111 xxxx 0000 xxxx */
+ /* ROR 1111 1010 011x xxxx 1111 xxxx 0000 xxxx */
+ DECODE_EMULATEX (0xff80f0f0, 0xfa00f000, PROBES_T32_MEDIA,
+ REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)),
+
+ /* CLZ 1111 1010 1010 xxxx 1111 xxxx 1000 xxxx */
+ DECODE_OR (0xfff0f0f0, 0xfab0f080),
+
+ /* REV 1111 1010 1001 xxxx 1111 xxxx 1000 xxxx */
+ /* REV16 1111 1010 1001 xxxx 1111 xxxx 1001 xxxx */
+ /* RBIT 1111 1010 1001 xxxx 1111 xxxx 1010 xxxx */
+ /* REVSH 1111 1010 1001 xxxx 1111 xxxx 1011 xxxx */
+ DECODE_EMULATEX (0xfff0f0c0, 0xfa90f080, PROBES_T32_REVERSE,
+ REGS(NOSPPC, 0, NOSPPC, 0, SAMEAS16)),
+
+ /* Other unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_1011_0[] = {
+ /* Multiply, multiply accumulate, and absolute difference */
+
+ /* ??? 1111 1011 0000 xxxx 1111 xxxx 0001 xxxx */
+ DECODE_REJECT (0xfff0f0f0, 0xfb00f010),
+ /* ??? 1111 1011 0111 xxxx 1111 xxxx 0001 xxxx */
+ DECODE_REJECT (0xfff0f0f0, 0xfb70f010),
+
+ /* SMULxy 1111 1011 0001 xxxx 1111 xxxx 00xx xxxx */
+ DECODE_OR (0xfff0f0c0, 0xfb10f000),
+ /* MUL 1111 1011 0000 xxxx 1111 xxxx 0000 xxxx */
+ /* SMUAD{X} 1111 1011 0010 xxxx 1111 xxxx 000x xxxx */
+ /* SMULWy 1111 1011 0011 xxxx 1111 xxxx 000x xxxx */
+ /* SMUSD{X} 1111 1011 0100 xxxx 1111 xxxx 000x xxxx */
+ /* SMMUL{R} 1111 1011 0101 xxxx 1111 xxxx 000x xxxx */
+ /* USAD8 1111 1011 0111 xxxx 1111 xxxx 0000 xxxx */
+ DECODE_EMULATEX (0xff80f0e0, 0xfb00f000, PROBES_T32_MUL_ADD,
+ REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)),
+
+ /* ??? 1111 1011 0111 xxxx xxxx xxxx 0001 xxxx */
+ DECODE_REJECT (0xfff000f0, 0xfb700010),
+
+ /* SMLAxy 1111 1011 0001 xxxx xxxx xxxx 00xx xxxx */
+ DECODE_OR (0xfff000c0, 0xfb100000),
+ /* MLA 1111 1011 0000 xxxx xxxx xxxx 0000 xxxx */
+ /* MLS 1111 1011 0000 xxxx xxxx xxxx 0001 xxxx */
+ /* SMLAD{X} 1111 1011 0010 xxxx xxxx xxxx 000x xxxx */
+ /* SMLAWy 1111 1011 0011 xxxx xxxx xxxx 000x xxxx */
+ /* SMLSD{X} 1111 1011 0100 xxxx xxxx xxxx 000x xxxx */
+ /* SMMLA{R} 1111 1011 0101 xxxx xxxx xxxx 000x xxxx */
+ /* SMMLS{R} 1111 1011 0110 xxxx xxxx xxxx 000x xxxx */
+ /* USADA8 1111 1011 0111 xxxx xxxx xxxx 0000 xxxx */
+ DECODE_EMULATEX (0xff8000c0, 0xfb000000, PROBES_T32_MUL_ADD2,
+ REGS(NOSPPC, NOSPPCX, NOSPPC, 0, NOSPPC)),
+
+ /* Other unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_1011_1[] = {
+ /* Long multiply, long multiply accumulate, and divide */
+
+ /* UMAAL 1111 1011 1110 xxxx xxxx xxxx 0110 xxxx */
+ DECODE_OR (0xfff000f0, 0xfbe00060),
+ /* SMLALxy 1111 1011 1100 xxxx xxxx xxxx 10xx xxxx */
+ DECODE_OR (0xfff000c0, 0xfbc00080),
+ /* SMLALD{X} 1111 1011 1100 xxxx xxxx xxxx 110x xxxx */
+ /* SMLSLD{X} 1111 1011 1101 xxxx xxxx xxxx 110x xxxx */
+ DECODE_OR (0xffe000e0, 0xfbc000c0),
+ /* SMULL 1111 1011 1000 xxxx xxxx xxxx 0000 xxxx */
+ /* UMULL 1111 1011 1010 xxxx xxxx xxxx 0000 xxxx */
+ /* SMLAL 1111 1011 1100 xxxx xxxx xxxx 0000 xxxx */
+ /* UMLAL 1111 1011 1110 xxxx xxxx xxxx 0000 xxxx */
+ DECODE_EMULATEX (0xff9000f0, 0xfb800000, PROBES_T32_MUL_ADD_LONG,
+ REGS(NOSPPC, NOSPPC, NOSPPC, 0, NOSPPC)),
+
+ /* SDIV 1111 1011 1001 xxxx xxxx xxxx 1111 xxxx */
+ /* UDIV 1111 1011 1011 xxxx xxxx xxxx 1111 xxxx */
+ /* Other unallocated instructions... */
+ DECODE_END
+};
+
+const union decode_item probes_decode_thumb32_table[] = {
+
+ /*
+ * Load/store multiple instructions
+ * 1110 100x x0xx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xfe400000, 0xe8000000, t32_table_1110_100x_x0xx),
+
+ /*
+ * Load/store dual, load/store exclusive, table branch
+ * 1110 100x x1xx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xfe400000, 0xe8400000, t32_table_1110_100x_x1xx),
+
+ /*
+ * Data-processing (shifted register)
+ * 1110 101x xxxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xfe000000, 0xea000000, t32_table_1110_101x),
+
+ /*
+ * Coprocessor instructions
+ * 1110 11xx xxxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_REJECT (0xfc000000, 0xec000000),
+
+ /*
+ * Data-processing (modified immediate)
+ * 1111 0x0x xxxx xxxx 0xxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xfa008000, 0xf0000000, t32_table_1111_0x0x___0),
+
+ /*
+ * Data-processing (plain binary immediate)
+ * 1111 0x1x xxxx xxxx 0xxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xfa008000, 0xf2000000, t32_table_1111_0x1x___0),
+
+ /*
+ * Branches and miscellaneous control
+ * 1111 0xxx xxxx xxxx 1xxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xf8008000, 0xf0008000, t32_table_1111_0xxx___1),
+
+ /*
+ * Advanced SIMD element or structure load/store instructions
+ * 1111 1001 xxx0 xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_REJECT (0xff100000, 0xf9000000),
+
+ /*
+ * Memory hints
+ * 1111 100x x0x1 xxxx 1111 xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xfe50f000, 0xf810f000, t32_table_1111_100x_x0x1__1111),
+
+ /*
+ * Store single data item
+ * 1111 1000 xxx0 xxxx xxxx xxxx xxxx xxxx
+ * Load single data items
+ * 1111 100x xxx1 xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xfe000000, 0xf8000000, t32_table_1111_100x),
+
+ /*
+ * Data-processing (register)
+ * 1111 1010 xxxx xxxx 1111 xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xff00f000, 0xfa00f000, t32_table_1111_1010___1111),
+
+ /*
+ * Multiply, multiply accumulate, and absolute difference
+ * 1111 1011 0xxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xff800000, 0xfb000000, t32_table_1111_1011_0),
+
+ /*
+ * Long multiply, long multiply accumulate, and divide
+ * 1111 1011 1xxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xff800000, 0xfb800000, t32_table_1111_1011_1),
+
+ /*
+ * Coprocessor instructions
+ * 1111 11xx xxxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_END
+};
+#ifdef CONFIG_ARM_KPROBES_TEST_MODULE
+EXPORT_SYMBOL_GPL(probes_decode_thumb32_table);
+#endif
+
+static const union decode_item t16_table_1011[] = {
+ /* Miscellaneous 16-bit instructions */
+
+ /* ADD (SP plus immediate) 1011 0000 0xxx xxxx */
+ /* SUB (SP minus immediate) 1011 0000 1xxx xxxx */
+ DECODE_SIMULATE (0xff00, 0xb000, PROBES_T16_ADD_SP),
+
+ /* CBZ 1011 00x1 xxxx xxxx */
+ /* CBNZ 1011 10x1 xxxx xxxx */
+ DECODE_SIMULATE (0xf500, 0xb100, PROBES_T16_CBZ),
+
+ /* SXTH 1011 0010 00xx xxxx */
+ /* SXTB 1011 0010 01xx xxxx */
+ /* UXTH 1011 0010 10xx xxxx */
+ /* UXTB 1011 0010 11xx xxxx */
+ /* REV 1011 1010 00xx xxxx */
+ /* REV16 1011 1010 01xx xxxx */
+ /* ??? 1011 1010 10xx xxxx */
+ /* REVSH 1011 1010 11xx xxxx */
+ DECODE_REJECT (0xffc0, 0xba80),
+ DECODE_EMULATE (0xf500, 0xb000, PROBES_T16_SIGN_EXTEND),
+
+ /* PUSH 1011 010x xxxx xxxx */
+ DECODE_CUSTOM (0xfe00, 0xb400, PROBES_T16_PUSH),
+ /* POP 1011 110x xxxx xxxx */
+ DECODE_CUSTOM (0xfe00, 0xbc00, PROBES_T16_POP),
+
+ /*
+ * If-Then, and hints
+ * 1011 1111 xxxx xxxx
+ */
+
+ /* YIELD 1011 1111 0001 0000 */
+ DECODE_OR (0xffff, 0xbf10),
+ /* SEV 1011 1111 0100 0000 */
+ DECODE_EMULATE (0xffff, 0xbf40, PROBES_T16_SEV),
+ /* NOP 1011 1111 0000 0000 */
+ /* WFE 1011 1111 0010 0000 */
+ /* WFI 1011 1111 0011 0000 */
+ DECODE_SIMULATE (0xffcf, 0xbf00, PROBES_T16_WFE),
+ /* Unassigned hints 1011 1111 xxxx 0000 */
+ DECODE_REJECT (0xff0f, 0xbf00),
+ /* IT 1011 1111 xxxx xxxx */
+ DECODE_CUSTOM (0xff00, 0xbf00, PROBES_T16_IT),
+
+ /* SETEND 1011 0110 010x xxxx */
+ /* CPS 1011 0110 011x xxxx */
+ /* BKPT 1011 1110 xxxx xxxx */
+ /* And unallocated instructions... */
+ DECODE_END
+};
+
+const union decode_item probes_decode_thumb16_table[] = {
+
+ /*
+ * Shift (immediate), add, subtract, move, and compare
+ * 00xx xxxx xxxx xxxx
+ */
+
+ /* CMP (immediate) 0010 1xxx xxxx xxxx */
+ DECODE_EMULATE (0xf800, 0x2800, PROBES_T16_CMP),
+
+ /* ADD (register) 0001 100x xxxx xxxx */
+ /* SUB (register) 0001 101x xxxx xxxx */
+ /* LSL (immediate) 0000 0xxx xxxx xxxx */
+ /* LSR (immediate) 0000 1xxx xxxx xxxx */
+ /* ASR (immediate) 0001 0xxx xxxx xxxx */
+ /* ADD (immediate, Thumb) 0001 110x xxxx xxxx */
+ /* SUB (immediate, Thumb) 0001 111x xxxx xxxx */
+ /* MOV (immediate) 0010 0xxx xxxx xxxx */
+ /* ADD (immediate, Thumb) 0011 0xxx xxxx xxxx */
+ /* SUB (immediate, Thumb) 0011 1xxx xxxx xxxx */
+ DECODE_EMULATE (0xc000, 0x0000, PROBES_T16_ADDSUB),
+
+ /*
+ * 16-bit Thumb data-processing instructions
+ * 0100 00xx xxxx xxxx
+ */
+
+ /* TST (register) 0100 0010 00xx xxxx */
+ DECODE_EMULATE (0xffc0, 0x4200, PROBES_T16_CMP),
+ /* CMP (register) 0100 0010 10xx xxxx */
+ /* CMN (register) 0100 0010 11xx xxxx */
+ DECODE_EMULATE (0xff80, 0x4280, PROBES_T16_CMP),
+ /* AND (register) 0100 0000 00xx xxxx */
+ /* EOR (register) 0100 0000 01xx xxxx */
+ /* LSL (register) 0100 0000 10xx xxxx */
+ /* LSR (register) 0100 0000 11xx xxxx */
+ /* ASR (register) 0100 0001 00xx xxxx */
+ /* ADC (register) 0100 0001 01xx xxxx */
+ /* SBC (register) 0100 0001 10xx xxxx */
+ /* ROR (register) 0100 0001 11xx xxxx */
+ /* RSB (immediate) 0100 0010 01xx xxxx */
+ /* ORR (register) 0100 0011 00xx xxxx */
+ /* MUL 0100 0011 00xx xxxx */
+ /* BIC (register) 0100 0011 10xx xxxx */
+ /* MVN (register) 0100 0011 10xx xxxx */
+ DECODE_EMULATE (0xfc00, 0x4000, PROBES_T16_LOGICAL),
+
+ /*
+ * Special data instructions and branch and exchange
+ * 0100 01xx xxxx xxxx
+ */
+
+ /* BLX pc 0100 0111 1111 1xxx */
+ DECODE_REJECT (0xfff8, 0x47f8),
+
+ /* BX (register) 0100 0111 0xxx xxxx */
+ /* BLX (register) 0100 0111 1xxx xxxx */
+ DECODE_SIMULATE (0xff00, 0x4700, PROBES_T16_BLX),
+
+ /* ADD pc, pc 0100 0100 1111 1111 */
+ DECODE_REJECT (0xffff, 0x44ff),
+
+ /* ADD (register) 0100 0100 xxxx xxxx */
+ /* CMP (register) 0100 0101 xxxx xxxx */
+ /* MOV (register) 0100 0110 xxxx xxxx */
+ DECODE_CUSTOM (0xfc00, 0x4400, PROBES_T16_HIREGOPS),
+
+ /*
+ * Load from Literal Pool
+ * LDR (literal) 0100 1xxx xxxx xxxx
+ */
+ DECODE_SIMULATE (0xf800, 0x4800, PROBES_T16_LDR_LIT),
+
+ /*
+ * 16-bit Thumb Load/store instructions
+ * 0101 xxxx xxxx xxxx
+ * 011x xxxx xxxx xxxx
+ * 100x xxxx xxxx xxxx
+ */
+
+ /* STR (register) 0101 000x xxxx xxxx */
+ /* STRH (register) 0101 001x xxxx xxxx */
+ /* STRB (register) 0101 010x xxxx xxxx */
+ /* LDRSB (register) 0101 011x xxxx xxxx */
+ /* LDR (register) 0101 100x xxxx xxxx */
+ /* LDRH (register) 0101 101x xxxx xxxx */
+ /* LDRB (register) 0101 110x xxxx xxxx */
+ /* LDRSH (register) 0101 111x xxxx xxxx */
+ /* STR (immediate, Thumb) 0110 0xxx xxxx xxxx */
+ /* LDR (immediate, Thumb) 0110 1xxx xxxx xxxx */
+ /* STRB (immediate, Thumb) 0111 0xxx xxxx xxxx */
+ /* LDRB (immediate, Thumb) 0111 1xxx xxxx xxxx */
+ DECODE_EMULATE (0xc000, 0x4000, PROBES_T16_LDRHSTRH),
+ /* STRH (immediate, Thumb) 1000 0xxx xxxx xxxx */
+ /* LDRH (immediate, Thumb) 1000 1xxx xxxx xxxx */
+ DECODE_EMULATE (0xf000, 0x8000, PROBES_T16_LDRHSTRH),
+ /* STR (immediate, Thumb) 1001 0xxx xxxx xxxx */
+ /* LDR (immediate, Thumb) 1001 1xxx xxxx xxxx */
+ DECODE_SIMULATE (0xf000, 0x9000, PROBES_T16_LDRSTR),
+
+ /*
+ * Generate PC-/SP-relative address
+ * ADR (literal) 1010 0xxx xxxx xxxx
+ * ADD (SP plus immediate) 1010 1xxx xxxx xxxx
+ */
+ DECODE_SIMULATE (0xf000, 0xa000, PROBES_T16_ADR),
+
+ /*
+ * Miscellaneous 16-bit instructions
+ * 1011 xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xf000, 0xb000, t16_table_1011),
+
+ /* STM 1100 0xxx xxxx xxxx */
+ /* LDM 1100 1xxx xxxx xxxx */
+ DECODE_EMULATE (0xf000, 0xc000, PROBES_T16_LDMSTM),
+
+ /*
+ * Conditional branch, and Supervisor Call
+ */
+
+ /* Permanently UNDEFINED 1101 1110 xxxx xxxx */
+ /* SVC 1101 1111 xxxx xxxx */
+ DECODE_REJECT (0xfe00, 0xde00),
+
+ /* Conditional branch 1101 xxxx xxxx xxxx */
+ DECODE_CUSTOM (0xf000, 0xd000, PROBES_T16_BRANCH_COND),
+
+ /*
+ * Unconditional branch
+ * B 1110 0xxx xxxx xxxx
+ */
+ DECODE_SIMULATE (0xf800, 0xe000, PROBES_T16_BRANCH),
+
+ DECODE_END
+};
+#ifdef CONFIG_ARM_KPROBES_TEST_MODULE
+EXPORT_SYMBOL_GPL(probes_decode_thumb16_table);
+#endif
+
+static unsigned long __kprobes thumb_check_cc(unsigned long cpsr)
+{
+ if (unlikely(in_it_block(cpsr)))
+ return probes_condition_checks[current_cond(cpsr)](cpsr);
+ return true;
+}
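+
+/*
+ * Outside an IT block, 16-bit and 32-bit Thumb instructions are effectively
+ * unconditional, hence the default "return true" above. Conditional branches
+ * carry their condition in the encoding and are expected to install their own
+ * check via their custom decoders.
+ */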
+
+static void __kprobes thumb16_singlestep(probes_opcode_t opcode,
+ struct arch_probes_insn *asi,
+ struct pt_regs *regs)
+{
+ regs->ARM_pc += 2;
+ asi->insn_handler(opcode, asi, regs);
+ regs->ARM_cpsr = it_advance(regs->ARM_cpsr);
+}
+
+static void __kprobes thumb32_singlestep(probes_opcode_t opcode,
+ struct arch_probes_insn *asi,
+ struct pt_regs *regs)
+{
+ regs->ARM_pc += 4;
+ asi->insn_handler(opcode, asi, regs);
+ regs->ARM_cpsr = it_advance(regs->ARM_cpsr);
+}
+
+enum probes_insn __kprobes
+thumb16_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi,
+ bool emulate, const union decode_action *actions)
+{
+ asi->insn_singlestep = thumb16_singlestep;
+ asi->insn_check_cc = thumb_check_cc;
+ return probes_decode_insn(insn, asi, probes_decode_thumb16_table, true,
+ emulate, actions);
+}
+
+enum probes_insn __kprobes
+thumb32_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi,
+ bool emulate, const union decode_action *actions)
+{
+ asi->insn_singlestep = thumb32_singlestep;
+ asi->insn_check_cc = thumb_check_cc;
+ return probes_decode_insn(insn, asi, probes_decode_thumb32_table, true,
+ emulate, actions);
+}
diff --git a/arch/arm/kernel/probes-thumb.h b/arch/arm/kernel/probes-thumb.h
new file mode 100644
index 00000000000..7c6f6ebe514
--- /dev/null
+++ b/arch/arm/kernel/probes-thumb.h
@@ -0,0 +1,97 @@
+/*
+ * arch/arm/kernel/probes-thumb.h
+ *
+ * Copyright 2013 Linaro Ltd.
+ * Written by: David A. Long
+ *
+ * The code contained herein is licensed under the GNU General Public
+ * License. You may obtain a copy of the GNU General Public License
+ * Version 2 or later at the following locations:
+ *
+ * http://www.opensource.org/licenses/gpl-license.html
+ * http://www.gnu.org/copyleft/gpl.html
+ */
+
+#ifndef _ARM_KERNEL_PROBES_THUMB_H
+#define _ARM_KERNEL_PROBES_THUMB_H
+
+/*
+ * True if current instruction is in an IT block.
+ */
+#define in_it_block(cpsr) ((cpsr & 0x06000c00) != 0x00000000)
+
+/*
+ * Return the condition code to check for the currently executing instruction.
+ * This is in ITSTATE<7:4> which is in CPSR<15:12> but is only valid if
+ * in_it_block returns true.
+ */
+#define current_cond(cpsr) ((cpsr >> 12) & 0xf)
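+
+/*
+ * As an illustration: inside a single-instruction "IT NE" block ITSTATE<7:0>
+ * is 0b00011000, so CPSR<15:12> = 0b0001 and CPSR<11:10> = 0b10. in_it_block()
+ * is then non-zero and current_cond() returns 0x1, selecting the NE check.
+ */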
+
+enum probes_t32_action {
+ PROBES_T32_EMULATE_NONE,
+ PROBES_T32_SIMULATE_NOP,
+ PROBES_T32_LDMSTM,
+ PROBES_T32_LDRDSTRD,
+ PROBES_T32_TABLE_BRANCH,
+ PROBES_T32_TST,
+ PROBES_T32_CMP,
+ PROBES_T32_MOV,
+ PROBES_T32_ADDSUB,
+ PROBES_T32_LOGICAL,
+ PROBES_T32_ADDWSUBW_PC,
+ PROBES_T32_ADDWSUBW,
+ PROBES_T32_MOVW,
+ PROBES_T32_SAT,
+ PROBES_T32_BITFIELD,
+ PROBES_T32_SEV,
+ PROBES_T32_WFE,
+ PROBES_T32_MRS,
+ PROBES_T32_BRANCH_COND,
+ PROBES_T32_BRANCH,
+ PROBES_T32_PLDI,
+ PROBES_T32_LDR_LIT,
+ PROBES_T32_LDRSTR,
+ PROBES_T32_SIGN_EXTEND,
+ PROBES_T32_MEDIA,
+ PROBES_T32_REVERSE,
+ PROBES_T32_MUL_ADD,
+ PROBES_T32_MUL_ADD2,
+ PROBES_T32_MUL_ADD_LONG,
+ NUM_PROBES_T32_ACTIONS
+};
+
+enum probes_t16_action {
+ PROBES_T16_ADD_SP,
+ PROBES_T16_CBZ,
+ PROBES_T16_SIGN_EXTEND,
+ PROBES_T16_PUSH,
+ PROBES_T16_POP,
+ PROBES_T16_SEV,
+ PROBES_T16_WFE,
+ PROBES_T16_IT,
+ PROBES_T16_CMP,
+ PROBES_T16_ADDSUB,
+ PROBES_T16_LOGICAL,
+ PROBES_T16_BLX,
+ PROBES_T16_HIREGOPS,
+ PROBES_T16_LDR_LIT,
+ PROBES_T16_LDRHSTRH,
+ PROBES_T16_LDRSTR,
+ PROBES_T16_ADR,
+ PROBES_T16_LDMSTM,
+ PROBES_T16_BRANCH_COND,
+ PROBES_T16_BRANCH,
+ NUM_PROBES_T16_ACTIONS
+};
+
+extern const union decode_item probes_decode_thumb32_table[];
+extern const union decode_item probes_decode_thumb16_table[];
+
+enum probes_insn __kprobes
+thumb16_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi,
+ bool emulate, const union decode_action *actions);
+enum probes_insn __kprobes
+thumb32_probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi,
+ bool emulate, const union decode_action *actions);
+
+#endif
diff --git a/arch/arm/kernel/probes.c b/arch/arm/kernel/probes.c
new file mode 100644
index 00000000000..a8ab540d7e7
--- /dev/null
+++ b/arch/arm/kernel/probes.c
@@ -0,0 +1,456 @@
+/*
+ * arch/arm/kernel/probes.c
+ *
+ * Copyright (C) 2011 Jon Medhurst <tixy@yxit.co.uk>.
+ *
+ * Some contents moved here from arch/arm/include/asm/kprobes-arm.c which is
+ * Copyright (C) 2006, 2007 Motorola Inc.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#include <linux/kernel.h>
+#include <linux/types.h>
+#include <asm/system_info.h>
+#include <asm/ptrace.h>
+#include <linux/bug.h>
+
+#include "probes.h"
+
+
+#ifndef find_str_pc_offset
+
+/*
+ * For STR and STM instructions, an ARM core may choose to use either
+ * a +8 or a +12 displacement from the current instruction's address.
+ * Whichever value is chosen for a given core, it must be the same for
+ * both instructions and may not change. This function measures it.
+ */
+
+int str_pc_offset;
+
+void __init find_str_pc_offset(void)
+{
+ int addr, scratch, ret;
+
+ __asm__ (
+ "sub %[ret], pc, #4 \n\t"
+ "str pc, %[addr] \n\t"
+ "ldr %[scr], %[addr] \n\t"
+ "sub %[ret], %[scr], %[ret] \n\t"
+ : [ret] "=r" (ret), [scr] "=r" (scratch), [addr] "+m" (addr));
+
+ str_pc_offset = ret;
+}
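+
+/*
+ * Reading of the sequence above: in ARM state the "sub" sees PC as its own
+ * address + 8, so ret ends up holding the address of the "str". The "str"
+ * then stores that address plus the core's PC store offset, and the final
+ * subtraction leaves just the offset (8 or 12) in str_pc_offset.
+ */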
+
+#endif /* !find_str_pc_offset */
+
+
+#ifndef test_load_write_pc_interworking
+
+bool load_write_pc_interworks;
+
+void __init test_load_write_pc_interworking(void)
+{
+ int arch = cpu_architecture();
+ BUG_ON(arch == CPU_ARCH_UNKNOWN);
+ load_write_pc_interworks = arch >= CPU_ARCH_ARMv5T;
+}
+
+#endif /* !test_load_write_pc_interworking */
+
+
+#ifndef test_alu_write_pc_interworking
+
+bool alu_write_pc_interworks;
+
+void __init test_alu_write_pc_interworking(void)
+{
+ int arch = cpu_architecture();
+ BUG_ON(arch == CPU_ARCH_UNKNOWN);
+ alu_write_pc_interworks = arch >= CPU_ARCH_ARMv7;
+}
+
+#endif /* !test_alu_write_pc_interworking */
+
+
+void __init arm_probes_decode_init(void)
+{
+ find_str_pc_offset();
+ test_load_write_pc_interworking();
+ test_alu_write_pc_interworking();
+}
+
+
+static unsigned long __kprobes __check_eq(unsigned long cpsr)
+{
+ return cpsr & PSR_Z_BIT;
+}
+
+static unsigned long __kprobes __check_ne(unsigned long cpsr)
+{
+ return (~cpsr) & PSR_Z_BIT;
+}
+
+static unsigned long __kprobes __check_cs(unsigned long cpsr)
+{
+ return cpsr & PSR_C_BIT;
+}
+
+static unsigned long __kprobes __check_cc(unsigned long cpsr)
+{
+ return (~cpsr) & PSR_C_BIT;
+}
+
+static unsigned long __kprobes __check_mi(unsigned long cpsr)
+{
+ return cpsr & PSR_N_BIT;
+}
+
+static unsigned long __kprobes __check_pl(unsigned long cpsr)
+{
+ return (~cpsr) & PSR_N_BIT;
+}
+
+static unsigned long __kprobes __check_vs(unsigned long cpsr)
+{
+ return cpsr & PSR_V_BIT;
+}
+
+static unsigned long __kprobes __check_vc(unsigned long cpsr)
+{
+ return (~cpsr) & PSR_V_BIT;
+}
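+
+/*
+ * The checks below rely on the CPSR flag layout: N is bit 31, Z is bit 30,
+ * C is bit 29 and V is bit 28. So "cpsr >> 1" lines Z up with C, and
+ * "cpsr << 3" lines V up with N, letting each condition be computed with a
+ * couple of bitwise operations.
+ */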
+
+static unsigned long __kprobes __check_hi(unsigned long cpsr)
+{
+ cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
+ return cpsr & PSR_C_BIT;
+}
+
+static unsigned long __kprobes __check_ls(unsigned long cpsr)
+{
+ cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
+ return (~cpsr) & PSR_C_BIT;
+}
+
+static unsigned long __kprobes __check_ge(unsigned long cpsr)
+{
+ cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
+ return (~cpsr) & PSR_N_BIT;
+}
+
+static unsigned long __kprobes __check_lt(unsigned long cpsr)
+{
+ cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
+ return cpsr & PSR_N_BIT;
+}
+
+static unsigned long __kprobes __check_gt(unsigned long cpsr)
+{
+ unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
+ temp |= (cpsr << 1); /* PSR_N_BIT |= PSR_Z_BIT */
+ return (~temp) & PSR_N_BIT;
+}
+
+static unsigned long __kprobes __check_le(unsigned long cpsr)
+{
+ unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
+ temp |= (cpsr << 1); /* PSR_N_BIT |= PSR_Z_BIT */
+ return temp & PSR_N_BIT;
+}
+
+static unsigned long __kprobes __check_al(unsigned long cpsr)
+{
+ return true;
+}
+
+probes_check_cc * const probes_condition_checks[16] = {
+ &__check_eq, &__check_ne, &__check_cs, &__check_cc,
+ &__check_mi, &__check_pl, &__check_vs, &__check_vc,
+ &__check_hi, &__check_ls, &__check_ge, &__check_lt,
+ &__check_gt, &__check_le, &__check_al, &__check_al
+};
+
+
+void __kprobes probes_simulate_nop(probes_opcode_t opcode,
+ struct arch_probes_insn *asi,
+ struct pt_regs *regs)
+{
+}
+
+void __kprobes probes_emulate_none(probes_opcode_t opcode,
+ struct arch_probes_insn *asi,
+ struct pt_regs *regs)
+{
+ asi->insn_fn();
+}
+
+/*
+ * Prepare an instruction slot to receive an instruction for emulating.
+ * This is done by placing a subroutine return after the location where the
+ * instruction will be placed. We also modify ARM instructions to be
+ * unconditional as the condition code will already be checked before any
+ * emulation handler is called.
+ */
+static probes_opcode_t __kprobes
+prepare_emulated_insn(probes_opcode_t insn, struct arch_probes_insn *asi,
+ bool thumb)
+{
+#ifdef CONFIG_THUMB2_KERNEL
+ if (thumb) {
+ u16 *thumb_insn = (u16 *)asi->insn;
+ /* Thumb bx lr */
+ thumb_insn[1] = __opcode_to_mem_thumb16(0x4770);
+ thumb_insn[2] = __opcode_to_mem_thumb16(0x4770);
+ return insn;
+ }
+ asi->insn[1] = __opcode_to_mem_arm(0xe12fff1e); /* ARM bx lr */
+#else
+ asi->insn[1] = __opcode_to_mem_arm(0xe1a0f00e); /* mov pc, lr */
+#endif
+ /* Make an ARM instruction unconditional */
+ if (insn < 0xe0000000)
+ insn = (insn | 0xe0000000) & ~0x10000000;
+ return insn;
+}
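+
+/*
+ * For example, a conditional ADDNE r0, r1, r2 (0x10810002) is rewritten by
+ * the code above to its unconditional form 0xe0810002 before being placed
+ * in the slot; encodings with cond >= 0xe are left untouched.
+ */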
+
+/*
+ * Write a (probably modified) instruction into the slot previously prepared by
+ * prepare_emulated_insn
+ */
+static void __kprobes
+set_emulated_insn(probes_opcode_t insn, struct arch_probes_insn *asi,
+ bool thumb)
+{
+#ifdef CONFIG_THUMB2_KERNEL
+ if (thumb) {
+ u16 *ip = (u16 *)asi->insn;
+ if (is_wide_instruction(insn))
+ *ip++ = __opcode_to_mem_thumb16(insn >> 16);
+ *ip++ = __opcode_to_mem_thumb16(insn);
+ return;
+ }
+#endif
+ asi->insn[0] = __opcode_to_mem_arm(insn);
+}
+
+/*
+ * When we modify the register numbers encoded in an instruction to be emulated,
+ * the new values come from this define. For ARM and 32-bit Thumb instructions
+ * this gives...
+ *
+ * bit position 16 12 8 4 0
+ * ---------------+---+---+---+---+---+
+ * register r2 r0 r1 -- r3
+ */
+#define INSN_NEW_BITS 0x00020103
+
+/* Each nibble has the same value as that at INSN_NEW_BITS bit 16 */
+#define INSN_SAMEAS16_BITS 0x22222222
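+
+/*
+ * For example, decode_regs() below rewrites "AND r4, r5, r6" so that Rn (bits
+ * 19..16) becomes r2, Rd (bits 15..12) becomes r0 and Rm (bits 3..0) becomes
+ * r3, i.e. "AND r0, r2, r3", matching the layout of INSN_NEW_BITS.
+ */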
+
+/*
+ * Validate and modify each of the registers encoded in an instruction.
+ *
+ * Each nibble in regs contains a value from enum decode_reg_type. For each
+ * non-zero value, the corresponding nibble in pinsn is validated and modified
+ * according to the type.
+ */
+static bool __kprobes decode_regs(probes_opcode_t *pinsn, u32 regs, bool modify)
+{
+ probes_opcode_t insn = *pinsn;
+ probes_opcode_t mask = 0xf; /* Start at least significant nibble */
+
+ for (; regs != 0; regs >>= 4, mask <<= 4) {
+
+ probes_opcode_t new_bits = INSN_NEW_BITS;
+
+ switch (regs & 0xf) {
+
+ case REG_TYPE_NONE:
+ /* Nibble not a register, skip to next */
+ continue;
+
+ case REG_TYPE_ANY:
+ /* Any register is allowed */
+ break;
+
+ case REG_TYPE_SAMEAS16:
+ /* Replace register with same as at bit position 16 */
+ new_bits = INSN_SAMEAS16_BITS;
+ break;
+
+ case REG_TYPE_SP:
+ /* Only allow SP (R13) */
+ if ((insn ^ 0xdddddddd) & mask)
+ goto reject;
+ break;
+
+ case REG_TYPE_PC:
+ /* Only allow PC (R15) */
+ if ((insn ^ 0xffffffff) & mask)
+ goto reject;
+ break;
+
+ case REG_TYPE_NOSP:
+ /* Reject SP (R13) */
+ if (((insn ^ 0xdddddddd) & mask) == 0)
+ goto reject;
+ break;
+
+ case REG_TYPE_NOSPPC:
+ case REG_TYPE_NOSPPCX:
+ /* Reject SP and PC (R13 and R15) */
+ if (((insn ^ 0xdddddddd) & 0xdddddddd & mask) == 0)
+ goto reject;
+ break;
+
+ case REG_TYPE_NOPCWB:
+ if (!is_writeback(insn))
+ break; /* No writeback, so any register is OK */
+ /* fall through... */
+ case REG_TYPE_NOPC:
+ case REG_TYPE_NOPCX:
+ /* Reject PC (R15) */
+ if (((insn ^ 0xffffffff) & mask) == 0)
+ goto reject;
+ break;
+ }
+
+ /* Replace value of nibble with new register number... */
+ insn &= ~mask;
+ insn |= new_bits & mask;
+ }
+
+ if (modify)
+ *pinsn = insn;
+
+ return true;
+
+reject:
+ return false;
+}
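+
+/*
+ * The XOR tests above work per nibble: (insn ^ 0xdddddddd) is zero in a
+ * nibble exactly when that register is 13 (SP), and masking the result with
+ * 0xdddddddd also zeroes the nibble for 15 (PC), since 15 ^ 13 == 2.
+ */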
+
+static const int decode_struct_sizes[NUM_DECODE_TYPES] = {
+ [DECODE_TYPE_TABLE] = sizeof(struct decode_table),
+ [DECODE_TYPE_CUSTOM] = sizeof(struct decode_custom),
+ [DECODE_TYPE_SIMULATE] = sizeof(struct decode_simulate),
+ [DECODE_TYPE_EMULATE] = sizeof(struct decode_emulate),
+ [DECODE_TYPE_OR] = sizeof(struct decode_or),
+ [DECODE_TYPE_REJECT] = sizeof(struct decode_reject)
+};
+
+/*
+ * probes_decode_insn operates on data tables in order to decode an ARM
+ * architecture instruction onto which a kprobe has been placed.
+ *
+ * These instruction decoding tables are a concatenation of entries each
+ * of which consists of one of the following structs:
+ *
+ * decode_table
+ * decode_custom
+ * decode_simulate
+ * decode_emulate
+ * decode_or
+ * decode_reject
+ *
+ * Each of these starts with a struct decode_header which has the following
+ * fields:
+ *
+ * type_regs
+ * mask
+ * value
+ *
+ * The least significant DECODE_TYPE_BITS of type_regs contains a value
+ * from enum decode_type; this indicates which of the decode_* structs
+ * the entry contains. The value DECODE_TYPE_END indicates the end of the
+ * table.
+ *
+ * When the table is parsed, each entry is checked in turn to see if it
+ * matches the instruction to be decoded using the test:
+ *
+ * (insn & mask) == value
+ *
+ * If no match is found before the end of the table is reached then decoding
+ * fails with INSN_REJECTED.
+ *
+ * When a match is found, decode_regs() is called to validate and modify each
+ * of the registers encoded in the instruction; the data it uses to do this
+ * is (type_regs >> DECODE_TYPE_BITS). A validation failure will cause decoding
+ * to fail with INSN_REJECTED.
+ *
+ * Once the instruction has passed the above tests, further processing
+ * depends on the type of the table entry's decode struct.
+ *
+ */
+int __kprobes
+probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const union decode_item *table, bool thumb,
+ bool emulate, const union decode_action *actions)
+{
+ const struct decode_header *h = (struct decode_header *)table;
+ const struct decode_header *next;
+ bool matched = false;
+
+ if (emulate)
+ insn = prepare_emulated_insn(insn, asi, thumb);
+
+ for (;; h = next) {
+ enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK;
+ u32 regs = h->type_regs.bits >> DECODE_TYPE_BITS;
+
+ if (type == DECODE_TYPE_END)
+ return INSN_REJECTED;
+
+ next = (struct decode_header *)
+ ((uintptr_t)h + decode_struct_sizes[type]);
+
+ if (!matched && (insn & h->mask.bits) != h->value.bits)
+ continue;
+
+ if (!decode_regs(&insn, regs, emulate))
+ return INSN_REJECTED;
+
+ switch (type) {
+
+ case DECODE_TYPE_TABLE: {
+ struct decode_table *d = (struct decode_table *)h;
+ next = (struct decode_header *)d->table.table;
+ break;
+ }
+
+ case DECODE_TYPE_CUSTOM: {
+ struct decode_custom *d = (struct decode_custom *)h;
+ return actions[d->decoder.action].decoder(insn, asi, h);
+ }
+
+ case DECODE_TYPE_SIMULATE: {
+ struct decode_simulate *d = (struct decode_simulate *)h;
+ asi->insn_handler = actions[d->handler.action].handler;
+ return INSN_GOOD_NO_SLOT;
+ }
+
+ case DECODE_TYPE_EMULATE: {
+ struct decode_emulate *d = (struct decode_emulate *)h;
+
+ if (!emulate)
+ return actions[d->handler.action].decoder(insn,
+ asi, h);
+
+ asi->insn_handler = actions[d->handler.action].handler;
+ set_emulated_insn(insn, asi, thumb);
+ return INSN_GOOD;
+ }
+
+ case DECODE_TYPE_OR:
+ matched = true;
+ break;
+
+ case DECODE_TYPE_REJECT:
+ default:
+ return INSN_REJECTED;
+ }
+ }
+}
diff --git a/arch/arm/kernel/probes.h b/arch/arm/kernel/probes.h
new file mode 100644
index 00000000000..dba9f2466a9
--- /dev/null
+++ b/arch/arm/kernel/probes.h
@@ -0,0 +1,407 @@
+/*
+ * arch/arm/kernel/probes.h
+ *
+ * Copyright (C) 2011 Jon Medhurst <tixy@yxit.co.uk>.
+ *
+ * Some contents moved here from arch/arm/include/asm/kprobes.h which is
+ * Copyright (C) 2006, 2007 Motorola Inc.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ */
+
+#ifndef _ARM_KERNEL_PROBES_H
+#define _ARM_KERNEL_PROBES_H
+
+#include <linux/types.h>
+#include <linux/stddef.h>
+#include <asm/probes.h>
+
+void __init arm_probes_decode_init(void);
+
+extern probes_check_cc * const probes_condition_checks[16];
+
+#if __LINUX_ARM_ARCH__ >= 7
+
+/* str_pc_offset is architecturally defined from ARMv7 onwards */
+#define str_pc_offset 8
+#define find_str_pc_offset()
+
+#else /* __LINUX_ARM_ARCH__ < 7 */
+
+/* We need a run-time check to determine str_pc_offset */
+extern int str_pc_offset;
+void __init find_str_pc_offset(void);
+
+#endif
+
+
+/*
+ * Update ITSTATE after normal execution of an IT block instruction.
+ *
+ * The 8 IT state bits are split into two parts in CPSR:
+ * ITSTATE<1:0> are in CPSR<26:25>
+ * ITSTATE<7:2> are in CPSR<15:10>
+ */
+static inline unsigned long it_advance(unsigned long cpsr)
+{
+ if ((cpsr & 0x06000400) == 0) {
+ /* ITSTATE<2:0> == 0 means end of IT block, so clear IT state */
+ cpsr &= ~PSR_IT_MASK;
+ } else {
+ /* We need to shift left ITSTATE<4:0> */
+ const unsigned long mask = 0x06001c00; /* Mask ITSTATE<4:0> */
+ unsigned long it = cpsr & mask;
+ it <<= 1;
+ it |= it >> (27 - 10); /* Carry ITSTATE<2> to correct place */
+ it &= mask;
+ cpsr &= ~mask;
+ cpsr |= it;
+ }
+ return cpsr;
+}
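+
+/*
+ * As an illustration: for a two-instruction "ITT EQ" block the mask field
+ * ITSTATE<3:0> starts as 0b0100. The first call above shifts it to 0b1000;
+ * on the second call ITSTATE<2:0> is zero, so the whole IT state is cleared.
+ */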
+
+static inline void __kprobes bx_write_pc(long pcv, struct pt_regs *regs)
+{
+ long cpsr = regs->ARM_cpsr;
+ if (pcv & 0x1) {
+ cpsr |= PSR_T_BIT;
+ pcv &= ~0x1;
+ } else {
+ cpsr &= ~PSR_T_BIT;
+		pcv &= ~0x2;	/* Avoid UNPREDICTABLE address alignment */
+ }
+ regs->ARM_cpsr = cpsr;
+ regs->ARM_pc = pcv;
+}
+
+
+#if __LINUX_ARM_ARCH__ >= 6
+
+/* Kernels built for >= ARMv6 should never run on <= ARMv5 hardware, so... */
+#define load_write_pc_interworks true
+#define test_load_write_pc_interworking()
+
+#else /* __LINUX_ARM_ARCH__ < 6 */
+
+/* We need run-time testing to determine if load_write_pc() should interwork. */
+extern bool load_write_pc_interworks;
+void __init test_load_write_pc_interworking(void);
+
+#endif
+
+static inline void __kprobes load_write_pc(long pcv, struct pt_regs *regs)
+{
+ if (load_write_pc_interworks)
+ bx_write_pc(pcv, regs);
+ else
+ regs->ARM_pc = pcv;
+}
+
+
+#if __LINUX_ARM_ARCH__ >= 7
+
+#define alu_write_pc_interworks true
+#define test_alu_write_pc_interworking()
+
+#elif __LINUX_ARM_ARCH__ <= 5
+
+/* Kernels built for <= ARMv5 should never run on >= ARMv6 hardware, so... */
+#define alu_write_pc_interworks false
+#define test_alu_write_pc_interworking()
+
+#else /* __LINUX_ARM_ARCH__ == 6 */
+
+/* We could be an ARMv6 binary on ARMv7 hardware so we need a run-time check. */
+extern bool alu_write_pc_interworks;
+void __init test_alu_write_pc_interworking(void);
+
+#endif /* __LINUX_ARM_ARCH__ == 6 */
+
+static inline void __kprobes alu_write_pc(long pcv, struct pt_regs *regs)
+{
+ if (alu_write_pc_interworks)
+ bx_write_pc(pcv, regs);
+ else
+ regs->ARM_pc = pcv;
+}
+
+
+/*
+ * Test if load/store instructions writeback the address register.
+ * if P (bit 24) == 0 or W (bit 21) == 1
+ */
+#define is_writeback(insn) ((insn ^ 0x01000000) & 0x01200000)
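+
+/*
+ * E.g. "ldr r0, [r1], #4" (P == 0) and "ldr r0, [r1, #4]!" (W == 1) write
+ * back to r1, so is_writeback() is non-zero for them; the plain offset form
+ * "ldr r0, [r1, #4]" does not.
+ */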
+
+/*
+ * The following definitions and macros are used to build instruction
+ * decoding tables for use by probes_decode_insn.
+ *
+ * These tables are a concatenation of entries each of which consists of one of
+ * the decode_* structs. All of the fields in every type of decode structure
+ * are of the union type decode_item, therefore the entire decode table can be
+ * viewed as an array of these and declared like:
+ *
+ * static const union decode_item table_name[] = {};
+ *
+ * In order to construct each entry in the table, macros are used to
+ * initialise a number of sequential decode_item values in a layout which
+ * matches the relevant struct. E.g. DECODE_SIMULATE initialise a struct
+ * decode_simulate by initialising four decode_item objects like this...
+ *
+ * {.bits = _type},
+ * {.bits = _mask},
+ * {.bits = _value},
+ * {.action = _handler},
+ *
+ * Initialising a specified member of the union means that the compiler
+ * will produce a warning if the argument is of an incorrect type.
+ *
+ * Below is a list of each of the macros used to initialise entries and a
+ * description of the action performed when that entry is matched to an
+ * instruction. A match is found when (instruction & mask) == value.
+ *
+ * DECODE_TABLE(mask, value, table)
+ * Instruction decoding jumps to parsing the new sub-table 'table'.
+ *
+ * DECODE_CUSTOM(mask, value, decoder)
+ * The value of 'decoder' is used as an index into the array of
+ * action functions, and the retrieved decoder function is invoked
+ * to complete decoding of the instruction.
+ *
+ * DECODE_SIMULATE(mask, value, handler)
+ * The probes instruction handler is set to the value found by
+ * indexing into the action array using the value of 'handler'. This
+ * will be used to simulate the instruction when the probe is hit.
+ * Decoding returns with INSN_GOOD_NO_SLOT.
+ *
+ * DECODE_EMULATE(mask, value, handler)
+ * The probes instruction handler is set to the value found by
+ * indexing into the action array using the value of 'handler'. This
+ * will be used to emulate the instruction when the probe is hit. The
+ * modified instruction (see below) is placed in the probes instruction
+ * slot so it may be called by the emulation code. Decoding returns
+ * with INSN_GOOD.
+ *
+ * DECODE_REJECT(mask, value)
+ * Instruction decoding fails with INSN_REJECTED
+ *
+ * DECODE_OR(mask, value)
+ * This allows the mask/value test of multiple table entries to be
+ * logically ORed. Once an 'or' entry is matched the decoding action to
+ * be performed is that of the next entry which isn't an 'or'. E.g.
+ *
+ * DECODE_OR (mask1, value1)
+ * DECODE_OR (mask2, value2)
+ * DECODE_SIMULATE (mask3, value3, simulation_handler)
+ *
+ * This means that if any of the three mask/value pairs match the
+ * instruction being decoded, then 'simulation_handler' will be used
+ * for it.
+ *
+ * Both the SIMULATE and EMULATE macros have a second form which takes an
+ * additional 'regs' argument.
+ *
+ * DECODE_SIMULATEX(mask, value, handler, regs)
+ * DECODE_EMULATEX (mask, value, handler, regs)
+ *
+ * These are used to specify what kind of CPU register is encoded in each of the
+ * least significant 5 nibbles of the instruction being decoded. The regs value
+ * is specified using the REGS macro; this takes any of the REG_TYPE_* values
+ * from enum decode_reg_type as arguments; only the '*' part of the name is
+ * given. E.g.
+ *
+ * REGS(0, ANY, NOPC, 0, ANY)
+ *
+ * This indicates an instruction is encoded like:
+ *
+ * bits 19..16 ignore
+ * bits 15..12 any register allowed here
+ * bits 11.. 8 any register except PC allowed here
+ * bits 7.. 4 ignore
+ * bits 3.. 0 any register allowed here
+ *
+ * This register specification is checked after a decode table entry is found to
+ * match an instruction (through the mask/value test). Any invalid register then
+ * found in the instruction will cause decoding to fail with INSN_REJECTED. In
+ * the above example this would happen if bits 11..8 of the instruction were
+ * 1111, indicating R15 or PC.
+ *
+ * As well as checking for legal combinations of registers, this data is also
+ * used to modify the registers encoded in the instructions so that the
+ * emulation routines can use them. (See decode_regs() and INSN_NEW_BITS.)
+ *
+ * Here is a real example which matches ARM instructions of the form
+ * "AND <Rd>,<Rn>,<Rm>,<shift> <Rs>"
+ *
+ * DECODE_EMULATEX (0x0e000090, 0x00000010, PROBES_DATA_PROCESSING_REG,
+ * REGS(ANY, ANY, NOPC, 0, ANY)),
+ * ^ ^ ^ ^
+ * Rn Rd Rs Rm
+ *
+ * Decoding the instruction "AND R4, R5, R6, ASL R15" will be rejected because
+ * Rs == R15
+ *
+ * Decoding the instruction "AND R4, R5, R6, ASL R7" will be accepted and the
+ * instruction will be modified to "AND R0, R2, R3, ASL R1" and then placed into
+ * the kprobes instruction slot. This can then be called later by the handler
+ * function emulate_rd12rn16rm0rs8_rwflags (a pointer to which is retrieved from
+ * the indicated slot in the action array), in order to emulate the instruction.
+ */
+
+enum decode_type {
+ DECODE_TYPE_END,
+ DECODE_TYPE_TABLE,
+ DECODE_TYPE_CUSTOM,
+ DECODE_TYPE_SIMULATE,
+ DECODE_TYPE_EMULATE,
+ DECODE_TYPE_OR,
+ DECODE_TYPE_REJECT,
+ NUM_DECODE_TYPES /* Must be last enum */
+};
+
+#define DECODE_TYPE_BITS 4
+#define DECODE_TYPE_MASK ((1 << DECODE_TYPE_BITS) - 1)
+
+enum decode_reg_type {
+ REG_TYPE_NONE = 0, /* Not a register, ignore */
+ REG_TYPE_ANY, /* Any register allowed */
+ REG_TYPE_SAMEAS16, /* Register should be same as that at bits 19..16 */
+ REG_TYPE_SP, /* Register must be SP */
+ REG_TYPE_PC, /* Register must be PC */
+ REG_TYPE_NOSP, /* Register must not be SP */
+ REG_TYPE_NOSPPC, /* Register must not be SP or PC */
+ REG_TYPE_NOPC, /* Register must not be PC */
+ REG_TYPE_NOPCWB, /* No PC if load/store write-back flag also set */
+
+ /* The following types are used when the encoding for PC indicates
+ * another instruction form. This distinction only matters for test
+ * case coverage checks.
+ */
+ REG_TYPE_NOPCX, /* Register must not be PC */
+ REG_TYPE_NOSPPCX, /* Register must not be SP or PC */
+
+ /* Alias to allow '0' arg to be used in REGS macro. */
+ REG_TYPE_0 = REG_TYPE_NONE
+};
+
+#define REGS(r16, r12, r8, r4, r0) \
+ (((REG_TYPE_##r16) << 16) + \
+ ((REG_TYPE_##r12) << 12) + \
+ ((REG_TYPE_##r8) << 8) + \
+ ((REG_TYPE_##r4) << 4) + \
+ (REG_TYPE_##r0))
+
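As a brief editorial aside (not part of the patch): REGS() simply packs one 4-bit REG_TYPE_* value per register nibble. A worked expansion of the specifier used in the documentation above, with the enum values defined just before this macro:

	/*
	 * REGS(0, ANY, NOPC, 0, ANY)
	 *   == (REG_TYPE_NONE << 16) | (REG_TYPE_ANY << 12) |
	 *      (REG_TYPE_NOPC <<  8) | (REG_TYPE_NONE << 4) | REG_TYPE_ANY
	 *   == (0 << 16) | (1 << 12) | (7 << 8) | (0 << 4) | 1
	 *   == 0x00001701
	 */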
+union decode_item {
+ u32 bits;
+ const union decode_item *table;
+ int action;
+};
+
+struct decode_header;
+typedef enum probes_insn (probes_custom_decode_t)(probes_opcode_t,
+ struct arch_probes_insn *,
+ const struct decode_header *);
+
+union decode_action {
+ probes_insn_handler_t *handler;
+ probes_custom_decode_t *decoder;
+};
+
+#define DECODE_END \
+ {.bits = DECODE_TYPE_END}
+
+
+struct decode_header {
+ union decode_item type_regs;
+ union decode_item mask;
+ union decode_item value;
+};
+
+#define DECODE_HEADER(_type, _mask, _value, _regs) \
+ {.bits = (_type) | ((_regs) << DECODE_TYPE_BITS)}, \
+ {.bits = (_mask)}, \
+ {.bits = (_value)}
+
+
+struct decode_table {
+ struct decode_header header;
+ union decode_item table;
+};
+
+#define DECODE_TABLE(_mask, _value, _table) \
+ DECODE_HEADER(DECODE_TYPE_TABLE, _mask, _value, 0), \
+ {.table = (_table)}
+
+
+struct decode_custom {
+ struct decode_header header;
+ union decode_item decoder;
+};
+
+#define DECODE_CUSTOM(_mask, _value, _decoder) \
+ DECODE_HEADER(DECODE_TYPE_CUSTOM, _mask, _value, 0), \
+ {.action = (_decoder)}
+
+
+struct decode_simulate {
+ struct decode_header header;
+ union decode_item handler;
+};
+
+#define DECODE_SIMULATEX(_mask, _value, _handler, _regs) \
+ DECODE_HEADER(DECODE_TYPE_SIMULATE, _mask, _value, _regs), \
+ {.action = (_handler)}
+
+#define DECODE_SIMULATE(_mask, _value, _handler) \
+ DECODE_SIMULATEX(_mask, _value, _handler, 0)
+
+
+struct decode_emulate {
+ struct decode_header header;
+ union decode_item handler;
+};
+
+#define DECODE_EMULATEX(_mask, _value, _handler, _regs) \
+ DECODE_HEADER(DECODE_TYPE_EMULATE, _mask, _value, _regs), \
+ {.action = (_handler)}
+
+#define DECODE_EMULATE(_mask, _value, _handler) \
+ DECODE_EMULATEX(_mask, _value, _handler, 0)
+
+
+struct decode_or {
+ struct decode_header header;
+};
+
+#define DECODE_OR(_mask, _value) \
+ DECODE_HEADER(DECODE_TYPE_OR, _mask, _value, 0)
+
+enum probes_insn {
+ INSN_REJECTED,
+ INSN_GOOD,
+ INSN_GOOD_NO_SLOT
+};
+
+struct decode_reject {
+ struct decode_header header;
+};
+
+#define DECODE_REJECT(_mask, _value) \
+ DECODE_HEADER(DECODE_TYPE_REJECT, _mask, _value, 0)
+
+probes_insn_handler_t probes_simulate_nop;
+probes_insn_handler_t probes_emulate_none;
+
+int __kprobes
+probes_decode_insn(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const union decode_item *table, bool thumb, bool emulate,
+ const union decode_action *actions);
+
+#endif
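To tie the pieces of this header together, here is a hypothetical usage sketch (not part of the patch). The enum, example_actions[] and example_table[] names, the NOP mask/value pair and the handler choices are illustrative only; probes_simulate_nop and probes_emulate_none are simply the two handlers this header happens to declare, used here as placeholders, while the AND mask/value pair is the one from the documentation comment above.

	/* Hypothetical sketch: a tiny decode table built from the macros above. */
	enum example_action {
		EXAMPLE_NOP,		/* index into example_actions[] */
		EXAMPLE_DP_REG,
		NUM_EXAMPLE_ACTIONS
	};

	static const union decode_action example_actions[] = {
		[EXAMPLE_NOP]		= {.handler = probes_simulate_nop},
		[EXAMPLE_DP_REG]	= {.handler = probes_emulate_none},
	};

	static const union decode_item example_table[] = {
		/* "MOV R0, R0" (a NOP): simulate it, no instruction slot needed */
		DECODE_SIMULATE	(0xffffffff, 0xe1a00000, EXAMPLE_NOP),

		/* "AND <Rd>,<Rn>,<Rm>,<shift> <Rs>": emulate, rejecting PC in Rs */
		DECODE_EMULATEX	(0x0e000090, 0x00000010, EXAMPLE_DP_REG,
							 REGS(ANY, ANY, NOPC, 0, ANY)),

		/* Everything else is refused */
		DECODE_REJECT	(0x00000000, 0x00000000),

		DECODE_END
	};

A decoder would then hand example_table and example_actions to probes_decode_insn() together with the instruction being probed; entries are tried in order and the first mask/value match determines the action.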
diff --git a/arch/arm/kernel/process.c b/arch/arm/kernel/process.c
index 94f6b05f9e2..81ef686a91c 100644
--- a/arch/arm/kernel/process.c
+++ b/arch/arm/kernel/process.c
@@ -30,7 +30,6 @@
#include <linux/uaccess.h>
#include <linux/random.h>
#include <linux/hw_breakpoint.h>
-#include <linux/cpuidle.h>
#include <linux/leds.h>
#include <linux/reboot.h>
@@ -39,6 +38,7 @@
#include <asm/processor.h>
#include <asm/thread_notify.h>
#include <asm/stacktrace.h>
+#include <asm/system_misc.h>
#include <asm/mach/time.h>
#include <asm/tls.h>
@@ -48,14 +48,14 @@ unsigned long __stack_chk_guard __read_mostly;
EXPORT_SYMBOL(__stack_chk_guard);
#endif
-static const char *processor_modes[] = {
+static const char *processor_modes[] __maybe_unused = {
"USER_26", "FIQ_26" , "IRQ_26" , "SVC_26" , "UK4_26" , "UK5_26" , "UK6_26" , "UK7_26" ,
"UK8_26" , "UK9_26" , "UK10_26", "UK11_26", "UK12_26", "UK13_26", "UK14_26", "UK15_26",
"USER_32", "FIQ_32" , "IRQ_32" , "SVC_32" , "UK4_32" , "UK5_32" , "UK6_32" , "ABT_32" ,
"UK8_32" , "UK9_32" , "UK10_32", "UND_32" , "UK12_32", "UK13_32", "UK14_32", "SYS_32"
};
-static const char *isa_modes[] = {
+static const char *isa_modes[] __maybe_unused = {
"ARM" , "Thumb" , "Jazelle", "ThumbEE"
};
@@ -100,7 +100,7 @@ void soft_restart(unsigned long addr)
u64 *stack = soft_restart_stack + ARRAY_SIZE(soft_restart_stack);
/* Disable interrupts first */
- local_irq_disable();
+ raw_local_irq_disable();
local_fiq_disable();
/* Disable the L2 if we're the last man standing. */
@@ -133,7 +133,11 @@ EXPORT_SYMBOL_GPL(arm_pm_restart);
void (*arm_pm_idle)(void);
-static void default_idle(void)
+/*
+ * Called from the core idle loop.
+ */
+
+void arch_cpu_idle(void)
{
if (arm_pm_idle)
arm_pm_idle();
@@ -168,15 +172,6 @@ void arch_cpu_idle_dead(void)
#endif
/*
- * Called from the core idle loop.
- */
-void arch_cpu_idle(void)
-{
- if (cpuidle_idle_call())
- default_idle();
-}
-
-/*
* Called by kexec, immediately prior to machine_kexec().
*
* This must completely disable all secondary CPUs; simply causing those CPUs
@@ -276,12 +271,17 @@ void __show_regs(struct pt_regs *regs)
buf[3] = flags & PSR_V_BIT ? 'V' : 'v';
buf[4] = '\0';
+#ifndef CONFIG_CPU_V7M
printk("Flags: %s IRQs o%s FIQs o%s Mode %s ISA %s Segment %s\n",
buf, interrupts_enabled(regs) ? "n" : "ff",
fast_interrupts_enabled(regs) ? "n" : "ff",
processor_modes[processor_mode(regs)],
isa_modes[isa_mode(regs)],
get_fs() == get_ds() ? "kernel" : "user");
+#else
+ printk("xPSR: %08lx\n", regs->ARM_cpsr);
+#endif
+
#ifdef CONFIG_CPU_CP15
{
unsigned int ctrl;
@@ -404,6 +404,7 @@ EXPORT_SYMBOL(dump_fpu);
unsigned long get_wchan(struct task_struct *p)
{
struct stackframe frame;
+ unsigned long stack_page;
int count = 0;
if (!p || p == current || p->state == TASK_RUNNING)
return 0;
@@ -412,9 +413,11 @@ unsigned long get_wchan(struct task_struct *p)
frame.sp = thread_saved_sp(p);
frame.lr = 0; /* recovered from the stack */
frame.pc = thread_saved_pc(p);
+ stack_page = (unsigned long)task_stack_page(p);
do {
- int ret = unwind_frame(&frame);
- if (ret < 0)
+ if (frame.sp < stack_page ||
+ frame.sp >= stack_page + THREAD_SIZE ||
+ unwind_frame(&frame) < 0)
return 0;
if (!in_sched_functions(frame.pc))
return frame.pc;
diff --git a/arch/arm/kernel/psci.c b/arch/arm/kernel/psci.c
index 46931880093..f73891b6b73 100644
--- a/arch/arm/kernel/psci.c
+++ b/arch/arm/kernel/psci.c
@@ -17,63 +17,58 @@
#include <linux/init.h>
#include <linux/of.h>
+#include <linux/reboot.h>
+#include <linux/pm.h>
+#include <uapi/linux/psci.h>
#include <asm/compiler.h>
#include <asm/errno.h>
#include <asm/opcodes-sec.h>
#include <asm/opcodes-virt.h>
#include <asm/psci.h>
+#include <asm/system_misc.h>
struct psci_operations psci_ops;
static int (*invoke_psci_fn)(u32, u32, u32, u32);
+typedef int (*psci_initcall_t)(const struct device_node *);
enum psci_function {
PSCI_FN_CPU_SUSPEND,
PSCI_FN_CPU_ON,
PSCI_FN_CPU_OFF,
PSCI_FN_MIGRATE,
+ PSCI_FN_AFFINITY_INFO,
+ PSCI_FN_MIGRATE_INFO_TYPE,
PSCI_FN_MAX,
};
static u32 psci_function_id[PSCI_FN_MAX];
-#define PSCI_RET_SUCCESS 0
-#define PSCI_RET_EOPNOTSUPP -1
-#define PSCI_RET_EINVAL -2
-#define PSCI_RET_EPERM -3
-
static int psci_to_linux_errno(int errno)
{
switch (errno) {
case PSCI_RET_SUCCESS:
return 0;
- case PSCI_RET_EOPNOTSUPP:
+ case PSCI_RET_NOT_SUPPORTED:
return -EOPNOTSUPP;
- case PSCI_RET_EINVAL:
+ case PSCI_RET_INVALID_PARAMS:
return -EINVAL;
- case PSCI_RET_EPERM:
+ case PSCI_RET_DENIED:
return -EPERM;
};
return -EINVAL;
}
-#define PSCI_POWER_STATE_ID_MASK 0xffff
-#define PSCI_POWER_STATE_ID_SHIFT 0
-#define PSCI_POWER_STATE_TYPE_MASK 0x1
-#define PSCI_POWER_STATE_TYPE_SHIFT 16
-#define PSCI_POWER_STATE_AFFL_MASK 0x3
-#define PSCI_POWER_STATE_AFFL_SHIFT 24
-
static u32 psci_power_state_pack(struct psci_power_state state)
{
- return ((state.id & PSCI_POWER_STATE_ID_MASK)
- << PSCI_POWER_STATE_ID_SHIFT) |
- ((state.type & PSCI_POWER_STATE_TYPE_MASK)
- << PSCI_POWER_STATE_TYPE_SHIFT) |
- ((state.affinity_level & PSCI_POWER_STATE_AFFL_MASK)
- << PSCI_POWER_STATE_AFFL_SHIFT);
+ return ((state.id << PSCI_0_2_POWER_STATE_ID_SHIFT)
+ & PSCI_0_2_POWER_STATE_ID_MASK) |
+ ((state.type << PSCI_0_2_POWER_STATE_TYPE_SHIFT)
+ & PSCI_0_2_POWER_STATE_TYPE_MASK) |
+ ((state.affinity_level << PSCI_0_2_POWER_STATE_AFFL_SHIFT)
+ & PSCI_0_2_POWER_STATE_AFFL_MASK);
}
/*
@@ -110,6 +105,14 @@ static noinline int __invoke_psci_fn_smc(u32 function_id, u32 arg0, u32 arg1,
return function_id;
}
+static int psci_get_version(void)
+{
+ int err;
+
+ err = invoke_psci_fn(PSCI_0_2_FN_PSCI_VERSION, 0, 0, 0);
+ return err;
+}
+
static int psci_cpu_suspend(struct psci_power_state state,
unsigned long entry_point)
{
@@ -153,26 +156,36 @@ static int psci_migrate(unsigned long cpuid)
return psci_to_linux_errno(err);
}
-static const struct of_device_id psci_of_match[] __initconst = {
- { .compatible = "arm,psci", },
- {},
-};
+static int psci_affinity_info(unsigned long target_affinity,
+ unsigned long lowest_affinity_level)
+{
+ int err;
+ u32 fn;
+
+ fn = psci_function_id[PSCI_FN_AFFINITY_INFO];
+ err = invoke_psci_fn(fn, target_affinity, lowest_affinity_level, 0);
+ return err;
+}
-void __init psci_init(void)
+static int psci_migrate_info_type(void)
{
- struct device_node *np;
- const char *method;
- u32 id;
+ int err;
+ u32 fn;
- np = of_find_matching_node(NULL, psci_of_match);
- if (!np)
- return;
+ fn = psci_function_id[PSCI_FN_MIGRATE_INFO_TYPE];
+ err = invoke_psci_fn(fn, 0, 0, 0);
+ return err;
+}
+
+static int get_set_conduit_method(struct device_node *np)
+{
+ const char *method;
- pr_info("probing function IDs from device-tree\n");
+ pr_info("probing for conduit method from DT.\n");
if (of_property_read_string(np, "method", &method)) {
- pr_warning("missing \"method\" property\n");
- goto out_put_node;
+ pr_warn("missing \"method\" property\n");
+ return -ENXIO;
}
if (!strcmp("hvc", method)) {
@@ -180,10 +193,99 @@ void __init psci_init(void)
} else if (!strcmp("smc", method)) {
invoke_psci_fn = __invoke_psci_fn_smc;
} else {
- pr_warning("invalid \"method\" property: %s\n", method);
+ pr_warn("invalid \"method\" property: %s\n", method);
+ return -EINVAL;
+ }
+ return 0;
+}
+
+static void psci_sys_reset(enum reboot_mode reboot_mode, const char *cmd)
+{
+ invoke_psci_fn(PSCI_0_2_FN_SYSTEM_RESET, 0, 0, 0);
+}
+
+static void psci_sys_poweroff(void)
+{
+ invoke_psci_fn(PSCI_0_2_FN_SYSTEM_OFF, 0, 0, 0);
+}
+
+/*
+ * PSCI Function IDs for v0.2+ are well defined so use
+ * standard values.
+ */
+static int psci_0_2_init(struct device_node *np)
+{
+ int err, ver;
+
+ err = get_set_conduit_method(np);
+
+ if (err)
+ goto out_put_node;
+
+ ver = psci_get_version();
+
+ if (ver == PSCI_RET_NOT_SUPPORTED) {
+ /* PSCI v0.2 mandates implementation of PSCI_ID_VERSION. */
+ pr_err("PSCI firmware does not comply with the v0.2 spec.\n");
+ err = -EOPNOTSUPP;
goto out_put_node;
+ } else {
+ pr_info("PSCIv%d.%d detected in firmware.\n",
+ PSCI_VERSION_MAJOR(ver),
+ PSCI_VERSION_MINOR(ver));
+
+ if (PSCI_VERSION_MAJOR(ver) == 0 &&
+ PSCI_VERSION_MINOR(ver) < 2) {
+ err = -EINVAL;
+ pr_err("Conflicting PSCI version detected.\n");
+ goto out_put_node;
+ }
}
+ pr_info("Using standard PSCI v0.2 function IDs\n");
+ psci_function_id[PSCI_FN_CPU_SUSPEND] = PSCI_0_2_FN_CPU_SUSPEND;
+ psci_ops.cpu_suspend = psci_cpu_suspend;
+
+ psci_function_id[PSCI_FN_CPU_OFF] = PSCI_0_2_FN_CPU_OFF;
+ psci_ops.cpu_off = psci_cpu_off;
+
+ psci_function_id[PSCI_FN_CPU_ON] = PSCI_0_2_FN_CPU_ON;
+ psci_ops.cpu_on = psci_cpu_on;
+
+ psci_function_id[PSCI_FN_MIGRATE] = PSCI_0_2_FN_MIGRATE;
+ psci_ops.migrate = psci_migrate;
+
+ psci_function_id[PSCI_FN_AFFINITY_INFO] = PSCI_0_2_FN_AFFINITY_INFO;
+ psci_ops.affinity_info = psci_affinity_info;
+
+ psci_function_id[PSCI_FN_MIGRATE_INFO_TYPE] =
+ PSCI_0_2_FN_MIGRATE_INFO_TYPE;
+ psci_ops.migrate_info_type = psci_migrate_info_type;
+
+ arm_pm_restart = psci_sys_reset;
+
+ pm_power_off = psci_sys_poweroff;
+
+out_put_node:
+ of_node_put(np);
+ return err;
+}
+
+/*
+ * PSCI < v0.2 get PSCI Function IDs via DT.
+ */
+static int psci_0_1_init(struct device_node *np)
+{
+ u32 id;
+ int err;
+
+ err = get_set_conduit_method(np);
+
+ if (err)
+ goto out_put_node;
+
+ pr_info("Using PSCI v0.1 Function IDs from DT\n");
+
if (!of_property_read_u32(np, "cpu_suspend", &id)) {
psci_function_id[PSCI_FN_CPU_SUSPEND] = id;
psci_ops.cpu_suspend = psci_cpu_suspend;
@@ -206,5 +308,25 @@ void __init psci_init(void)
out_put_node:
of_node_put(np);
- return;
+ return err;
+}
+
+static const struct of_device_id psci_of_match[] __initconst = {
+ { .compatible = "arm,psci", .data = psci_0_1_init},
+ { .compatible = "arm,psci-0.2", .data = psci_0_2_init},
+ {},
+};
+
+int __init psci_init(void)
+{
+ struct device_node *np;
+ const struct of_device_id *matched_np;
+ psci_initcall_t init_fn;
+
+ np = of_find_matching_node_and_match(NULL, psci_of_match, &matched_np);
+ if (!np)
+ return -ENODEV;
+
+ init_fn = (psci_initcall_t)matched_np->data;
+ return init_fn(np);
}
diff --git a/arch/arm/kernel/psci_smp.c b/arch/arm/kernel/psci_smp.c
index 70ded3fb42d..28a1db4da70 100644
--- a/arch/arm/kernel/psci_smp.c
+++ b/arch/arm/kernel/psci_smp.c
@@ -14,9 +14,10 @@
*/
#include <linux/init.h>
-#include <linux/irqchip/arm-gic.h>
#include <linux/smp.h>
#include <linux/of.h>
+#include <linux/delay.h>
+#include <uapi/linux/psci.h>
#include <asm/psci.h>
#include <asm/smp_plat.h>
@@ -67,6 +68,36 @@ void __ref psci_cpu_die(unsigned int cpu)
/* We should never return */
panic("psci: cpu %d failed to shutdown\n", cpu);
}
+
+int __ref psci_cpu_kill(unsigned int cpu)
+{
+ int err, i;
+
+ if (!psci_ops.affinity_info)
+ return 1;
+ /*
+ * cpu_kill could race with cpu_die and we can
+ * potentially end up declaring this cpu undead
+ * while it is dying. So, try again a few times.
+ */
+
+ for (i = 0; i < 10; i++) {
+ err = psci_ops.affinity_info(cpu_logical_map(cpu), 0);
+ if (err == PSCI_0_2_AFFINITY_LEVEL_OFF) {
+ pr_info("CPU%d killed.\n", cpu);
+ return 1;
+ }
+
+ msleep(10);
+ pr_info("Retrying again to check for CPU kill\n");
+ }
+
+ pr_warn("CPU%d may not have shut down cleanly (AFFINITY_INFO reports %d)\n",
+ cpu, err);
+ /* Make platform_cpu_kill() fail. */
+ return 0;
+}
+
#endif
bool __init psci_smp_available(void)
@@ -79,5 +110,6 @@ struct smp_operations __initdata psci_smp_ops = {
.smp_boot_secondary = psci_boot_secondary,
#ifdef CONFIG_HOTPLUG_CPU
.cpu_die = psci_cpu_die,
+ .cpu_kill = psci_cpu_kill,
#endif
};
diff --git a/arch/arm/kernel/ptrace.c b/arch/arm/kernel/ptrace.c
index 0dd3b79b15c..0c27ed6f3f2 100644
--- a/arch/arm/kernel/ptrace.c
+++ b/arch/arm/kernel/ptrace.c
@@ -908,7 +908,7 @@ enum ptrace_syscall_dir {
PTRACE_SYSCALL_EXIT,
};
-static int tracehook_report_syscall(struct pt_regs *regs,
+static void tracehook_report_syscall(struct pt_regs *regs,
enum ptrace_syscall_dir dir)
{
unsigned long ip;
@@ -926,7 +926,6 @@ static int tracehook_report_syscall(struct pt_regs *regs,
current_thread_info()->syscall = -1;
regs->ARM_ip = ip;
- return current_thread_info()->syscall;
}
asmlinkage int syscall_trace_enter(struct pt_regs *regs, int scno)
@@ -938,7 +937,9 @@ asmlinkage int syscall_trace_enter(struct pt_regs *regs, int scno)
return -1;
if (test_thread_flag(TIF_SYSCALL_TRACE))
- scno = tracehook_report_syscall(regs, PTRACE_SYSCALL_ENTER);
+ tracehook_report_syscall(regs, PTRACE_SYSCALL_ENTER);
+
+ scno = current_thread_info()->syscall;
if (test_thread_flag(TIF_SYSCALL_TRACEPOINT))
trace_sys_enter(regs, scno);
diff --git a/arch/arm/kernel/relocate_kernel.S b/arch/arm/kernel/relocate_kernel.S
index d0cdedf4864..95858966d84 100644
--- a/arch/arm/kernel/relocate_kernel.S
+++ b/arch/arm/kernel/relocate_kernel.S
@@ -2,10 +2,12 @@
* relocate_kernel.S - put the kernel image in place to boot
*/
+#include <linux/linkage.h>
#include <asm/kexec.h>
- .globl relocate_new_kernel
-relocate_new_kernel:
+ .align 3 /* not needed for this code, but keeps fncpy() happy */
+
+ENTRY(relocate_new_kernel)
ldr r0,kexec_indirection_page
ldr r1,kexec_start_address
@@ -79,6 +81,8 @@ kexec_mach_type:
kexec_boot_atags:
.long 0x0
+ENDPROC(relocate_new_kernel)
+
relocate_new_kernel_end:
.globl relocate_new_kernel_size
diff --git a/arch/arm/kernel/setup.c b/arch/arm/kernel/setup.c
index 0e1e2b3afa4..8a16ee5d8a9 100644
--- a/arch/arm/kernel/setup.c
+++ b/arch/arm/kernel/setup.c
@@ -72,7 +72,10 @@ static int __init fpe_setup(char *line)
__setup("fpe=", fpe_setup);
#endif
+extern void init_default_cache_policy(unsigned long);
extern void paging_init(const struct machine_desc *desc);
+extern void early_paging_init(const struct machine_desc *,
+ struct proc_info_list *);
extern void sanity_check_meminfo(void);
extern enum reboot_mode reboot_mode;
extern void setup_dma_zone(const struct machine_desc *desc);
@@ -98,6 +101,9 @@ EXPORT_SYMBOL(system_serial_high);
unsigned int elf_hwcap __read_mostly;
EXPORT_SYMBOL(elf_hwcap);
+unsigned int elf_hwcap2 __read_mostly;
+EXPORT_SYMBOL(elf_hwcap2);
+
#ifdef MULTI_CPU
struct processor processor __read_mostly;
@@ -332,7 +338,7 @@ static void __init cacheid_init(void)
cacheid = CACHEID_VIVT;
}
- printk("CPU: %s data cache, %s instruction cache\n",
+ pr_info("CPU: %s data cache, %s instruction cache\n",
cache_is_vivt() ? "VIVT" :
cache_is_vipt_aliasing() ? "VIPT aliasing" :
cache_is_vipt_nonaliasing() ? "PIPT / VIPT nonaliasing" : "unknown",
@@ -414,7 +420,7 @@ void notrace cpu_init(void)
struct stack *stk = &stacks[cpu];
if (cpu >= NR_CPUS) {
- printk(KERN_CRIT "CPU%u: bad primary CPU number\n", cpu);
+ pr_crit("CPU%u: bad primary CPU number\n", cpu);
BUG();
}
@@ -482,7 +488,7 @@ void __init smp_setup_processor_id(void)
*/
set_my_cpu_offset(0);
- printk(KERN_INFO "Booting Linux on physical CPU 0x%x\n", mpidr);
+ pr_info("Booting Linux on physical CPU 0x%x\n", mpidr);
}
struct mpidr_hash mpidr_hash;
@@ -562,8 +568,8 @@ static void __init setup_processor(void)
*/
list = lookup_processor_type(read_cpuid_id());
if (!list) {
- printk("CPU configuration botched (ID %08x), unable "
- "to continue.\n", read_cpuid_id());
+ pr_err("CPU configuration botched (ID %08x), unable to continue.\n",
+ read_cpuid_id());
while (1);
}
@@ -583,9 +589,9 @@ static void __init setup_processor(void)
cpu_cache = *list->cache;
#endif
- printk("CPU: %s [%08x] revision %d (ARMv%s), cr=%08lx\n",
- cpu_name, read_cpuid_id(), read_cpuid_id() & 15,
- proc_arch[cpu_architecture()], cr_alignment);
+ pr_info("CPU: %s [%08x] revision %d (ARMv%s), cr=%08lx\n",
+ cpu_name, read_cpuid_id(), read_cpuid_id() & 15,
+ proc_arch[cpu_architecture()], get_cr());
snprintf(init_utsname()->machine, __NEW_UTS_LEN + 1, "%s%c",
list->arch_name, ENDIANNESS);
@@ -598,6 +604,10 @@ static void __init setup_processor(void)
#ifndef CONFIG_ARM_THUMB
elf_hwcap &= ~(HWCAP_THUMB | HWCAP_IDIVT);
#endif
+#ifdef CONFIG_MMU
+ init_default_cache_policy(list->__cpu_mm_mmu_flags);
+#endif
+ erratum_a15_798181_init();
feat_v6_fixup();
@@ -619,46 +629,61 @@ void __init dump_machine_table(void)
/* can't use cpu_relax() here as it may require MMU setup */;
}
-int __init arm_add_memory(phys_addr_t start, phys_addr_t size)
+int __init arm_add_memory(u64 start, u64 size)
{
- struct membank *bank = &meminfo.bank[meminfo.nr_banks];
-
- if (meminfo.nr_banks >= NR_BANKS) {
- printk(KERN_CRIT "NR_BANKS too low, "
- "ignoring memory at 0x%08llx\n", (long long)start);
- return -EINVAL;
- }
+ u64 aligned_start;
/*
* Ensure that start/size are aligned to a page boundary.
* Size is appropriately rounded down, start is rounded up.
*/
size -= start & ~PAGE_MASK;
- bank->start = PAGE_ALIGN(start);
+ aligned_start = PAGE_ALIGN(start);
+
+#ifndef CONFIG_ARCH_PHYS_ADDR_T_64BIT
+ if (aligned_start > ULONG_MAX) {
+ pr_crit("Ignoring memory at 0x%08llx outside 32-bit physical address space\n",
+ (long long)start);
+ return -EINVAL;
+ }
-#ifndef CONFIG_ARM_LPAE
- if (bank->start + size < bank->start) {
- printk(KERN_CRIT "Truncating memory at 0x%08llx to fit in "
- "32-bit physical address space\n", (long long)start);
+ if (aligned_start + size > ULONG_MAX) {
+ pr_crit("Truncating memory at 0x%08llx to fit in 32-bit physical address space\n",
+ (long long)start);
/*
* To ensure bank->start + bank->size is representable in
* 32 bits, we use ULONG_MAX as the upper limit rather than 4GB.
* This means we lose a page after masking.
*/
- size = ULONG_MAX - bank->start;
+ size = ULONG_MAX - aligned_start;
}
#endif
- bank->size = size & ~(phys_addr_t)(PAGE_SIZE - 1);
+ if (aligned_start < PHYS_OFFSET) {
+ if (aligned_start + size <= PHYS_OFFSET) {
+ pr_info("Ignoring memory below PHYS_OFFSET: 0x%08llx-0x%08llx\n",
+ aligned_start, aligned_start + size);
+ return -EINVAL;
+ }
+
+ pr_info("Ignoring memory below PHYS_OFFSET: 0x%08llx-0x%08llx\n",
+ aligned_start, (u64)PHYS_OFFSET);
+
+ size -= PHYS_OFFSET - aligned_start;
+ aligned_start = PHYS_OFFSET;
+ }
+
+ start = aligned_start;
+ size = size & ~(phys_addr_t)(PAGE_SIZE - 1);
/*
* Check whether this memory region has non-zero size or
* invalid node number.
*/
- if (bank->size == 0)
+ if (size == 0)
return -EINVAL;
- meminfo.nr_banks++;
+ memblock_add(start, size);
return 0;
}
@@ -666,11 +691,12 @@ int __init arm_add_memory(phys_addr_t start, phys_addr_t size)
* Pick out the memory size. We look for mem=size@start,
* where start and size are "size[KkMm]"
*/
+
static int __init early_mem(char *p)
{
static int usermem __initdata = 0;
- phys_addr_t size;
- phys_addr_t start;
+ u64 size;
+ u64 start;
char *endp;
/*
@@ -680,7 +706,8 @@ static int __init early_mem(char *p)
*/
if (usermem == 0) {
usermem = 1;
- meminfo.nr_banks = 0;
+ memblock_remove(memblock_start_of_DRAM(),
+ memblock_end_of_DRAM() - memblock_start_of_DRAM());
}
start = PHYS_OFFSET;
@@ -705,7 +732,7 @@ static void __init request_standard_resources(const struct machine_desc *mdesc)
kernel_data.end = virt_to_phys(_end - 1);
for_each_memblock(memory, region) {
- res = alloc_bootmem_low(sizeof(*res));
+ res = memblock_virt_alloc(sizeof(*res), 0);
res->name = "System RAM";
res->start = __pfn_to_phys(memblock_region_memory_base_pfn(region));
res->end = __pfn_to_phys(memblock_region_memory_end_pfn(region)) - 1;
@@ -805,18 +832,17 @@ static void __init reserve_crashkernel(void)
if (ret)
return;
- ret = reserve_bootmem(crash_base, crash_size, BOOTMEM_EXCLUSIVE);
+ ret = memblock_reserve(crash_base, crash_size);
if (ret < 0) {
- printk(KERN_WARNING "crashkernel reservation failed - "
- "memory is in use (0x%lx)\n", (unsigned long)crash_base);
+ pr_warn("crashkernel reservation failed - memory is in use (0x%lx)\n",
+ (unsigned long)crash_base);
return;
}
- printk(KERN_INFO "Reserving %ldMB of memory at %ldMB "
- "for crashkernel (System RAM: %ldMB)\n",
- (unsigned long)(crash_size >> 20),
- (unsigned long)(crash_base >> 20),
- (unsigned long)(total_mem >> 20));
+ pr_info("Reserving %ldMB of memory at %ldMB for crashkernel (System RAM: %ldMB)\n",
+ (unsigned long)(crash_size >> 20),
+ (unsigned long)(crash_base >> 20),
+ (unsigned long)(total_mem >> 20));
crashk_res.start = crash_base;
crashk_res.end = crash_base + crash_size - 1;
@@ -826,13 +852,6 @@ static void __init reserve_crashkernel(void)
static inline void reserve_crashkernel(void) {}
#endif /* CONFIG_KEXEC */
-static int __init meminfo_cmp(const void *_a, const void *_b)
-{
- const struct membank *a = _a, *b = _b;
- long cmp = bank_pfn_start(a) - bank_pfn_start(b);
- return cmp < 0 ? -1 : cmp > 0 ? 1 : 0;
-}
-
void __init hyp_mode_check(void)
{
#ifdef CONFIG_ARM_VIRT_EXT
@@ -861,8 +880,6 @@ void __init setup_arch(char **cmdline_p)
machine_desc = mdesc;
machine_name = mdesc->name;
- setup_dma_zone(mdesc);
-
if (mdesc->reboot_mode != REBOOT_HARD)
reboot_mode = mdesc->reboot_mode;
@@ -877,9 +894,10 @@ void __init setup_arch(char **cmdline_p)
parse_early_param();
- sort(&meminfo.bank, meminfo.nr_banks, sizeof(meminfo.bank[0]), meminfo_cmp, NULL);
+ early_paging_init(mdesc, lookup_processor_type(read_cpuid_id()));
+ setup_dma_zone(mdesc);
sanity_check_meminfo();
- arm_memblock_init(&meminfo, mdesc);
+ arm_memblock_init(mdesc);
paging_init(mdesc);
request_standard_resources(mdesc);
@@ -975,6 +993,16 @@ static const char *hwcap_str[] = {
"idivt",
"vfpd32",
"lpae",
+ "evtstrm",
+ NULL
+};
+
+static const char *hwcap2_str[] = {
+ "aes",
+ "pmull",
+ "sha1",
+ "sha2",
+ "crc32",
NULL
};
@@ -1001,6 +1029,10 @@ static int c_show(struct seq_file *m, void *v)
if (elf_hwcap & (1 << j))
seq_printf(m, "%s ", hwcap_str[j]);
+ for (j = 0; hwcap2_str[j]; j++)
+ if (elf_hwcap2 & (1 << j))
+ seq_printf(m, "%s ", hwcap2_str[j]);
+
seq_printf(m, "\nCPU implementer\t: 0x%02x\n", cpuid >> 24);
seq_printf(m, "CPU architecture: %s\n",
proc_arch[cpu_architecture()]);
diff --git a/arch/arm/kernel/signal.c b/arch/arm/kernel/signal.c
index ab330422527..bd198343720 100644
--- a/arch/arm/kernel/signal.c
+++ b/arch/arm/kernel/signal.c
@@ -13,6 +13,7 @@
#include <linux/personality.h>
#include <linux/uaccess.h>
#include <linux/tracehook.h>
+#include <linux/uprobes.h>
#include <asm/elf.h>
#include <asm/cacheflush.h>
@@ -21,29 +22,7 @@
#include <asm/unistd.h>
#include <asm/vfp.h>
-/*
- * For ARM syscalls, we encode the syscall number into the instruction.
- */
-#define SWI_SYS_SIGRETURN (0xef000000|(__NR_sigreturn)|(__NR_OABI_SYSCALL_BASE))
-#define SWI_SYS_RT_SIGRETURN (0xef000000|(__NR_rt_sigreturn)|(__NR_OABI_SYSCALL_BASE))
-
-/*
- * With EABI, the syscall number has to be loaded into r7.
- */
-#define MOV_R7_NR_SIGRETURN (0xe3a07000 | (__NR_sigreturn - __NR_SYSCALL_BASE))
-#define MOV_R7_NR_RT_SIGRETURN (0xe3a07000 | (__NR_rt_sigreturn - __NR_SYSCALL_BASE))
-
-/*
- * For Thumb syscalls, we pass the syscall number via r7. We therefore
- * need two 16-bit instructions.
- */
-#define SWI_THUMB_SIGRETURN (0xdf00 << 16 | 0x2700 | (__NR_sigreturn - __NR_SYSCALL_BASE))
-#define SWI_THUMB_RT_SIGRETURN (0xdf00 << 16 | 0x2700 | (__NR_rt_sigreturn - __NR_SYSCALL_BASE))
-
-static const unsigned long sigreturn_codes[7] = {
- MOV_R7_NR_SIGRETURN, SWI_SYS_SIGRETURN, SWI_THUMB_SIGRETURN,
- MOV_R7_NR_RT_SIGRETURN, SWI_SYS_RT_SIGRETURN, SWI_THUMB_RT_SIGRETURN,
-};
+extern const unsigned long sigreturn_codes[7];
static unsigned long signal_return_offset;
@@ -375,12 +354,18 @@ setup_return(struct pt_regs *regs, struct ksignal *ksig,
*/
thumb = handler & 1;
- if (thumb) {
- cpsr |= PSR_T_BIT;
#if __LINUX_ARM_ARCH__ >= 7
- /* clear the If-Then Thumb-2 execution state */
- cpsr &= ~PSR_IT_MASK;
+ /*
+ * Clear the If-Then Thumb-2 execution state
+ * ARM spec requires this to be all 000s in ARM mode
+ * Snapdragon S4/Krait misbehaves on a Thumb=>ARM
+ * signal transition without this.
+ */
+ cpsr &= ~PSR_IT_MASK;
#endif
+
+ if (thumb) {
+ cpsr |= PSR_T_BIT;
} else
cpsr &= ~PSR_T_BIT;
}
@@ -606,6 +591,9 @@ do_work_pending(struct pt_regs *regs, unsigned int thread_flags, int syscall)
return restart;
}
syscall = 0;
+ } else if (thread_flags & _TIF_UPROBE) {
+ clear_thread_flag(TIF_UPROBE);
+ uprobe_notify_resume(regs);
} else {
clear_thread_flag(TIF_NOTIFY_RESUME);
tracehook_notify_resume(regs);
diff --git a/arch/arm/kernel/sigreturn_codes.S b/arch/arm/kernel/sigreturn_codes.S
new file mode 100644
index 00000000000..b84d0cb1368
--- /dev/null
+++ b/arch/arm/kernel/sigreturn_codes.S
@@ -0,0 +1,102 @@
+/*
+ * sigreturn_codes.S - code snippets for sigreturn syscalls
+ *
+ * Created by: Victor Kamensky, 2013-08-13
+ * Copyright: (C) 2013 Linaro Limited
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ */
+
+#include <asm/unistd.h>
+
+/*
+ * For ARM syscalls, we encode the syscall number into the instruction.
+ * With EABI, the syscall number has to be loaded into r7. As a result,
+ * the ARM syscall sequence snippet will have a move and an svc in .arm
+ * encoding.
+ *
+ * For Thumb syscalls, we pass the syscall number via r7. We therefore
+ * need two 16-bit instructions in .thumb encoding.
+ *
+ * Please note that the sigreturn_codes snippets are not executed in
+ * place. Instead they are just copied by the kernel into the appropriate
+ * places. Code inside arch/arm/kernel/signal.c is very sensitive to the
+ * layout of these code snippets.
+ */
+
+/*
+ * In the CPU_THUMBONLY case, ARM opcodes are not allowed in the kernel.
+ * Note that in this case the code skips those instructions but uses the
+ * .org directive to keep the correct layout of the sigreturn_codes array.
+ */
+#ifndef CONFIG_CPU_THUMBONLY
+#define ARM_OK(code...) code
+#else
+#define ARM_OK(code...)
+#endif
+
+ .macro arm_slot n
+ .org sigreturn_codes + 12 * (\n)
+ARM_OK( .arm )
+ .endm
+
+ .macro thumb_slot n
+ .org sigreturn_codes + 12 * (\n) + 8
+ .thumb
+ .endm
+
+#if __LINUX_ARM_ARCH__ <= 4
+ /*
+ * Note that we manually set the minimal architecture that supports
+ * the required Thumb opcodes for early arch versions. It is OK
+ * for this file to be used in combination with other,
+ * lower arch variants, since these code snippets are only
+ * used as input data.
+ */
+ .arch armv4t
+#endif
+
+ .section .rodata
+ .global sigreturn_codes
+ .type sigreturn_codes, #object
+
+ .align
+
+sigreturn_codes:
+
+ /* ARM sigreturn syscall code snippet */
+ arm_slot 0
+ARM_OK( mov r7, #(__NR_sigreturn - __NR_SYSCALL_BASE) )
+ARM_OK( swi #(__NR_sigreturn)|(__NR_OABI_SYSCALL_BASE) )
+
+ /* Thumb sigreturn syscall code snippet */
+ thumb_slot 0
+ movs r7, #(__NR_sigreturn - __NR_SYSCALL_BASE)
+ swi #0
+
+ /* ARM sigreturn_rt syscall code snippet */
+ arm_slot 1
+ARM_OK( mov r7, #(__NR_rt_sigreturn - __NR_SYSCALL_BASE) )
+ARM_OK( swi #(__NR_rt_sigreturn)|(__NR_OABI_SYSCALL_BASE) )
+
+ /* Thumb sigreturn_rt syscall code snippet */
+ thumb_slot 1
+ movs r7, #(__NR_rt_sigreturn - __NR_SYSCALL_BASE)
+ swi #0
+
+ /*
+ * Note on additional space: the setup_return algorithm in
+ * signal.c copies two words regardless of whether it is the
+ * Thumb case or not, so we need an additional word after the
+ * real last entry.
+ */
+ arm_slot 2
+ .space 4
+
+ .size sigreturn_codes, . - sigreturn_codes
diff --git a/arch/arm/kernel/sleep.S b/arch/arm/kernel/sleep.S
index db1536b8b30..1b880db2a03 100644
--- a/arch/arm/kernel/sleep.S
+++ b/arch/arm/kernel/sleep.S
@@ -55,6 +55,7 @@
* specific registers and some other data for resume.
* r0 = suspend function arg0
* r1 = suspend function
+ * r2 = MPIDR value the resuming CPU will use
*/
ENTRY(__cpu_suspend)
stmfd sp!, {r4 - r11, lr}
@@ -67,23 +68,18 @@ ENTRY(__cpu_suspend)
mov r5, sp @ current virtual SP
add r4, r4, #12 @ Space for pgd, virt sp, phys resume fn
sub sp, sp, r4 @ allocate CPU state on stack
- stmfd sp!, {r0, r1} @ save suspend func arg and pointer
- add r0, sp, #8 @ save pointer to save block
- mov r1, r4 @ size of save block
- mov r2, r5 @ virtual SP
ldr r3, =sleep_save_sp
+ stmfd sp!, {r0, r1} @ save suspend func arg and pointer
ldr r3, [r3, #SLEEP_SAVE_SP_VIRT]
- ALT_SMP(mrc p15, 0, r9, c0, c0, 5)
- ALT_UP_B(1f)
- ldr r8, =mpidr_hash
- /*
- * This ldmia relies on the memory layout of the mpidr_hash
- * struct mpidr_hash.
- */
- ldmia r8, {r4-r7} @ r4 = mpidr mask (r5,r6,r7) = l[0,1,2] shifts
- compute_mpidr_hash lr, r5, r6, r7, r9, r4
- add r3, r3, lr, lsl #2
-1:
+ ALT_SMP(ldr r0, =mpidr_hash)
+ ALT_UP_B(1f)
+ /* This ldmia relies on the memory layout of the mpidr_hash struct */
+ ldmia r0, {r1, r6-r8} @ r1 = mpidr mask (r6,r7,r8) = l[0,1,2] shifts
+ compute_mpidr_hash r0, r6, r7, r8, r2, r1
+ add r3, r3, r0, lsl #2
+1: mov r2, r5 @ virtual SP
+ mov r1, r4 @ size of save block
+ add r0, sp, #8 @ pointer to save block
bl __cpu_suspend_save
adr lr, BSYM(cpu_suspend_abort)
ldmfd sp!, {r0, pc} @ call suspend fn
@@ -130,6 +126,11 @@ ENDPROC(cpu_resume_after_mmu)
.data
.align
ENTRY(cpu_resume)
+ARM_BE8(setend be) @ ensure we are in BE mode
+#ifdef CONFIG_ARM_VIRT_EXT
+ bl __hyp_stub_install_secondary
+#endif
+ safe_svcmode_maskall r1
mov r1, #0
ALT_SMP(mrc p15, 0, r0, c0, c0, 5)
ALT_UP_B(1f)
@@ -147,7 +148,6 @@ ENTRY(cpu_resume)
ldr r0, [r0, #SLEEP_SAVE_SP_PHYS]
ldr r0, [r0, r1, lsl #2]
- setmode PSR_I_BIT | PSR_F_BIT | SVC_MODE, r1 @ set SVC, irqs off
@ load phys pgd, stack, resume fn
ARM( ldmia r0!, {r1, sp, pc} )
THUMB( ldmia r0!, {r1, r2, r3} )
diff --git a/arch/arm/kernel/smp.c b/arch/arm/kernel/smp.c
index 72024ea8a3a..7c4fada440f 100644
--- a/arch/arm/kernel/smp.c
+++ b/arch/arm/kernel/smp.c
@@ -25,6 +25,7 @@
#include <linux/clockchips.h>
#include <linux/completion.h>
#include <linux/cpufreq.h>
+#include <linux/irq_work.h>
#include <linux/atomic.h>
#include <asm/smp.h>
@@ -66,6 +67,8 @@ enum ipi_msg_type {
IPI_CALL_FUNC,
IPI_CALL_FUNC_SINGLE,
IPI_CPU_STOP,
+ IPI_IRQ_WORK,
+ IPI_COMPLETION,
};
static DECLARE_COMPLETION(cpu_running);
@@ -80,7 +83,7 @@ void __init smp_set_ops(struct smp_operations *ops)
static unsigned long get_arch_pgd(pgd_t *pgd)
{
- phys_addr_t pgdir = virt_to_phys(pgd);
+ phys_addr_t pgdir = virt_to_idmap(pgd);
BUG_ON(pgdir & ARCH_PGD_MASK);
return pgdir >> ARCH_PGD_SHIFT;
}
@@ -102,8 +105,7 @@ int __cpu_up(unsigned int cpu, struct task_struct *idle)
secondary_data.pgdir = get_arch_pgd(idmap_pgd);
secondary_data.swapper_pg_dir = get_arch_pgd(swapper_pg_dir);
#endif
- __cpuc_flush_dcache_area(&secondary_data, sizeof(secondary_data));
- outer_clean_range(__pa(&secondary_data), __pa(&secondary_data + 1));
+ sync_cache_w(&secondary_data);
/*
* Now bring the CPU into our world.
@@ -291,6 +293,9 @@ void __ref cpu_die(void)
if (smp_ops.cpu_die)
smp_ops.cpu_die(cpu);
+ pr_warn("CPU%u: smp_ops.cpu_die() returned, trying to resuscitate\n",
+ cpu);
+
/*
* Do not return to the idle loop - jump back to the secondary
* cpu initialisation. There's some initialisation which needs
@@ -448,6 +453,14 @@ void arch_send_call_function_single_ipi(int cpu)
smp_cross_call(cpumask_of(cpu), IPI_CALL_FUNC_SINGLE);
}
+#ifdef CONFIG_IRQ_WORK
+void arch_irq_work_raise(void)
+{
+ if (is_smp())
+ smp_cross_call(cpumask_of(smp_processor_id()), IPI_IRQ_WORK);
+}
+#endif
+
static const char *ipi_types[NR_IPI] = {
#define S(x,s) [x] = s
S(IPI_WAKEUP, "CPU wakeup interrupts"),
@@ -456,6 +469,8 @@ static const char *ipi_types[NR_IPI] = {
S(IPI_CALL_FUNC, "Function call interrupts"),
S(IPI_CALL_FUNC_SINGLE, "Single function call interrupts"),
S(IPI_CPU_STOP, "CPU stop interrupts"),
+ S(IPI_IRQ_WORK, "IRQ work interrupts"),
+ S(IPI_COMPLETION, "completion interrupts"),
};
void show_ipi_list(struct seq_file *p, int prec)
@@ -515,6 +530,19 @@ static void ipi_cpu_stop(unsigned int cpu)
cpu_relax();
}
+static DEFINE_PER_CPU(struct completion *, cpu_completion);
+
+int register_ipi_completion(struct completion *completion, int cpu)
+{
+ per_cpu(cpu_completion, cpu) = completion;
+ return IPI_COMPLETION;
+}
+
+static void ipi_complete(unsigned int cpu)
+{
+ complete(per_cpu(cpu_completion, cpu));
+}
+
/*
* Main handler for inter-processor interrupts
*/
@@ -565,6 +593,20 @@ void handle_IPI(int ipinr, struct pt_regs *regs)
irq_exit();
break;
+#ifdef CONFIG_IRQ_WORK
+ case IPI_IRQ_WORK:
+ irq_enter();
+ irq_work_run();
+ irq_exit();
+ break;
+#endif
+
+ case IPI_COMPLETION:
+ irq_enter();
+ ipi_complete(cpu);
+ irq_exit();
+ break;
+
default:
printk(KERN_CRIT "CPU%u: Unknown IPI message 0x%x\n",
cpu, ipinr);
@@ -632,8 +674,7 @@ static int cpufreq_callback(struct notifier_block *nb,
}
if ((val == CPUFREQ_PRECHANGE && freq->old < freq->new) ||
- (val == CPUFREQ_POSTCHANGE && freq->old > freq->new) ||
- (val == CPUFREQ_RESUMECHANGE || val == CPUFREQ_SUSPENDCHANGE)) {
+ (val == CPUFREQ_POSTCHANGE && freq->old > freq->new)) {
loops_per_jiffy = cpufreq_scale(global_l_p_j_ref,
global_l_p_j_ref_freq,
freq->new);
diff --git a/arch/arm/kernel/smp_scu.c b/arch/arm/kernel/smp_scu.c
index 5bc1a63284e..1aafa0d785e 100644
--- a/arch/arm/kernel/smp_scu.c
+++ b/arch/arm/kernel/smp_scu.c
@@ -28,7 +28,7 @@
*/
unsigned int __init scu_get_core_count(void __iomem *scu_base)
{
- unsigned int ncores = __raw_readl(scu_base + SCU_CONFIG);
+ unsigned int ncores = readl_relaxed(scu_base + SCU_CONFIG);
return (ncores & 0x03) + 1;
}
@@ -42,19 +42,19 @@ void scu_enable(void __iomem *scu_base)
#ifdef CONFIG_ARM_ERRATA_764369
/* Cortex-A9 only */
if ((read_cpuid_id() & 0xff0ffff0) == 0x410fc090) {
- scu_ctrl = __raw_readl(scu_base + 0x30);
+ scu_ctrl = readl_relaxed(scu_base + 0x30);
if (!(scu_ctrl & 1))
- __raw_writel(scu_ctrl | 0x1, scu_base + 0x30);
+ writel_relaxed(scu_ctrl | 0x1, scu_base + 0x30);
}
#endif
- scu_ctrl = __raw_readl(scu_base + SCU_CTRL);
+ scu_ctrl = readl_relaxed(scu_base + SCU_CTRL);
/* already enabled? */
if (scu_ctrl & 1)
return;
scu_ctrl |= 1;
- __raw_writel(scu_ctrl, scu_base + SCU_CTRL);
+ writel_relaxed(scu_ctrl, scu_base + SCU_CTRL);
/*
* Ensure that the data accessed by CPU0 before the SCU was
@@ -80,9 +80,9 @@ int scu_power_mode(void __iomem *scu_base, unsigned int mode)
if (mode > 3 || mode == 1 || cpu > 3)
return -EINVAL;
- val = __raw_readb(scu_base + SCU_CPU_STATUS + cpu) & ~0x03;
+ val = readb_relaxed(scu_base + SCU_CPU_STATUS + cpu) & ~0x03;
val |= mode;
- __raw_writeb(val, scu_base + SCU_CPU_STATUS + cpu);
+ writeb_relaxed(val, scu_base + SCU_CPU_STATUS + cpu);
return 0;
}
diff --git a/arch/arm/kernel/smp_tlb.c b/arch/arm/kernel/smp_tlb.c
index 83ccca303df..95d063620b7 100644
--- a/arch/arm/kernel/smp_tlb.c
+++ b/arch/arm/kernel/smp_tlb.c
@@ -70,6 +70,40 @@ static inline void ipi_flush_bp_all(void *ignored)
local_flush_bp_all();
}
+#ifdef CONFIG_ARM_ERRATA_798181
+bool (*erratum_a15_798181_handler)(void);
+
+static bool erratum_a15_798181_partial(void)
+{
+ asm("mcr p15, 0, %0, c8, c3, 1" : : "r" (0));
+ dsb(ish);
+ return false;
+}
+
+static bool erratum_a15_798181_broadcast(void)
+{
+ asm("mcr p15, 0, %0, c8, c3, 1" : : "r" (0));
+ dsb(ish);
+ return true;
+}
+
+void erratum_a15_798181_init(void)
+{
+ unsigned int midr = read_cpuid_id();
+ unsigned int revidr = read_cpuid(CPUID_REVIDR);
+
+ /* Cortex-A15 r0p0..r3p2 w/o ECO fix affected */
+ if ((midr & 0xff0ffff0) != 0x410fc0f0 || midr > 0x413fc0f2 ||
+ (revidr & 0x210) == 0x210) {
+ return;
+ }
+ if (revidr & 0x10)
+ erratum_a15_798181_handler = erratum_a15_798181_partial;
+ else
+ erratum_a15_798181_handler = erratum_a15_798181_broadcast;
+}
+#endif
+
static void ipi_flush_tlb_a15_erratum(void *arg)
{
dmb();
@@ -80,7 +114,6 @@ static void broadcast_tlb_a15_erratum(void)
if (!erratum_a15_798181())
return;
- dummy_flush_tlb_a15_erratum();
smp_call_function(ipi_flush_tlb_a15_erratum, NULL, 1);
}
@@ -92,7 +125,6 @@ static void broadcast_tlb_mm_a15_erratum(struct mm_struct *mm)
if (!erratum_a15_798181())
return;
- dummy_flush_tlb_a15_erratum();
this_cpu = get_cpu();
a15_erratum_get_cpumask(this_cpu, mm, &mask);
smp_call_function_many(&mask, ipi_flush_tlb_a15_erratum, NULL, 1);
diff --git a/arch/arm/kernel/smp_twd.c b/arch/arm/kernel/smp_twd.c
index 2985c9f0905..dfc32130bc4 100644
--- a/arch/arm/kernel/smp_twd.c
+++ b/arch/arm/kernel/smp_twd.c
@@ -45,7 +45,7 @@ static void twd_set_mode(enum clock_event_mode mode,
case CLOCK_EVT_MODE_PERIODIC:
ctrl = TWD_TIMER_CONTROL_ENABLE | TWD_TIMER_CONTROL_IT_ENABLE
| TWD_TIMER_CONTROL_PERIODIC;
- __raw_writel(DIV_ROUND_CLOSEST(twd_timer_rate, HZ),
+ writel_relaxed(DIV_ROUND_CLOSEST(twd_timer_rate, HZ),
twd_base + TWD_TIMER_LOAD);
break;
case CLOCK_EVT_MODE_ONESHOT:
@@ -58,18 +58,18 @@ static void twd_set_mode(enum clock_event_mode mode,
ctrl = 0;
}
- __raw_writel(ctrl, twd_base + TWD_TIMER_CONTROL);
+ writel_relaxed(ctrl, twd_base + TWD_TIMER_CONTROL);
}
static int twd_set_next_event(unsigned long evt,
struct clock_event_device *unused)
{
- unsigned long ctrl = __raw_readl(twd_base + TWD_TIMER_CONTROL);
+ unsigned long ctrl = readl_relaxed(twd_base + TWD_TIMER_CONTROL);
ctrl |= TWD_TIMER_CONTROL_ENABLE;
- __raw_writel(evt, twd_base + TWD_TIMER_COUNTER);
- __raw_writel(ctrl, twd_base + TWD_TIMER_CONTROL);
+ writel_relaxed(evt, twd_base + TWD_TIMER_COUNTER);
+ writel_relaxed(ctrl, twd_base + TWD_TIMER_CONTROL);
return 0;
}
@@ -82,8 +82,8 @@ static int twd_set_next_event(unsigned long evt,
*/
static int twd_timer_ack(void)
{
- if (__raw_readl(twd_base + TWD_TIMER_INTSTAT)) {
- __raw_writel(1, twd_base + TWD_TIMER_INTSTAT);
+ if (readl_relaxed(twd_base + TWD_TIMER_INTSTAT)) {
+ writel_relaxed(1, twd_base + TWD_TIMER_INTSTAT);
return 1;
}
@@ -166,7 +166,7 @@ static int twd_cpufreq_transition(struct notifier_block *nb,
* frequency. The timer is local to a cpu, so cross-call to the
* changing cpu.
*/
- if (state == CPUFREQ_POSTCHANGE || state == CPUFREQ_RESUMECHANGE)
+ if (state == CPUFREQ_POSTCHANGE)
smp_call_function_single(freqs->cpu, twd_update_frequency,
NULL, 1);
@@ -211,15 +211,15 @@ static void twd_calibrate_rate(void)
waitjiffies += 5;
/* enable, no interrupt or reload */
- __raw_writel(0x1, twd_base + TWD_TIMER_CONTROL);
+ writel_relaxed(0x1, twd_base + TWD_TIMER_CONTROL);
/* maximum value */
- __raw_writel(0xFFFFFFFFU, twd_base + TWD_TIMER_COUNTER);
+ writel_relaxed(0xFFFFFFFFU, twd_base + TWD_TIMER_COUNTER);
while (get_jiffies_64() < waitjiffies)
udelay(10);
- count = __raw_readl(twd_base + TWD_TIMER_COUNTER);
+ count = readl_relaxed(twd_base + TWD_TIMER_COUNTER);
twd_timer_rate = (0xFFFFFFFFU - count) * (HZ / 5);
@@ -277,7 +277,7 @@ static void twd_timer_setup(void)
* bother with the below.
*/
if (per_cpu(percpu_setup_called, cpu)) {
- __raw_writel(0, twd_base + TWD_TIMER_CONTROL);
+ writel_relaxed(0, twd_base + TWD_TIMER_CONTROL);
clockevents_register_device(clk);
enable_percpu_irq(clk->irq, 0);
return;
@@ -290,7 +290,7 @@ static void twd_timer_setup(void)
* The following is done once per CPU the first time .setup() is
* called.
*/
- __raw_writel(0, twd_base + TWD_TIMER_CONTROL);
+ writel_relaxed(0, twd_base + TWD_TIMER_CONTROL);
clk->name = "local_timer";
clk->features = CLOCK_EVT_FEAT_PERIODIC | CLOCK_EVT_FEAT_ONESHOT |
diff --git a/arch/arm/kernel/stacktrace.c b/arch/arm/kernel/stacktrace.c
index 00f79e59985..f065eb05d25 100644
--- a/arch/arm/kernel/stacktrace.c
+++ b/arch/arm/kernel/stacktrace.c
@@ -3,6 +3,7 @@
#include <linux/stacktrace.h>
#include <asm/stacktrace.h>
+#include <asm/traps.h>
#if defined(CONFIG_FRAME_POINTER) && !defined(CONFIG_ARM_UNWIND)
/*
@@ -31,7 +32,7 @@ int notrace unwind_frame(struct stackframe *frame)
high = ALIGN(low, THREAD_SIZE);
/* check current frame pointer is within bounds */
- if (fp < (low + 12) || fp + 4 >= high)
+ if (fp < low + 12 || fp > high - 4)
return -EINVAL;
/* restore the registers from the stack frame */
@@ -61,6 +62,7 @@ EXPORT_SYMBOL(walk_stackframe);
#ifdef CONFIG_STACKTRACE
struct stack_trace_data {
struct stack_trace *trace;
+ unsigned long last_pc;
unsigned int no_sched_functions;
unsigned int skip;
};
@@ -69,6 +71,7 @@ static int save_trace(struct stackframe *frame, void *d)
{
struct stack_trace_data *data = d;
struct stack_trace *trace = data->trace;
+ struct pt_regs *regs;
unsigned long addr = frame->pc;
if (data->no_sched_functions && in_sched_functions(addr))
@@ -80,16 +83,39 @@ static int save_trace(struct stackframe *frame, void *d)
trace->entries[trace->nr_entries++] = addr;
+ if (trace->nr_entries >= trace->max_entries)
+ return 1;
+
+ /*
+ * in_exception_text() is designed to test if the PC is one of
+ * the functions which has an exception stack above it, but
+ * unfortunately what is in frame->pc is the return LR value,
+ * not the saved PC value. So, we need to track the previous
+ * frame PC value when doing this.
+ */
+ addr = data->last_pc;
+ data->last_pc = frame->pc;
+ if (!in_exception_text(addr))
+ return 0;
+
+ regs = (struct pt_regs *)frame->sp;
+
+ trace->entries[trace->nr_entries++] = regs->ARM_pc;
+
return trace->nr_entries >= trace->max_entries;
}
-void save_stack_trace_tsk(struct task_struct *tsk, struct stack_trace *trace)
+/* This must be noinline so that our skip calculation works correctly */
+static noinline void __save_stack_trace(struct task_struct *tsk,
+ struct stack_trace *trace, unsigned int nosched)
{
struct stack_trace_data data;
struct stackframe frame;
data.trace = trace;
+ data.last_pc = ULONG_MAX;
data.skip = trace->skip;
+ data.no_sched_functions = nosched;
if (tsk != current) {
#ifdef CONFIG_SMP
@@ -102,7 +128,6 @@ void save_stack_trace_tsk(struct task_struct *tsk, struct stack_trace *trace)
trace->entries[trace->nr_entries++] = ULONG_MAX;
return;
#else
- data.no_sched_functions = 1;
frame.fp = thread_saved_fp(tsk);
frame.sp = thread_saved_sp(tsk);
frame.lr = 0; /* recovered from the stack */
@@ -111,11 +136,12 @@ void save_stack_trace_tsk(struct task_struct *tsk, struct stack_trace *trace)
} else {
register unsigned long current_sp asm ("sp");
- data.no_sched_functions = 0;
+ /* We don't want this function or its caller */
+ data.skip += 2;
frame.fp = (unsigned long)__builtin_frame_address(0);
frame.sp = current_sp;
frame.lr = (unsigned long)__builtin_return_address(0);
- frame.pc = (unsigned long)save_stack_trace_tsk;
+ frame.pc = (unsigned long)__save_stack_trace;
}
walk_stackframe(&frame, save_trace, &data);
@@ -123,9 +149,33 @@ void save_stack_trace_tsk(struct task_struct *tsk, struct stack_trace *trace)
trace->entries[trace->nr_entries++] = ULONG_MAX;
}
+void save_stack_trace_regs(struct pt_regs *regs, struct stack_trace *trace)
+{
+ struct stack_trace_data data;
+ struct stackframe frame;
+
+ data.trace = trace;
+ data.skip = trace->skip;
+ data.no_sched_functions = 0;
+
+ frame.fp = regs->ARM_fp;
+ frame.sp = regs->ARM_sp;
+ frame.lr = regs->ARM_lr;
+ frame.pc = regs->ARM_pc;
+
+ walk_stackframe(&frame, save_trace, &data);
+ if (trace->nr_entries < trace->max_entries)
+ trace->entries[trace->nr_entries++] = ULONG_MAX;
+}
+
+void save_stack_trace_tsk(struct task_struct *tsk, struct stack_trace *trace)
+{
+ __save_stack_trace(tsk, trace, 1);
+}
+
void save_stack_trace(struct stack_trace *trace)
{
- save_stack_trace_tsk(current, trace);
+ __save_stack_trace(current, trace, 0);
}
EXPORT_SYMBOL_GPL(save_stack_trace);
#endif
diff --git a/arch/arm/kernel/suspend.c b/arch/arm/kernel/suspend.c
index 41cf3cbf756..2835d35234c 100644
--- a/arch/arm/kernel/suspend.c
+++ b/arch/arm/kernel/suspend.c
@@ -10,7 +10,7 @@
#include <asm/suspend.h>
#include <asm/tlbflush.h>
-extern int __cpu_suspend(unsigned long, int (*)(unsigned long));
+extern int __cpu_suspend(unsigned long, int (*)(unsigned long), u32 cpuid);
extern void cpu_resume_mmu(void);
#ifdef CONFIG_MMU
@@ -21,6 +21,7 @@ extern void cpu_resume_mmu(void);
int cpu_suspend(unsigned long arg, int (*fn)(unsigned long))
{
struct mm_struct *mm = current->active_mm;
+ u32 __mpidr = cpu_logical_map(smp_processor_id());
int ret;
if (!idmap_pgd)
@@ -32,7 +33,7 @@ int cpu_suspend(unsigned long arg, int (*fn)(unsigned long))
* resume (indicated by a zero return code), we need to switch
* back to the correct page tables.
*/
- ret = __cpu_suspend(arg, fn);
+ ret = __cpu_suspend(arg, fn, __mpidr);
if (ret == 0) {
cpu_switch_mm(mm->pgd, mm);
local_flush_bp_all();
@@ -44,7 +45,8 @@ int cpu_suspend(unsigned long arg, int (*fn)(unsigned long))
#else
int cpu_suspend(unsigned long arg, int (*fn)(unsigned long))
{
- return __cpu_suspend(arg, fn);
+ u32 __mpidr = cpu_logical_map(smp_processor_id());
+ return __cpu_suspend(arg, fn, __mpidr);
}
#define idmap_pgd NULL
#endif
diff --git a/arch/arm/kernel/sys_oabi-compat.c b/arch/arm/kernel/sys_oabi-compat.c
index 3e94811690c..e90a3148f38 100644
--- a/arch/arm/kernel/sys_oabi-compat.c
+++ b/arch/arm/kernel/sys_oabi-compat.c
@@ -203,6 +203,9 @@ asmlinkage long sys_oabi_fcntl64(unsigned int fd, unsigned int cmd,
int ret;
switch (cmd) {
+ case F_OFD_GETLK:
+ case F_OFD_SETLK:
+ case F_OFD_SETLKW:
case F_GETLK64:
case F_SETLK64:
case F_SETLKW64:
diff --git a/arch/arm/kernel/tcm.c b/arch/arm/kernel/tcm.c
index f50f19e5c13..7a3be1d4d0b 100644
--- a/arch/arm/kernel/tcm.c
+++ b/arch/arm/kernel/tcm.c
@@ -52,7 +52,7 @@ static struct map_desc dtcm_iomap[] __initdata = {
.virtual = DTCM_OFFSET,
.pfn = __phys_to_pfn(DTCM_OFFSET),
.length = 0,
- .type = MT_MEMORY_DTCM
+ .type = MT_MEMORY_RW_DTCM
}
};
@@ -61,7 +61,7 @@ static struct map_desc itcm_iomap[] __initdata = {
.virtual = ITCM_OFFSET,
.pfn = __phys_to_pfn(ITCM_OFFSET),
.length = 0,
- .type = MT_MEMORY_ITCM
+ .type = MT_MEMORY_RWX_ITCM,
}
};
diff --git a/arch/arm/kernel/time.c b/arch/arm/kernel/time.c
index 98aee325839..829a96d4a17 100644
--- a/arch/arm/kernel/time.c
+++ b/arch/arm/kernel/time.c
@@ -11,25 +11,26 @@
* This file contains the ARM-specific time handling details:
* reading the RTC at bootup, etc...
*/
+#include <linux/clk-provider.h>
+#include <linux/clocksource.h>
+#include <linux/errno.h>
#include <linux/export.h>
-#include <linux/kernel.h>
-#include <linux/interrupt.h>
-#include <linux/time.h>
#include <linux/init.h>
+#include <linux/interrupt.h>
+#include <linux/irq.h>
+#include <linux/kernel.h>
+#include <linux/profile.h>
#include <linux/sched.h>
+#include <linux/sched_clock.h>
#include <linux/smp.h>
+#include <linux/time.h>
#include <linux/timex.h>
-#include <linux/errno.h>
-#include <linux/profile.h>
#include <linux/timer.h>
-#include <linux/clocksource.h>
-#include <linux/irq.h>
-#include <linux/sched_clock.h>
-#include <asm/thread_info.h>
-#include <asm/stacktrace.h>
#include <asm/mach/arch.h>
#include <asm/mach/time.h>
+#include <asm/stacktrace.h>
+#include <asm/thread_info.h>
#if defined(CONFIG_RTC_DRV_CMOS) || defined(CONFIG_RTC_DRV_CMOS_MODULE) || \
defined(CONFIG_NVRAM) || defined(CONFIG_NVRAM_MODULE)
@@ -116,8 +117,12 @@ int __init register_persistent_clock(clock_access_fn read_boot,
void __init time_init(void)
{
- if (machine_desc->init_time)
+ if (machine_desc->init_time) {
machine_desc->init_time();
- else
+ } else {
+#ifdef CONFIG_COMMON_CLK
+ of_clk_init(NULL);
+#endif
clocksource_of_init();
+ }
}
diff --git a/arch/arm/kernel/topology.c b/arch/arm/kernel/topology.c
index 85a87370f14..e35d880f977 100644
--- a/arch/arm/kernel/topology.c
+++ b/arch/arm/kernel/topology.c
@@ -26,30 +26,30 @@
#include <asm/topology.h>
/*
- * cpu power scale management
+ * cpu capacity scale management
*/
/*
- * cpu power table
+ * cpu capacity table
* This per cpu data structure describes the relative capacity of each core.
 * On a heterogeneous system, cores don't have the same computation capacity
- * and we reflect that difference in the cpu_power field so the scheduler can
- * take this difference into account during load balance. A per cpu structure
- * is preferred because each CPU updates its own cpu_power field during the
- * load balance except for idle cores. One idle core is selected to run the
- * rebalance_domains for all idle cores and the cpu_power can be updated
- * during this sequence.
+ * and we reflect that difference in the cpu_capacity field so the scheduler
+ * can take this difference into account during load balance. A per cpu
+ * structure is preferred because each CPU updates its own cpu_capacity field
+ * during the load balance except for idle cores. One idle core is selected
+ * to run the rebalance_domains for all idle cores and the cpu_capacity can be
+ * updated during this sequence.
*/
static DEFINE_PER_CPU(unsigned long, cpu_scale);
-unsigned long arch_scale_freq_power(struct sched_domain *sd, int cpu)
+unsigned long arch_scale_freq_capacity(struct sched_domain *sd, int cpu)
{
return per_cpu(cpu_scale, cpu);
}
-static void set_power_scale(unsigned int cpu, unsigned long power)
+static void set_capacity_scale(unsigned int cpu, unsigned long capacity)
{
- per_cpu(cpu_scale, cpu) = power;
+ per_cpu(cpu_scale, cpu) = capacity;
}
#ifdef CONFIG_OF
@@ -62,42 +62,42 @@ struct cpu_efficiency {
* Table of relative efficiency of each processors
* The efficiency value must fit in 20bit and the final
* cpu_scale value must be in the range
- * 0 < cpu_scale < 3*SCHED_POWER_SCALE/2
+ * 0 < cpu_scale < 3*SCHED_CAPACITY_SCALE/2
* in order to return at most 1 when DIV_ROUND_CLOSEST
* is used to compute the capacity of a CPU.
* Processors that are not defined in the table,
- * use the default SCHED_POWER_SCALE value for cpu_scale.
+ * use the default SCHED_CAPACITY_SCALE value for cpu_scale.
*/
-struct cpu_efficiency table_efficiency[] = {
+static const struct cpu_efficiency table_efficiency[] = {
{"arm,cortex-a15", 3891},
{"arm,cortex-a7", 2048},
{NULL, },
};
-unsigned long *__cpu_capacity;
+static unsigned long *__cpu_capacity;
#define cpu_capacity(cpu) __cpu_capacity[cpu]
-unsigned long middle_capacity = 1;
+static unsigned long middle_capacity = 1;
/*
* Iterate all CPUs' descriptor in DT and compute the efficiency
* (as per table_efficiency). Also calculate a middle efficiency
* as close as possible to (max{eff_i} - min{eff_i}) / 2
- * This is later used to scale the cpu_power field such that an
- * 'average' CPU is of middle power. Also see the comments near
- * table_efficiency[] and update_cpu_power().
+ * This is later used to scale the cpu_capacity field such that an
+ * 'average' CPU is of middle capacity. Also see the comments near
+ * table_efficiency[] and update_cpu_capacity().
*/
static void __init parse_dt_topology(void)
{
- struct cpu_efficiency *cpu_eff;
+ const struct cpu_efficiency *cpu_eff;
struct device_node *cn = NULL;
- unsigned long min_capacity = (unsigned long)(-1);
+ unsigned long min_capacity = ULONG_MAX;
unsigned long max_capacity = 0;
unsigned long capacity = 0;
- int alloc_size, cpu = 0;
+ int cpu = 0;
- alloc_size = nr_cpu_ids * sizeof(*__cpu_capacity);
- __cpu_capacity = kzalloc(alloc_size, GFP_NOWAIT);
+ __cpu_capacity = kcalloc(nr_cpu_ids, sizeof(*__cpu_capacity),
+ GFP_NOWAIT);
for_each_possible_cpu(cpu) {
const u32 *rate;
@@ -141,15 +141,15 @@ static void __init parse_dt_topology(void)
* cpu_scale because all CPUs have the same capacity. Otherwise, we
* compute a middle_capacity factor that will ensure that the capacity
* of an 'average' CPU of the system will be as close as possible to
- * SCHED_POWER_SCALE, which is the default value, but with the
+ * SCHED_CAPACITY_SCALE, which is the default value, but with the
* constraint explained near table_efficiency[].
*/
if (4*max_capacity < (3*(max_capacity + min_capacity)))
middle_capacity = (min_capacity + max_capacity)
- >> (SCHED_POWER_SHIFT+1);
+ >> (SCHED_CAPACITY_SHIFT+1);
else
middle_capacity = ((max_capacity / 3)
- >> (SCHED_POWER_SHIFT-1)) + 1;
+ >> (SCHED_CAPACITY_SHIFT-1)) + 1;
}
@@ -158,20 +158,20 @@ static void __init parse_dt_topology(void)
 * boot. The update of all CPUs is in O(n^2) for a heterogeneous system but the
* function returns directly for SMP system.
*/
-void update_cpu_power(unsigned int cpu)
+static void update_cpu_capacity(unsigned int cpu)
{
if (!cpu_capacity(cpu))
return;
- set_power_scale(cpu, cpu_capacity(cpu) / middle_capacity);
+ set_capacity_scale(cpu, cpu_capacity(cpu) / middle_capacity);
- printk(KERN_INFO "CPU%u: update cpu_power %lu\n",
- cpu, arch_scale_freq_power(NULL, cpu));
+ printk(KERN_INFO "CPU%u: update cpu_capacity %lu\n",
+ cpu, arch_scale_freq_capacity(NULL, cpu));
}
#else
static inline void parse_dt_topology(void) {}
-static inline void update_cpu_power(unsigned int cpuid) {}
+static inline void update_cpu_capacity(unsigned int cpuid) {}
#endif
/*
@@ -185,7 +185,16 @@ const struct cpumask *cpu_coregroup_mask(int cpu)
return &cpu_topology[cpu].core_sibling;
}
-void update_siblings_masks(unsigned int cpuid)
+/*
+ * The current assumption is that we can power gate each core independently.
+ * This will be superseded by DT binding once available.
+ */
+const struct cpumask *cpu_corepower_mask(int cpu)
+{
+ return &cpu_topology[cpu].thread_sibling;
+}
+
+static void update_siblings_masks(unsigned int cpuid)
{
struct cputopo_arm *cpu_topo, *cpuid_topo = &cpu_topology[cpuid];
int cpu;
@@ -258,7 +267,7 @@ void store_cpu_topology(unsigned int cpuid)
update_siblings_masks(cpuid);
- update_cpu_power(cpuid);
+ update_cpu_capacity(cpuid);
printk(KERN_INFO "CPU%u: thread %d, cpu %d, socket %d, mpidr %x\n",
cpuid, cpu_topology[cpuid].thread_id,
@@ -266,6 +275,20 @@ void store_cpu_topology(unsigned int cpuid)
cpu_topology[cpuid].socket_id, mpidr);
}
+static inline int cpu_corepower_flags(void)
+{
+ return SD_SHARE_PKG_RESOURCES | SD_SHARE_POWERDOMAIN;
+}
+
+static struct sched_domain_topology_level arm_topology[] = {
+#ifdef CONFIG_SCHED_MC
+ { cpu_corepower_mask, cpu_corepower_flags, SD_INIT_NAME(GMC) },
+ { cpu_coregroup_mask, cpu_core_flags, SD_INIT_NAME(MC) },
+#endif
+ { cpu_cpu_mask, SD_INIT_NAME(DIE) },
+ { NULL, },
+};
+
/*
* init_cpu_topology is called at boot when only one cpu is running
* which prevents simultaneous write access to the cpu_topology array
@@ -274,7 +297,7 @@ void __init init_cpu_topology(void)
{
unsigned int cpu;
- /* init core mask and power*/
+ /* init core mask and capacity */
for_each_possible_cpu(cpu) {
struct cputopo_arm *cpu_topo = &(cpu_topology[cpu]);
@@ -284,9 +307,12 @@ void __init init_cpu_topology(void)
cpumask_clear(&cpu_topo->core_sibling);
cpumask_clear(&cpu_topo->thread_sibling);
- set_power_scale(cpu, SCHED_POWER_SCALE);
+ set_capacity_scale(cpu, SCHED_CAPACITY_SCALE);
}
smp_wmb();
parse_dt_topology();
+
+ /* Set scheduler topology descriptor */
+ set_sched_topology(arm_topology);
}
diff --git a/arch/arm/kernel/traps.c b/arch/arm/kernel/traps.c
index 8fcda140358..abd2fc06773 100644
--- a/arch/arm/kernel/traps.c
+++ b/arch/arm/kernel/traps.c
@@ -34,8 +34,15 @@
#include <asm/unwind.h>
#include <asm/tls.h>
#include <asm/system_misc.h>
-
-static const char *handler[]= { "prefetch abort", "data abort", "address exception", "interrupt" };
+#include <asm/opcodes.h>
+
+static const char *handler[] = {
+ "prefetch abort",
+ "data abort",
+ "address exception",
+ "interrupt",
+ "undefined instruction",
+};
void *vectors_page;
@@ -55,7 +62,7 @@ static void dump_mem(const char *, const char *, unsigned long, unsigned long);
void dump_backtrace_entry(unsigned long where, unsigned long from, unsigned long frame)
{
#ifdef CONFIG_KALLSYMS
- printk("[<%08lx>] (%pS) from [<%08lx>] (%pS)\n", where, (void *)where, from, (void *)from);
+ printk("[<%08lx>] (%ps) from [<%08lx>] (%pS)\n", where, (void *)where, from, (void *)from);
#else
printk("Function entered at [<%08lx>] from [<%08lx>]\n", where, from);
#endif
@@ -341,15 +348,17 @@ void arm_notify_die(const char *str, struct pt_regs *regs,
int is_valid_bugaddr(unsigned long pc)
{
#ifdef CONFIG_THUMB2_KERNEL
- unsigned short bkpt;
+ u16 bkpt;
+ u16 insn = __opcode_to_mem_thumb16(BUG_INSTR_VALUE);
#else
- unsigned long bkpt;
+ u32 bkpt;
+ u32 insn = __opcode_to_mem_arm(BUG_INSTR_VALUE);
#endif
if (probe_kernel_address((unsigned *)pc, bkpt))
return 0;
- return bkpt == BUG_INSTR_VALUE;
+ return bkpt == insn;
}
#endif
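A rough model of why the comparison above goes through __opcode_to_mem_arm(): on a BE8 kernel, data accesses are big-endian while the instruction stream stays little-endian, so the canonical opcode has to be byte-swapped into memory order before it can be compared with a word read from the text section. The opcode value below is a placeholder, not necessarily the real BUG_INSTR_VALUE:

#include <stdint.h>
#include <stdio.h>

#define BUG_OPCODE 0xe7f001f2u		/* placeholder canonical encoding */

static uint32_t opcode_to_mem_arm(uint32_t op, int be8_kernel)
{
	return be8_kernel ? __builtin_bswap32(op) : op;
}

int main(void)
{
	int be8 = 1;
	uint32_t word_in_text = opcode_to_mem_arm(BUG_OPCODE, be8);

	/* Comparing the raw word with the canonical value fails on BE8... */
	printf("naive:        %d\n", word_in_text == BUG_OPCODE);
	/* ...while converting the expected opcode first, as above, works. */
	printf("endian-aware: %d\n",
	       word_in_text == opcode_to_mem_arm(BUG_OPCODE, be8));
	return 0;
}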
@@ -402,26 +411,30 @@ asmlinkage void __exception do_undefinstr(struct pt_regs *regs)
if (processor_mode(regs) == SVC_MODE) {
#ifdef CONFIG_THUMB2_KERNEL
if (thumb_mode(regs)) {
- instr = ((u16 *)pc)[0];
+ instr = __mem_to_opcode_thumb16(((u16 *)pc)[0]);
if (is_wide_instruction(instr)) {
- instr <<= 16;
- instr |= ((u16 *)pc)[1];
+ u16 inst2;
+ inst2 = __mem_to_opcode_thumb16(((u16 *)pc)[1]);
+ instr = __opcode_thumb32_compose(instr, inst2);
}
} else
#endif
- instr = *(u32 *) pc;
+ instr = __mem_to_opcode_arm(*(u32 *) pc);
} else if (thumb_mode(regs)) {
if (get_user(instr, (u16 __user *)pc))
goto die_sig;
+ instr = __mem_to_opcode_thumb16(instr);
if (is_wide_instruction(instr)) {
unsigned int instr2;
if (get_user(instr2, (u16 __user *)pc+1))
goto die_sig;
- instr <<= 16;
- instr |= instr2;
+ instr2 = __mem_to_opcode_thumb16(instr2);
+ instr = __opcode_thumb32_compose(instr, instr2);
}
- } else if (get_user(instr, (u32 __user *)pc)) {
- goto die_sig;
+ } else {
+ if (get_user(instr, (u32 __user *)pc))
+ goto die_sig;
+ instr = __mem_to_opcode_arm(instr);
}
if (call_undef_hook(regs, instr) == 0)
@@ -432,6 +445,7 @@ die_sig:
if (user_debug & UDBG_UNDEFINED) {
printk(KERN_INFO "%s (%d): undefined instruction: pc=%p\n",
current->comm, task_pid_nr(current), pc);
+ __show_regs(regs);
dump_instr(KERN_INFO, regs);
}
#endif
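A minimal sketch of the Thumb-2 fetch logic above: a 32-bit Thumb instruction is two consecutive halfwords, the canonical opcode carries the first halfword in its upper 16 bits, and a first halfword of 0xe800 or above marks a wide encoding (the property is_wide_instruction() tests). The halfword values are just an example BL encoding:

#include <stdint.h>
#include <stdio.h>

static int is_wide(uint16_t first)
{
	return first >= 0xe800;		/* 0b11101/0b11110/0b11111 prefixes */
}

static uint32_t thumb32_compose(uint16_t first, uint16_t second)
{
	return ((uint32_t)first << 16) | second;
}

int main(void)
{
	uint16_t hw[2] = { 0xf7ff, 0xfffe };	/* halfwords of a 32-bit BL */
	uint32_t insn = hw[0];

	if (is_wide(hw[0]))
		insn = thumb32_compose(hw[0], hw[1]);

	printf("%08x\n", (unsigned int)insn);	/* f7fffffe */
	return 0;
}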
@@ -503,9 +517,10 @@ static inline int
__do_cache_op(unsigned long start, unsigned long end)
{
int ret;
- unsigned long chunk = PAGE_SIZE;
do {
+ unsigned long chunk = min(PAGE_SIZE, end - start);
+
if (signal_pending(current)) {
struct thread_info *ti = current_thread_info();
@@ -850,7 +865,7 @@ static void __init kuser_init(void *vectors)
memcpy(vectors + 0xfe0, vectors + 0xfe8, 4);
}
#else
-static void __init kuser_init(void *vectors)
+static inline void __init kuser_init(void *vectors)
{
}
#endif
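The __do_cache_op() hunk above clamps each iteration to the bytes actually remaining, so the final step no longer overshoots the end of the range. A small userspace model of that loop, assuming a 4096-byte PAGE_SIZE:

#include <stdio.h>

#define PAGE_SIZE 4096UL

static void walk_range(unsigned long start, unsigned long end)
{
	do {
		unsigned long chunk = end - start < PAGE_SIZE ?
				      end - start : PAGE_SIZE;

		printf("flush [%#lx, %#lx)\n", start, start + chunk);
		start += chunk;
	} while (start < end);
}

int main(void)
{
	walk_range(0x8000, 0x8000 + 5000);	/* one full page, then 904 bytes */
	return 0;
}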
diff --git a/arch/arm/kernel/unwind.c b/arch/arm/kernel/unwind.c
index 00df012c467..e67682f02cb 100644
--- a/arch/arm/kernel/unwind.c
+++ b/arch/arm/kernel/unwind.c
@@ -31,7 +31,7 @@
#warning Your compiler does not have EABI support.
#warning ARM unwind is known to compile only with EABI compilers.
#warning Change compiler or disable ARM_UNWIND option.
-#elif (__GNUC__ == 4 && __GNUC_MINOR__ <= 2)
+#elif (__GNUC__ == 4 && __GNUC_MINOR__ <= 2) && !defined(__clang__)
#warning Your compiler is too buggy; it is known to not compile ARM unwind support.
#warning Change compiler or disable ARM_UNWIND option.
#endif
@@ -68,6 +68,12 @@ EXPORT_SYMBOL(__aeabi_unwind_cpp_pr2);
struct unwind_ctrl_block {
unsigned long vrs[16]; /* virtual register set */
const unsigned long *insn; /* pointer to the current instructions word */
+ unsigned long sp_high; /* highest value of sp allowed */
+ /*
+ * 1 : check for stack overflow for each register pop.
+ * 0 : skip the check to save overhead while plenty of stack remains.
+ */
+ int check_each_pop;
int entries; /* number of entries left to interpret */
int byte; /* current byte number in the instructions word */
};
@@ -235,12 +241,85 @@ static unsigned long unwind_get_byte(struct unwind_ctrl_block *ctrl)
return ret;
}
+/* Before popping a register, check whether it is feasible or not */
+static int unwind_pop_register(struct unwind_ctrl_block *ctrl,
+ unsigned long **vsp, unsigned int reg)
+{
+ if (unlikely(ctrl->check_each_pop))
+ if (*vsp >= (unsigned long *)ctrl->sp_high)
+ return -URC_FAILURE;
+
+ ctrl->vrs[reg] = *(*vsp)++;
+ return URC_OK;
+}
+
+/* Helper functions to execute the instructions */
+static int unwind_exec_pop_subset_r4_to_r13(struct unwind_ctrl_block *ctrl,
+ unsigned long mask)
+{
+ unsigned long *vsp = (unsigned long *)ctrl->vrs[SP];
+ int load_sp, reg = 4;
+
+ load_sp = mask & (1 << (13 - 4));
+ while (mask) {
+ if (mask & 1)
+ if (unwind_pop_register(ctrl, &vsp, reg))
+ return -URC_FAILURE;
+ mask >>= 1;
+ reg++;
+ }
+ if (!load_sp)
+ ctrl->vrs[SP] = (unsigned long)vsp;
+
+ return URC_OK;
+}
+
+static int unwind_exec_pop_r4_to_rN(struct unwind_ctrl_block *ctrl,
+ unsigned long insn)
+{
+ unsigned long *vsp = (unsigned long *)ctrl->vrs[SP];
+ int reg;
+
+ /* pop R4-R[4+bbb] */
+ for (reg = 4; reg <= 4 + (insn & 7); reg++)
+ if (unwind_pop_register(ctrl, &vsp, reg))
+ return -URC_FAILURE;
+
+ if (insn & 0x8)
+ if (unwind_pop_register(ctrl, &vsp, 14))
+ return -URC_FAILURE;
+
+ ctrl->vrs[SP] = (unsigned long)vsp;
+
+ return URC_OK;
+}
+
+static int unwind_exec_pop_subset_r0_to_r3(struct unwind_ctrl_block *ctrl,
+ unsigned long mask)
+{
+ unsigned long *vsp = (unsigned long *)ctrl->vrs[SP];
+ int reg = 0;
+
+ /* pop R0-R3 according to mask */
+ while (mask) {
+ if (mask & 1)
+ if (unwind_pop_register(ctrl, &vsp, reg))
+ return -URC_FAILURE;
+ mask >>= 1;
+ reg++;
+ }
+ ctrl->vrs[SP] = (unsigned long)vsp;
+
+ return URC_OK;
+}
+
/*
* Execute the current unwind instruction.
*/
static int unwind_exec_insn(struct unwind_ctrl_block *ctrl)
{
unsigned long insn = unwind_get_byte(ctrl);
+ int ret = URC_OK;
pr_debug("%s: insn = %08lx\n", __func__, insn);
@@ -250,8 +329,6 @@ static int unwind_exec_insn(struct unwind_ctrl_block *ctrl)
ctrl->vrs[SP] -= ((insn & 0x3f) << 2) + 4;
else if ((insn & 0xf0) == 0x80) {
unsigned long mask;
- unsigned long *vsp = (unsigned long *)ctrl->vrs[SP];
- int load_sp, reg = 4;
insn = (insn << 8) | unwind_get_byte(ctrl);
mask = insn & 0x0fff;
@@ -261,29 +338,16 @@ static int unwind_exec_insn(struct unwind_ctrl_block *ctrl)
return -URC_FAILURE;
}
- /* pop R4-R15 according to mask */
- load_sp = mask & (1 << (13 - 4));
- while (mask) {
- if (mask & 1)
- ctrl->vrs[reg] = *vsp++;
- mask >>= 1;
- reg++;
- }
- if (!load_sp)
- ctrl->vrs[SP] = (unsigned long)vsp;
+ ret = unwind_exec_pop_subset_r4_to_r13(ctrl, mask);
+ if (ret)
+ goto error;
} else if ((insn & 0xf0) == 0x90 &&
(insn & 0x0d) != 0x0d)
ctrl->vrs[SP] = ctrl->vrs[insn & 0x0f];
else if ((insn & 0xf0) == 0xa0) {
- unsigned long *vsp = (unsigned long *)ctrl->vrs[SP];
- int reg;
-
- /* pop R4-R[4+bbb] */
- for (reg = 4; reg <= 4 + (insn & 7); reg++)
- ctrl->vrs[reg] = *vsp++;
- if (insn & 0x80)
- ctrl->vrs[14] = *vsp++;
- ctrl->vrs[SP] = (unsigned long)vsp;
+ ret = unwind_exec_pop_r4_to_rN(ctrl, insn);
+ if (ret)
+ goto error;
} else if (insn == 0xb0) {
if (ctrl->vrs[PC] == 0)
ctrl->vrs[PC] = ctrl->vrs[LR];
@@ -291,8 +355,6 @@ static int unwind_exec_insn(struct unwind_ctrl_block *ctrl)
ctrl->entries = 0;
} else if (insn == 0xb1) {
unsigned long mask = unwind_get_byte(ctrl);
- unsigned long *vsp = (unsigned long *)ctrl->vrs[SP];
- int reg = 0;
if (mask == 0 || mask & 0xf0) {
pr_warning("unwind: Spare encoding %04lx\n",
@@ -300,14 +362,9 @@ static int unwind_exec_insn(struct unwind_ctrl_block *ctrl)
return -URC_FAILURE;
}
- /* pop R0-R3 according to mask */
- while (mask) {
- if (mask & 1)
- ctrl->vrs[reg] = *vsp++;
- mask >>= 1;
- reg++;
- }
- ctrl->vrs[SP] = (unsigned long)vsp;
+ ret = unwind_exec_pop_subset_r0_to_r3(ctrl, mask);
+ if (ret)
+ goto error;
} else if (insn == 0xb2) {
unsigned long uleb128 = unwind_get_byte(ctrl);
@@ -320,7 +377,8 @@ static int unwind_exec_insn(struct unwind_ctrl_block *ctrl)
pr_debug("%s: fp = %08lx sp = %08lx lr = %08lx pc = %08lx\n", __func__,
ctrl->vrs[FP], ctrl->vrs[SP], ctrl->vrs[LR], ctrl->vrs[PC]);
- return URC_OK;
+error:
+ return ret;
}
/*
@@ -329,13 +387,13 @@ static int unwind_exec_insn(struct unwind_ctrl_block *ctrl)
*/
int unwind_frame(struct stackframe *frame)
{
- unsigned long high, low;
+ unsigned long low;
const struct unwind_idx *idx;
struct unwind_ctrl_block ctrl;
- /* only go to a higher address on the stack */
+ /* store the highest address on the stack to avoid crossing it */
low = frame->sp;
- high = ALIGN(low, THREAD_SIZE);
+ ctrl.sp_high = ALIGN(low, THREAD_SIZE);
pr_debug("%s(pc = %08lx lr = %08lx sp = %08lx)\n", __func__,
frame->pc, frame->lr, frame->sp);
@@ -382,11 +440,16 @@ int unwind_frame(struct stackframe *frame)
return -URC_FAILURE;
}
+ ctrl.check_each_pop = 0;
+
while (ctrl.entries > 0) {
- int urc = unwind_exec_insn(&ctrl);
+ int urc;
+ if ((ctrl.sp_high - ctrl.vrs[SP]) < sizeof(ctrl.vrs))
+ ctrl.check_each_pop = 1;
+ urc = unwind_exec_insn(&ctrl);
if (urc < 0)
return urc;
- if (ctrl.vrs[SP] < low || ctrl.vrs[SP] >= high)
+ if (ctrl.vrs[SP] < low || ctrl.vrs[SP] >= ctrl.sp_high)
return -URC_FAILURE;
}
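A compact userspace model of the overflow guard introduced in this file: pops through the virtual register set stay unchecked while plenty of stack remains, and switch to a per-pop bound check once fewer than sizeof(vrs) bytes are left below sp_high. Names and sizes here are illustrative, not the kernel's:

#include <stdio.h>

#define NVRS 16

struct ctrl {
	unsigned long vrs[NVRS];
	const unsigned long *sp;		/* stands in for vrs[SP] */
	const unsigned long *sp_high;		/* top of the stack region */
	int check_each_pop;
};

static int pop_register(struct ctrl *c, unsigned int reg)
{
	if (c->check_each_pop && c->sp >= c->sp_high)
		return -1;			/* would run off the stack */
	c->vrs[reg] = *c->sp++;
	return 0;
}

int main(void)
{
	unsigned long stack[4] = { 1, 2, 3, 4 };
	struct ctrl c = { .sp = stack, .sp_high = stack + 4 };

	for (unsigned int reg = 4; reg <= 9; reg++) {
		/* enable the per-pop check only when the remaining room is
		 * smaller than the whole virtual register set */
		if ((unsigned long)(c.sp_high - c.sp) * sizeof(*c.sp) <
		    sizeof(c.vrs))
			c.check_each_pop = 1;
		if (pop_register(&c, reg)) {
			printf("stopped at r%u\n", reg);
			break;
		}
	}
	return 0;
}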
diff --git a/arch/arm/kernel/uprobes-arm.c b/arch/arm/kernel/uprobes-arm.c
new file mode 100644
index 00000000000..d3b655ff17d
--- /dev/null
+++ b/arch/arm/kernel/uprobes-arm.c
@@ -0,0 +1,234 @@
+/*
+ * Copyright (C) 2012 Rabin Vincent <rabin@rab.in>
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#include <linux/kernel.h>
+#include <linux/types.h>
+#include <linux/stddef.h>
+#include <linux/wait.h>
+#include <linux/uprobes.h>
+#include <linux/module.h>
+
+#include "probes.h"
+#include "probes-arm.h"
+#include "uprobes.h"
+
+static int uprobes_substitute_pc(unsigned long *pinsn, u32 oregs)
+{
+ probes_opcode_t insn = __mem_to_opcode_arm(*pinsn);
+ probes_opcode_t temp;
+ probes_opcode_t mask;
+ int freereg;
+ u32 free = 0xffff;
+ u32 regs;
+
+ for (regs = oregs; regs; regs >>= 4, insn >>= 4) {
+ if ((regs & 0xf) == REG_TYPE_NONE)
+ continue;
+
+ free &= ~(1 << (insn & 0xf));
+ }
+
+ /* No PC, no problem */
+ if (free & (1 << 15))
+ return 15;
+
+ if (!free)
+ return -1;
+
+ /*
+ * fls instead of ffs ensures that for "ldrd r0, r1, [pc]" we would
+ * pick LR instead of R1.
+ */
+ freereg = free = fls(free) - 1;
+
+ temp = __mem_to_opcode_arm(*pinsn);
+ insn = temp;
+ regs = oregs;
+ mask = 0xf;
+
+ for (; regs; regs >>= 4, mask <<= 4, free <<= 4, temp >>= 4) {
+ if ((regs & 0xf) == REG_TYPE_NONE)
+ continue;
+
+ if ((temp & 0xf) != 15)
+ continue;
+
+ insn &= ~mask;
+ insn |= free & mask;
+ }
+
+ *pinsn = __opcode_to_mem_arm(insn);
+ return freereg;
+}
+
+static void uprobe_set_pc(struct arch_uprobe *auprobe,
+ struct arch_uprobe_task *autask,
+ struct pt_regs *regs)
+{
+ u32 pcreg = auprobe->pcreg;
+
+ autask->backup = regs->uregs[pcreg];
+ regs->uregs[pcreg] = regs->ARM_pc + 8;
+}
+
+static void uprobe_unset_pc(struct arch_uprobe *auprobe,
+ struct arch_uprobe_task *autask,
+ struct pt_regs *regs)
+{
+ /* PC will be taken care of by common code */
+ regs->uregs[auprobe->pcreg] = autask->backup;
+}
+
+static void uprobe_aluwrite_pc(struct arch_uprobe *auprobe,
+ struct arch_uprobe_task *autask,
+ struct pt_regs *regs)
+{
+ u32 pcreg = auprobe->pcreg;
+
+ alu_write_pc(regs->uregs[pcreg], regs);
+ regs->uregs[pcreg] = autask->backup;
+}
+
+static void uprobe_write_pc(struct arch_uprobe *auprobe,
+ struct arch_uprobe_task *autask,
+ struct pt_regs *regs)
+{
+ u32 pcreg = auprobe->pcreg;
+
+ load_write_pc(regs->uregs[pcreg], regs);
+ regs->uregs[pcreg] = autask->backup;
+}
+
+enum probes_insn
+decode_pc_ro(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *d)
+{
+ struct arch_uprobe *auprobe = container_of(asi, struct arch_uprobe,
+ asi);
+ struct decode_emulate *decode = (struct decode_emulate *) d;
+ u32 regs = decode->header.type_regs.bits >> DECODE_TYPE_BITS;
+ int reg;
+
+ reg = uprobes_substitute_pc(&auprobe->ixol[0], regs);
+ if (reg == 15)
+ return INSN_GOOD;
+
+ if (reg == -1)
+ return INSN_REJECTED;
+
+ auprobe->pcreg = reg;
+ auprobe->prehandler = uprobe_set_pc;
+ auprobe->posthandler = uprobe_unset_pc;
+
+ return INSN_GOOD;
+}
+
+enum probes_insn
+decode_wb_pc(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *d, bool alu)
+{
+ struct arch_uprobe *auprobe = container_of(asi, struct arch_uprobe,
+ asi);
+ enum probes_insn ret = decode_pc_ro(insn, asi, d);
+
+ if (((insn >> 12) & 0xf) == 15)
+ auprobe->posthandler = alu ? uprobe_aluwrite_pc
+ : uprobe_write_pc;
+
+ return ret;
+}
+
+enum probes_insn
+decode_rd12rn16rm0rs8_rwflags(probes_opcode_t insn,
+ struct arch_probes_insn *asi,
+ const struct decode_header *d)
+{
+ return decode_wb_pc(insn, asi, d, true);
+}
+
+enum probes_insn
+decode_ldr(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *d)
+{
+ return decode_wb_pc(insn, asi, d, false);
+}
+
+enum probes_insn
+uprobe_decode_ldmstm(probes_opcode_t insn,
+ struct arch_probes_insn *asi,
+ const struct decode_header *d)
+{
+ struct arch_uprobe *auprobe = container_of(asi, struct arch_uprobe,
+ asi);
+ unsigned reglist = insn & 0xffff;
+ int rn = (insn >> 16) & 0xf;
+ int lbit = insn & (1 << 20);
+ unsigned used = reglist | (1 << rn);
+
+ if (rn == 15)
+ return INSN_REJECTED;
+
+ if (!(used & (1 << 15)))
+ return INSN_GOOD;
+
+ if (used & (1 << 14))
+ return INSN_REJECTED;
+
+ /* Use LR instead of PC */
+ insn ^= 0xc000;
+
+ auprobe->pcreg = 14;
+ auprobe->ixol[0] = __opcode_to_mem_arm(insn);
+
+ auprobe->prehandler = uprobe_set_pc;
+ if (lbit)
+ auprobe->posthandler = uprobe_write_pc;
+ else
+ auprobe->posthandler = uprobe_unset_pc;
+
+ return INSN_GOOD;
+}
+
+const union decode_action uprobes_probes_actions[] = {
+ [PROBES_EMULATE_NONE] = {.handler = probes_simulate_nop},
+ [PROBES_SIMULATE_NOP] = {.handler = probes_simulate_nop},
+ [PROBES_PRELOAD_IMM] = {.handler = probes_simulate_nop},
+ [PROBES_PRELOAD_REG] = {.handler = probes_simulate_nop},
+ [PROBES_BRANCH_IMM] = {.handler = simulate_blx1},
+ [PROBES_MRS] = {.handler = simulate_mrs},
+ [PROBES_BRANCH_REG] = {.handler = simulate_blx2bx},
+ [PROBES_CLZ] = {.handler = probes_simulate_nop},
+ [PROBES_SATURATING_ARITHMETIC] = {.handler = probes_simulate_nop},
+ [PROBES_MUL1] = {.handler = probes_simulate_nop},
+ [PROBES_MUL2] = {.handler = probes_simulate_nop},
+ [PROBES_SWP] = {.handler = probes_simulate_nop},
+ [PROBES_LDRSTRD] = {.decoder = decode_pc_ro},
+ [PROBES_LOAD_EXTRA] = {.decoder = decode_pc_ro},
+ [PROBES_LOAD] = {.decoder = decode_ldr},
+ [PROBES_STORE_EXTRA] = {.decoder = decode_pc_ro},
+ [PROBES_STORE] = {.decoder = decode_pc_ro},
+ [PROBES_MOV_IP_SP] = {.handler = simulate_mov_ipsp},
+ [PROBES_DATA_PROCESSING_REG] = {
+ .decoder = decode_rd12rn16rm0rs8_rwflags},
+ [PROBES_DATA_PROCESSING_IMM] = {
+ .decoder = decode_rd12rn16rm0rs8_rwflags},
+ [PROBES_MOV_HALFWORD] = {.handler = probes_simulate_nop},
+ [PROBES_SEV] = {.handler = probes_simulate_nop},
+ [PROBES_WFE] = {.handler = probes_simulate_nop},
+ [PROBES_SATURATE] = {.handler = probes_simulate_nop},
+ [PROBES_REV] = {.handler = probes_simulate_nop},
+ [PROBES_MMI] = {.handler = probes_simulate_nop},
+ [PROBES_PACK] = {.handler = probes_simulate_nop},
+ [PROBES_EXTEND] = {.handler = probes_simulate_nop},
+ [PROBES_EXTEND_ADD] = {.handler = probes_simulate_nop},
+ [PROBES_MUL_ADD_LONG] = {.handler = probes_simulate_nop},
+ [PROBES_MUL_ADD] = {.handler = probes_simulate_nop},
+ [PROBES_BITFIELD] = {.handler = probes_simulate_nop},
+ [PROBES_BRANCH] = {.handler = simulate_bbl},
+ [PROBES_LDMSTM] = {.decoder = uprobe_decode_ldmstm}
+};
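A short sketch of the scratch-register search in uprobes_substitute_pc() above: gather the registers the instruction names into a mask, then take the highest register still free. Using an fls()-style search rather than ffs() matters for cases like "ldrd r0, r1, [pc]", where r1 is an implicit second destination the decoder never records:

#include <stdio.h>

/* fls()-style helper: 1-based index of the highest set bit, 0 if none. */
static int fls32(unsigned int x)
{
	return x ? 32 - __builtin_clz(x) : 0;
}

int main(void)
{
	/* "ldrd r0, r1, [pc]": the decoder sees r0 and pc; r1 still looks free. */
	unsigned int used = (1u << 0) | (1u << 15);
	unsigned int free = 0xffffu & ~used;

	printf("ffs pick: r%d\n", __builtin_ffs(free) - 1);	/* r1 - clashes */
	printf("fls pick: r%d\n", fls32(free) - 1);		/* r14 (LR) - safe */
	return 0;
}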
diff --git a/arch/arm/kernel/uprobes.c b/arch/arm/kernel/uprobes.c
new file mode 100644
index 00000000000..56adf9c1fde
--- /dev/null
+++ b/arch/arm/kernel/uprobes.c
@@ -0,0 +1,230 @@
+/*
+ * Copyright (C) 2012 Rabin Vincent <rabin@rab.in>
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#include <linux/kernel.h>
+#include <linux/stddef.h>
+#include <linux/errno.h>
+#include <linux/highmem.h>
+#include <linux/sched.h>
+#include <linux/uprobes.h>
+#include <linux/notifier.h>
+
+#include <asm/opcodes.h>
+#include <asm/traps.h>
+
+#include "probes.h"
+#include "probes-arm.h"
+#include "uprobes.h"
+
+#define UPROBE_TRAP_NR UINT_MAX
+
+bool is_swbp_insn(uprobe_opcode_t *insn)
+{
+ return (__mem_to_opcode_arm(*insn) & 0x0fffffff) ==
+ (UPROBE_SWBP_ARM_INSN & 0x0fffffff);
+}
+
+int set_swbp(struct arch_uprobe *auprobe, struct mm_struct *mm,
+ unsigned long vaddr)
+{
+ return uprobe_write_opcode(mm, vaddr,
+ __opcode_to_mem_arm(auprobe->bpinsn));
+}
+
+bool arch_uprobe_ignore(struct arch_uprobe *auprobe, struct pt_regs *regs)
+{
+ if (!auprobe->asi.insn_check_cc(regs->ARM_cpsr)) {
+ regs->ARM_pc += 4;
+ return true;
+ }
+
+ return false;
+}
+
+bool arch_uprobe_skip_sstep(struct arch_uprobe *auprobe, struct pt_regs *regs)
+{
+ probes_opcode_t opcode;
+
+ if (!auprobe->simulate)
+ return false;
+
+ opcode = __mem_to_opcode_arm(*(unsigned int *) auprobe->insn);
+
+ auprobe->asi.insn_singlestep(opcode, &auprobe->asi, regs);
+
+ return true;
+}
+
+unsigned long
+arch_uretprobe_hijack_return_addr(unsigned long trampoline_vaddr,
+ struct pt_regs *regs)
+{
+ unsigned long orig_ret_vaddr;
+
+ orig_ret_vaddr = regs->ARM_lr;
+ /* Replace the return addr with trampoline addr */
+ regs->ARM_lr = trampoline_vaddr;
+ return orig_ret_vaddr;
+}
+
+int arch_uprobe_analyze_insn(struct arch_uprobe *auprobe, struct mm_struct *mm,
+ unsigned long addr)
+{
+ unsigned int insn;
+ unsigned int bpinsn;
+ enum probes_insn ret;
+
+ /* Thumb not yet supported */
+ if (addr & 0x3)
+ return -EINVAL;
+
+ insn = __mem_to_opcode_arm(*(unsigned int *)auprobe->insn);
+ auprobe->ixol[0] = __opcode_to_mem_arm(insn);
+ auprobe->ixol[1] = __opcode_to_mem_arm(UPROBE_SS_ARM_INSN);
+
+ ret = arm_probes_decode_insn(insn, &auprobe->asi, false,
+ uprobes_probes_actions);
+ switch (ret) {
+ case INSN_REJECTED:
+ return -EINVAL;
+
+ case INSN_GOOD_NO_SLOT:
+ auprobe->simulate = true;
+ break;
+
+ case INSN_GOOD:
+ default:
+ break;
+ }
+
+ bpinsn = UPROBE_SWBP_ARM_INSN & 0x0fffffff;
+ if (insn >= 0xe0000000)
+ bpinsn |= 0xe0000000; /* Unconditional instruction */
+ else
+ bpinsn |= insn & 0xf0000000; /* Copy condition from insn */
+
+ auprobe->bpinsn = bpinsn;
+
+ return 0;
+}
+
+void arch_uprobe_copy_ixol(struct page *page, unsigned long vaddr,
+ void *src, unsigned long len)
+{
+ void *xol_page_kaddr = kmap_atomic(page);
+ void *dst = xol_page_kaddr + (vaddr & ~PAGE_MASK);
+
+ preempt_disable();
+
+ /* Initialize the slot */
+ memcpy(dst, src, len);
+
+ /* flush caches (dcache/icache) */
+ flush_uprobe_xol_access(page, vaddr, dst, len);
+
+ preempt_enable();
+
+ kunmap_atomic(xol_page_kaddr);
+}
+
+
+int arch_uprobe_pre_xol(struct arch_uprobe *auprobe, struct pt_regs *regs)
+{
+ struct uprobe_task *utask = current->utask;
+
+ if (auprobe->prehandler)
+ auprobe->prehandler(auprobe, &utask->autask, regs);
+
+ utask->autask.saved_trap_no = current->thread.trap_no;
+ current->thread.trap_no = UPROBE_TRAP_NR;
+ regs->ARM_pc = utask->xol_vaddr;
+
+ return 0;
+}
+
+int arch_uprobe_post_xol(struct arch_uprobe *auprobe, struct pt_regs *regs)
+{
+ struct uprobe_task *utask = current->utask;
+
+ WARN_ON_ONCE(current->thread.trap_no != UPROBE_TRAP_NR);
+
+ current->thread.trap_no = utask->autask.saved_trap_no;
+ regs->ARM_pc = utask->vaddr + 4;
+
+ if (auprobe->posthandler)
+ auprobe->posthandler(auprobe, &utask->autask, regs);
+
+ return 0;
+}
+
+bool arch_uprobe_xol_was_trapped(struct task_struct *t)
+{
+ if (t->thread.trap_no != UPROBE_TRAP_NR)
+ return true;
+
+ return false;
+}
+
+void arch_uprobe_abort_xol(struct arch_uprobe *auprobe, struct pt_regs *regs)
+{
+ struct uprobe_task *utask = current->utask;
+
+ current->thread.trap_no = utask->autask.saved_trap_no;
+ instruction_pointer_set(regs, utask->vaddr);
+}
+
+int arch_uprobe_exception_notify(struct notifier_block *self,
+ unsigned long val, void *data)
+{
+ return NOTIFY_DONE;
+}
+
+static int uprobe_trap_handler(struct pt_regs *regs, unsigned int instr)
+{
+ unsigned long flags;
+
+ local_irq_save(flags);
+ instr &= 0x0fffffff;
+ if (instr == (UPROBE_SWBP_ARM_INSN & 0x0fffffff))
+ uprobe_pre_sstep_notifier(regs);
+ else if (instr == (UPROBE_SS_ARM_INSN & 0x0fffffff))
+ uprobe_post_sstep_notifier(regs);
+ local_irq_restore(flags);
+
+ return 0;
+}
+
+unsigned long uprobe_get_swbp_addr(struct pt_regs *regs)
+{
+ return instruction_pointer(regs);
+}
+
+static struct undef_hook uprobes_arm_break_hook = {
+ .instr_mask = 0x0fffffff,
+ .instr_val = (UPROBE_SWBP_ARM_INSN & 0x0fffffff),
+ .cpsr_mask = MODE_MASK,
+ .cpsr_val = USR_MODE,
+ .fn = uprobe_trap_handler,
+};
+
+static struct undef_hook uprobes_arm_ss_hook = {
+ .instr_mask = 0x0fffffff,
+ .instr_val = (UPROBE_SS_ARM_INSN & 0x0fffffff),
+ .cpsr_mask = MODE_MASK,
+ .cpsr_val = USR_MODE,
+ .fn = uprobe_trap_handler,
+};
+
+static int arch_uprobes_init(void)
+{
+ register_undef_hook(&uprobes_arm_break_hook);
+ register_undef_hook(&uprobes_arm_ss_hook);
+
+ return 0;
+}
+device_initcall(arch_uprobes_init);
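A small model of the condition handling in arch_uprobe_analyze_insn() above: the breakpoint inherits bits [31:28] of the probed instruction, so a conditional instruction whose condition fails also skips the trap. The SWBP template value below is a stand-in, not the real UPROBE_SWBP_ARM_INSN:

#include <stdio.h>

#define SWBP_TEMPLATE 0x07f001f9u	/* stand-in, low 28 bits only */

static unsigned int make_bp(unsigned int insn)
{
	unsigned int bp = SWBP_TEMPLATE & 0x0fffffffu;

	if (insn >= 0xe0000000u)
		bp |= 0xe0000000u;		/* probe unconditionally */
	else
		bp |= insn & 0xf0000000u;	/* inherit the condition */
	return bp;
}

int main(void)
{
	printf("%08x\n", make_bp(0x1a000000u));	/* BNE ...   -> conditional bp */
	printf("%08x\n", make_bp(0xe3a00000u));	/* MOV r0,#0 -> unconditional bp */
	return 0;
}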
diff --git a/arch/arm/kernel/uprobes.h b/arch/arm/kernel/uprobes.h
new file mode 100644
index 00000000000..1d0c12dfbd0
--- /dev/null
+++ b/arch/arm/kernel/uprobes.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2012 Rabin Vincent <rabin@rab.in>
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#ifndef __ARM_KERNEL_UPROBES_H
+#define __ARM_KERNEL_UPROBES_H
+
+enum probes_insn uprobe_decode_ldmstm(probes_opcode_t insn,
+ struct arch_probes_insn *asi,
+ const struct decode_header *d);
+
+enum probes_insn decode_ldr(probes_opcode_t insn,
+ struct arch_probes_insn *asi,
+ const struct decode_header *d);
+
+enum probes_insn
+decode_rd12rn16rm0rs8_rwflags(probes_opcode_t insn,
+ struct arch_probes_insn *asi,
+ const struct decode_header *d);
+
+enum probes_insn
+decode_wb_pc(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *d, bool alu);
+
+enum probes_insn
+decode_pc_ro(probes_opcode_t insn, struct arch_probes_insn *asi,
+ const struct decode_header *d);
+
+extern const union decode_action uprobes_probes_actions[];
+
+#endif