author    Andreas Herrmann <andreas.herrmann3@amd.com>	2009-06-08 19:09:39 +0200
committer Greg Kroah-Hartman <gregkh@suse.de>	2009-07-02 16:49:54 -0700
commit    a0baa4304e2043545d20db7e3e7a694cf28cc375 (patch)
tree      830d05218eaa1b671216dc6cad0209fa685dd92c /arch/x86
parent    e78a00fca1f50b3ce9da1e2e814d80103028a905 (diff)
x86: memtest: remove 64-bit division
commit c9690998ef48ffefeccb91c70a7739eebdea57f9 upstream.

Using gcc 3.3.5 a "make allmodconfig" + "CONFIG_KVM=n" triggers a build error:

  arch/x86/mm/built-in.o(.init.text+0x43f7): In function `__change_page_attr':
  arch/x86/mm/pageattr.c:114: undefined reference to `__udivdi3'
  make: *** [.tmp_vmlinux1] Error 1

The culprit turned out to be a division in arch/x86/mm/memtest.c. For more info see this thread:

  http://marc.info/?l=linux-kernel&m=124416232620683

The patch entirely removes the division that caused the build error.

[ Impact: build fix with certain GCC versions ]

Reported-by: Tetsuo Handa <penguin-kernel@i-love.sakura.ne.jp>
Signed-off-by: Andreas Herrmann <andreas.herrmann3@amd.com>
Cc: Yinghai Lu <yinghai@kernel.org>
Cc: xiyou.wangcong@gmail.com
Cc: Andrew Morton <akpm@linux-foundation.org>
LKML-Reference: <20090608170939.GB12431@alberich.amd.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Greg Kroah-Hartman <gregkh@suse.de>
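The problematic statement is "count = (size - (start_phys_aligned - start_phys))/incr": the dividend is a u64, and on 32-bit x86 gcc 3.3.5 lowers the 64-bit division to a call into libgcc's __udivdi3, a symbol the kernel does not provide. The patch sidesteps the division by computing an end pointer and filling until the walking pointer reaches it. Below is a minimal userspace sketch of the before/after loop shapes, assuming nothing kernel-specific; the function names fill_by_count and fill_by_pointer are invented for illustration and are not part of the patch.

  #include <stdint.h>
  #include <stdio.h>
  #include <string.h>

  /* Old shape: the element count is derived with a 64-bit division.
   * Older compilers (the report was against gcc 3.3.5) do not reduce
   * the division by 'incr' to a shift and instead emit a call to
   * libgcc's __udivdi3 on 32-bit x86 -- hence the link error in a
   * kernel build, which does not link against libgcc. */
  static void fill_by_count(uint64_t *start, uint64_t size, uint64_t pattern)
  {
  	size_t incr = sizeof(pattern);
  	uint64_t i, count = size / incr;	/* 64-bit division */

  	for (i = 0; i < count; i++)
  		start[i] = pattern;
  }

  /* New shape: compute an end pointer with additions only and walk the
   * buffer until the pointer reaches it -- no division is emitted. */
  static void fill_by_pointer(void *start, uint64_t size, uint64_t pattern)
  {
  	uint64_t *p;
  	void *end = (char *)start + size;

  	for (p = start; (void *)p < end; p++)
  		*p = pattern;
  }

  int main(void)
  {
  	uint64_t buf[8];

  	memset(buf, 0, sizeof(buf));
  	fill_by_count(buf, sizeof(buf), 0xaaaaaaaaaaaaaaaaULL);
  	fill_by_pointer(buf, sizeof(buf), 0x5555555555555555ULL);
  	printf("buf[0] = %#llx\n", (unsigned long long)buf[0]);
  	return 0;
  }

Because the end pointer is derived with subtraction and addition only, no libgcc helper is needed regardless of compiler version, which is exactly what the patch relies on.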
Diffstat (limited to 'arch/x86')
-rw-r--r--  arch/x86/mm/memtest.c  |  14
1 files changed, 7 insertions, 7 deletions
diff --git a/arch/x86/mm/memtest.c b/arch/x86/mm/memtest.c
index 605c8be0621..c0bedcd10f9 100644
--- a/arch/x86/mm/memtest.c
+++ b/arch/x86/mm/memtest.c
@@ -40,23 +40,23 @@ static void __init reserve_bad_mem(u64 pattern, u64 start_bad, u64 end_bad)
 
 static void __init memtest(u64 pattern, u64 start_phys, u64 size)
 {
-	u64 i, count;
-	u64 *start;
+	u64 *p;
+	void *start, *end;
 	u64 start_bad, last_bad;
 	u64 start_phys_aligned;
 	size_t incr;
 
 	incr = sizeof(pattern);
 	start_phys_aligned = ALIGN(start_phys, incr);
-	count = (size - (start_phys_aligned - start_phys))/incr;
 	start = __va(start_phys_aligned);
+	end = start + size - (start_phys_aligned - start_phys);
 	start_bad = 0;
 	last_bad = 0;
 
-	for (i = 0; i < count; i++)
-		start[i] = pattern;
-	for (i = 0; i < count; i++, start++, start_phys_aligned += incr) {
-		if (*start == pattern)
+	for (p = start; p < end; p++)
+		*p = pattern;
+	for (p = start; p < end; p++, start_phys_aligned += incr) {
+		if (*p == pattern)
 			continue;
 		if (start_phys_aligned == last_bad + incr) {
 			last_bad += incr;