From 5cba6d22e35a05adb28fdea191b232501518c455 Mon Sep 17 00:00:00 2001
From: Andrew Morton
Date: Tue, 4 Mar 2008 14:28:45 -0800
Subject: ndelay(): switch to C function to avoid 64-bit division

We should be able to do ndelay(some_u64), but that can cause a call to
__divdi3() to be emitted because the ndelay() macro does a divide.

Fix it by switching to a static inline, which forces the u64 arg to be
treated as an unsigned long.  udelay() takes an unsigned long arg.

[bunk@kernel.org: reported m68k build breakage]
Cc: Adrian Bunk
Cc: Evgeniy Polyakov
Cc: Martin Michlmayr
Cc: Herbert Xu
Cc: Ralf Baechle
Cc: Thomas Gleixner
Cc: Ingo Molnar
Cc: Adrian Bunk
Signed-off-by: Andrew Morton
Signed-off-by: Linus Torvalds
---
 include/linux/delay.h | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/include/linux/delay.h b/include/linux/delay.h
index 17ddb55430a..54552d21296 100644
--- a/include/linux/delay.h
+++ b/include/linux/delay.h
@@ -7,6 +7,8 @@
  * Delay routines, using a pre-computed "loops_per_jiffy" value.
  */
 
+#include <linux/kernel.h>
+
 extern unsigned long loops_per_jiffy;
 
 #include <asm/delay.h>
@@ -32,7 +34,11 @@ extern unsigned long loops_per_jiffy;
 #endif
 
 #ifndef ndelay
-#define ndelay(x)	udelay(((x)+999)/1000)
+static inline void ndelay(unsigned long x)
+{
+	udelay(DIV_ROUND_UP(x, 1000));
+}
+#define ndelay(x) ndelay(x)
 #endif
 
 void calibrate_delay(void);
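
A minimal userspace sketch of the trick above, for readers unfamiliar with
the __divdi3() problem.  This is not kernel code: fake_udelay() and
ndelay_old() are made-up names for illustration only, and DIV_ROUND_UP is
redefined locally with the semantics it has in linux/kernel.h.

/* Sketch only -- not kernel code. */
#include <stdio.h>
#include <stdint.h>

/* Same round-up division helper the patch pulls in from linux/kernel.h. */
#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

/* Hypothetical stand-in for the kernel's udelay(). */
static void fake_udelay(unsigned long usecs)
{
	printf("udelay(%lu)\n", usecs);
}

/*
 * Old style: with a u64 argument, ((x)+999)/1000 is a 64-bit divide,
 * which gcc on a 32-bit target compiles into a libgcc helper call
 * (__divdi3 and friends) -- unresolvable inside the kernel.
 */
#define ndelay_old(x)	fake_udelay(((x) + 999) / 1000)

/*
 * New style: the u64 is narrowed to unsigned long at the call boundary,
 * so the division inside is a native unsigned long divide.
 */
static inline void ndelay(unsigned long x)
{
	fake_udelay(DIV_ROUND_UP(x, 1000));
}
/* Self-definition so any later "#ifndef ndelay" still sees it as defined. */
#define ndelay(x) ndelay(x)

int main(void)
{
	uint64_t nsecs = 2500;	/* 2.5 microseconds */

	ndelay_old(nsecs);	/* 64-bit divide: prints udelay(3) */
	ndelay(nsecs);		/* narrowed divide: prints udelay(3) */
	return 0;
}

Both calls round 2500 ns up to 3 us; the difference is only in the width of
the division the compiler has to emit.  The closing "#define ndelay(x)
ndelay(x)" is the detail worth noting: architectures may supply their own
ndelay() macro via asm/delay.h, and generic code tests "#ifndef ndelay" to
detect that, so the generic static inline defines a same-named macro to keep
those tests working.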