5cba6d22e3
We should be able to do ndelay(some_u64), but that can cause a call to
__divdi3() to be emitted because the ndelay() macro does a divide.

Fix it by switching to a static inline, which will force the u64 arg to be
treated as an unsigned long.  udelay() takes an unsigned long arg.

[bunk@kernel.org: reported m68k build breakage]
Cc: Adrian Bunk <bunk@kernel.org>
Cc: Evgeniy Polyakov <johnpol@2ka.mipt.ru>
Cc: Martin Michlmayr <tbm@cyrius.com>
Cc: Herbert Xu <herbert@gondor.apana.org.au>
Cc: Ralf Baechle <ralf@linux-mips.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Ingo Molnar <mingo@elte.hu>
Cc: Adrian Bunk <bunk@kernel.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
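For illustration only, a minimal sketch of the change the message describes, assuming the earlier ndelay() was a dividing macro along these lines (the exact historical definition may have differed); the replacement static inline is the one that appears in the header below:

/* Hypothetical "before": a macro that divides its argument, so calling
 * it with a u64 drags in the libgcc 64-bit division helper (__divdi3)
 * on 32-bit architectures. */
#define ndelay(x)	udelay(((x) + 999) / 1000)

/* "After": a static inline whose unsigned long parameter makes a u64
 * argument get converted at the call site, so the division inside is
 * plain unsigned long arithmetic and no __divdi3 call is emitted. */
static inline void ndelay(unsigned long x)
{
	udelay(DIV_ROUND_UP(x, 1000));
}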
#ifndef _LINUX_DELAY_H
#define _LINUX_DELAY_H

/*
 * Copyright (C) 1993 Linus Torvalds
 *
 * Delay routines, using a pre-computed "loops_per_jiffy" value.
 */

#include <linux/kernel.h>

extern unsigned long loops_per_jiffy;

#include <asm/delay.h>

/*
 * Using udelay() for intervals greater than a few milliseconds can
 * risk overflow for high loops_per_jiffy (high bogomips) machines. The
 * mdelay() provides a wrapper to prevent this.  For delays greater
 * than MAX_UDELAY_MS milliseconds, the wrapper is used.  Architecture
 * specific values can be defined in asm-???/delay.h as an override.
 * The 2nd mdelay() definition ensures GCC will optimize away the
 * while loop for the common cases where n <= MAX_UDELAY_MS -- Paul G.
 */

#ifndef MAX_UDELAY_MS
#define MAX_UDELAY_MS	5
#endif

#ifndef mdelay
#define mdelay(n) (\
	(__builtin_constant_p(n) && (n)<=MAX_UDELAY_MS) ? udelay((n)*1000) : \
	({unsigned long __ms=(n); while (__ms--) udelay(1000);}))
#endif

#ifndef ndelay
static inline void ndelay(unsigned long x)
{
	udelay(DIV_ROUND_UP(x, 1000));
}
#define ndelay(x) ndelay(x)
#endif

void calibrate_delay(void);
void msleep(unsigned int msecs);
unsigned long msleep_interruptible(unsigned int msecs);

static inline void ssleep(unsigned int seconds)
{
	msleep(seconds * 1000);
}

#endif /* defined(_LINUX_DELAY_H) */
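As a usage illustration (a hypothetical caller, not part of the commit): with ndelay() defined as the static inline above, a 64-bit nanosecond count is simply narrowed to unsigned long at the call site, so no 64-bit division helper is needed on 32-bit architectures.

#include <linux/types.h>
#include <linux/delay.h>

/* Hypothetical driver snippet: settle_ns is converted to unsigned long
 * when passed to ndelay(), so DIV_ROUND_UP() inside the inline operates
 * on unsigned long and no __divdi3 call is emitted. */
static void example_settle(u64 settle_ns)
{
	ndelay(settle_ns);
}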