I have this code:

#define spin_event_timeout(condition, timeout, delay, rc)		\
{									\
	unsigned long __loops = tb_ticks_per_usec * timeout;		\
	unsigned long __start = get_tbl();				\
	while (!(rc = (condition)) && (tb_ticks_since(__start) <= __loops)) \
		if (delay)						\
			udelay(delay);					\
		else							\
			cpu_relax();					\
}

Assuming that the 'delay' parameter is a constant, will gcc optimize out
the "if (delay)" when this macro is compiled?  That is, is the above code
equivalent to:

#define spin_event_timeout(condition, timeout, delay, rc)		\
{									\
	unsigned long __loops = tb_ticks_per_usec * timeout;		\
	unsigned long __start = get_tbl();				\
	if (delay)							\
		while (!(rc = (condition)) && (tb_ticks_since(__start) <= __loops)) \
			udelay(delay);					\
	else								\
		while (!(rc = (condition)) && (tb_ticks_since(__start) <= __loops)) \
			cpu_relax();					\
}

-- 
Timur Tabi
Linux kernel developer at Freescale
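P.S. One way to check this is to compile a self-contained reduction of the
macro with "gcc -O2 -S" and see whether the if (delay) test survives inside
the loop.  The sketch below uses fake_* stand-ins for get_tbl(), udelay(),
cpu_relax(), and tb_ticks_per_usec (those only exist in the kernel), and it
approximates tb_ticks_since() with a plain subtraction, so it only shows the
branch-folding behavior, not the real timing:

/* unswitch_test.c -- inspect the output with: gcc -O2 -S unswitch_test.c */

static unsigned long fake_ticks_per_usec = 100;

static unsigned long fake_get_ticks(void)
{
	unsigned long t;
	/* empty asm with an output operand keeps gcc from folding the loop */
	asm volatile("" : "=r" (t));
	return t;
}

static void fake_udelay(unsigned long usecs)
{
	asm volatile("" :: "r" (usecs));
}

static void fake_cpu_relax(void)
{
	asm volatile("");
}

#define spin_event_timeout(condition, timeout, delay, rc)		\
{									\
	unsigned long __loops = fake_ticks_per_usec * timeout;		\
	unsigned long __start = fake_get_ticks();			\
	while (!(rc = (condition)) &&					\
	       (fake_get_ticks() - __start <= __loops))			\
		if (delay)						\
			fake_udelay(delay);				\
		else							\
			fake_cpu_relax();				\
}

int poll_with_delay(volatile int *flag)
{
	int rc;
	/* 'delay' is the constant 10: check whether the emitted loop
	 * still contains a test on it, or only the udelay path. */
	spin_event_timeout(*flag, 1000, 10, rc);
	return rc;
}

int poll_without_delay(volatile int *flag)
{
	int rc;
	/* 'delay' is the constant 0: the udelay branch should be dead code. */
	spin_event_timeout(*flag, 1000, 0, rc);
	return rc;
}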