// RUN: %llvmgcc -S %s -o - | FileCheck %s
extern int printf(const char *, ...);

// Regression test: the frontend must NOT distribute the multiply into both
// arms of the implicit MIN below (see the Radar note after the function).
// 'bad' is static and never called; it exists only so FileCheck can inspect
// the code generated for it.  Do not simplify or reassociate the expression —
// its exact shape is what the CHECK lines verify.
static void bad(unsigned int v1, unsigned int v2) {
  // (cond ? v2 : v1) - 1273463329u is MIN(v2 - C, v1 - C) in disguise;
  // exactly one 32-bit multiply should survive, applied AFTER the MIN.
  printf("%u\n", 1631381461u * (((v2 - 1273463329u <= v1 - 1273463329u) ? v2 : v1) - 1273463329u) + 121322179u);
}
// Radar 8198362
// GCC FE wants to convert the above to
//   1631381461u * MIN(v2 - 1273463329u, v1 - 1273463329u)
// and then to
//   MIN(1631381461u * v2 - 4047041419, 1631381461u * v1 - 4047041419)
//
// 1631381461u * 1273463329u = 2077504466193943669, but 32-bit overflow clips
// this to 4047041419. This breaks the comparison implicit in the MIN().
// Two multiply operations suggests the bad optimization is happening;
// one multiplication, after the MIN(), is correct.
// CHECK: mul
// CHECK-NOT: mul
// CHECK: ret