 	asm volatile("smmul %0, %1, %2" : "=r" (out) : "r" (a), "r" (b));
 	return out;
 #elif defined(KINETISL)
-	return 0; // TODO....
+	return ((int64_t)a * (int64_t)b) >> 32;
 #endif
 }
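The KINETISL part has no SMMUL, so the new branch computes the same thing in plain C: take the full signed 64-bit product and keep bits [63:32], which is exactly what SMMUL returns. Below is a minimal host-side sketch of that equivalence; the helper name and test values are mine, not anything from this header, and like the fallback it relies on arithmetic right shift of negative int64_t values.

#include <stdint.h>
#include <stdio.h>

/* Mirrors the KINETISL fallback above: high 32 bits of the signed 64-bit product. */
static int32_t mul32x32_rshift32_ref(int32_t a, int32_t b)
{
	return (int32_t)(((int64_t)a * (int64_t)b) >> 32);
}

int main(void)
{
	/* 0x40000000 * 0x40000000 = 2^60, so the high word is 2^28 = 0x10000000. */
	printf("%08x\n", (unsigned)mul32x32_rshift32_ref(0x40000000, 0x40000000));
	/* The sign carries through: the high word of -2^60 is -2^28 = 0xf0000000. */
	printf("%08x\n", (unsigned)mul32x32_rshift32_ref(-0x40000000, 0x40000000));
	return 0;
}

Nothing in the sketch depends on the target hardware, so a desktop compiler is enough to sanity-check the arithmetic.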
 	asm volatile("smmulr %0, %1, %2" : "=r" (out) : "r" (a), "r" (b));
 	return out;
 #elif defined(KINETISL)
-	return 0; // TODO....
+	return (((int64_t)a * (int64_t)b) + 0x8000000) >> 32;
 #endif
 }
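Same pattern for the rounded variant. One detail worth noting: SMMULR rounds by adding 0x80000000 to the 64-bit product before taking the top word, while the C branch above adds 0x8000000, so the two can differ by one near the halfway point; whether the smaller constant is intentional isn't clear from this hunk alone. The sketch below, with illustrative helper names, shows a pair of inputs where they diverge.

#include <stdint.h>
#include <stdio.h>

/* Exactly the expression in the KINETISL branch above. */
static int32_t rshift32_rounded_as_written(int32_t a, int32_t b)
{
	return (int32_t)((((int64_t)a * (int64_t)b) + 0x8000000) >> 32);
}

/* Same computation, but using SMMULR's documented rounding bias. */
static int32_t rshift32_rounded_like_smmulr(int32_t a, int32_t b)
{
	return (int32_t)((((int64_t)a * (int64_t)b) + 0x80000000LL) >> 32);
}

int main(void)
{
	int32_t a = 0x00010000, b = 0x00018000;   /* product is 0x180000000 */
	printf("%d %d\n", (int)rshift32_rounded_as_written(a, b),
	                  (int)rshift32_rounded_like_smmulr(a, b));   /* prints: 1 2 */
	return 0;
}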
 	asm volatile("smmlar %0, %2, %3, %1" : "=r" (out) : "r" (sum), "r" (a), "r" (b));
 	return out;
 #elif defined(KINETISL)
-	return 0; // TODO....
+	return sum + ((((int64_t)a * (int64_t)b) + 0x8000000) >> 32);
 #endif
 }
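The accumulate form is the building block for scale-and-sum loops: each call adds one rounded high-word product to a running total. A usage sketch under that reading follows; every name in it is illustrative rather than taken from the header.

#include <stdint.h>
#include <stdio.h>

/* Mirrors the KINETISL multiply-accumulate branch above. */
static int32_t mac32x32_rshift32_rounded_ref(int32_t sum, int32_t a, int32_t b)
{
	return (int32_t)(sum + ((((int64_t)a * (int64_t)b) + 0x8000000) >> 32));
}

/* Mix two streams of 32-bit samples with 32-bit gains into out[]. */
static void mix2(int32_t *out, const int32_t *in1, int32_t gain1,
                 const int32_t *in2, int32_t gain2, int n)
{
	for (int i = 0; i < n; i++) {
		int32_t acc = mac32x32_rshift32_rounded_ref(0, in1[i], gain1);
		out[i] = mac32x32_rshift32_rounded_ref(acc, in2[i], gain2);
	}
}

int main(void)
{
	int32_t in1[2] = { 1 << 30, -(1 << 30) }, in2[2] = { 1 << 29, 1 << 29 };
	int32_t out[2];
	mix2(out, in1, 1 << 30, in2, 1 << 30, 2);
	printf("%08x %08x\n", (unsigned)out[0], (unsigned)out[1]);
	return 0;
}

The sum stays in 32 bits with no saturation, so, as with the asm path, keeping the accumulated terms in range is the caller's job.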
 	asm volatile("smmlsr %0, %2, %3, %1" : "=r" (out) : "r" (sum), "r" (a), "r" (b));
 	return out;
 #elif defined(KINETISL)
-	return 0; // TODO....
+	return sum - ((((int64_t)a * (int64_t)b) + 0x8000000) >> 32);
 #endif
 }
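With the subtract form, the pair covers difference-of-products shapes. As one common combination, here is a fixed-point complex multiply built from the two fallback expressions above; the function names are mine, and the sketch is only meant to show how the add and subtract variants pair up.

#include <stdint.h>

/* Rounded high-word multiply-accumulate and multiply-subtract, repeating the
 * two KINETISL branches above. */
static int32_t mac_hi_rounded(int32_t sum, int32_t a, int32_t b)
{
	return (int32_t)(sum + ((((int64_t)a * (int64_t)b) + 0x8000000) >> 32));
}

static int32_t msub_hi_rounded(int32_t sum, int32_t a, int32_t b)
{
	return (int32_t)(sum - ((((int64_t)a * (int64_t)b) + 0x8000000) >> 32));
}

/* (ar + j*ai) * (br + j*bi): the real part is a difference of products,
 * the imaginary part is a sum of products. */
void cmul_fixed(int32_t ar, int32_t ai, int32_t br, int32_t bi,
                int32_t *re, int32_t *im)
{
	*re = msub_hi_rounded(mac_hi_rounded(0, ar, br), ai, bi);
	*im = mac_hi_rounded(mac_hi_rounded(0, ar, bi), ai, br);
}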