Diffstat (limited to 'arch/arm/lib/udivdi3.c')
-rw-r--r--  arch/arm/lib/udivdi3.c  372
1 file changed, 176 insertions, 196 deletions
diff --git a/arch/arm/lib/udivdi3.c b/arch/arm/lib/udivdi3.c
index df1d5ef62bc..e343be4c664 100644
--- a/arch/arm/lib/udivdi3.c
+++ b/arch/arm/lib/udivdi3.c
@@ -32,211 +32,191 @@ Boston, MA 02111-1307, USA. */
#include "gcclib.h"
#include "longlong.h"
-static const u8 __clz_tab[] =
-{
- 0,1,2,2,3,3,3,3,4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
- 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
- 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
- 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
- 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
- 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
- 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
- 8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
+static const u8 __clz_tab[] = {
+ 0, 1, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 6, 6, 6, 6, 6, 6, 6, 6,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7,
+ 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
+ 8, 8, 8, 8, 8, 8, 8, 8,
+ 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
+ 8, 8, 8, 8, 8, 8, 8, 8,
+ 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
+ 8, 8, 8, 8, 8, 8, 8, 8,
+ 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
+ 8, 8, 8, 8, 8, 8, 8, 8,
};
-u64
-__udivmoddi4 (u64 n, u64 d, u64 *rp)
+u64 __udivmoddi4(u64 n, u64 d, u64 * rp)
{
- DIunion ww;
- DIunion nn, dd;
- DIunion rr;
- u32 d0, d1, n0, n1, n2;
- u32 q0, q1;
- u32 b, bm;
-
- nn.ll = n;
- dd.ll = d;
-
- d0 = dd.s.low;
- d1 = dd.s.high;
- n0 = nn.s.low;
- n1 = nn.s.high;
-
- if (d1 == 0)
- {
- if (d0 > n1)
- {
- /* 0q = nn / 0D */
-
- count_leading_zeros (bm, d0);
-
- if (bm != 0)
- {
- /* Normalize, i.e. make the most significant bit of the
- denominator set. */
-
- d0 = d0 << bm;
- n1 = (n1 << bm) | (n0 >> (SI_TYPE_SIZE - bm));
- n0 = n0 << bm;
- }
-
- udiv_qrnnd (q0, n0, n1, n0, d0);
- q1 = 0;
-
- /* Remainder in n0 >> bm. */
- }
- else
- {
- /* qq = NN / 0d */
-
- if (d0 == 0)
- d0 = 1 / d0; /* Divide intentionally by zero. */
-
- count_leading_zeros (bm, d0);
-
- if (bm == 0)
- {
- /* From (n1 >= d0) /\ (the most significant bit of d0 is set),
- conclude (the most significant bit of n1 is set) /\ (the
- leading quotient digit q1 = 1).
-
- This special case is necessary, not an optimization.
- (Shifts counts of SI_TYPE_SIZE are undefined.) */
-
- n1 -= d0;
- q1 = 1;
- }
- else
- {
- /* Normalize. */
-
- b = SI_TYPE_SIZE - bm;
-
- d0 = d0 << bm;
- n2 = n1 >> b;
- n1 = (n1 << bm) | (n0 >> b);
- n0 = n0 << bm;
-
- udiv_qrnnd (q1, n1, n2, n1, d0);
- }
-
- /* n1 != d0... */
-
- udiv_qrnnd (q0, n0, n1, n0, d0);
-
- /* Remainder in n0 >> bm. */
- }
-
- if (rp != 0)
- {
- rr.s.low = n0 >> bm;
- rr.s.high = 0;
- *rp = rr.ll;
- }
- }
- else
- {
- if (d1 > n1)
- {
- /* 00 = nn / DD */
-
- q0 = 0;
- q1 = 0;
-
- /* Remainder in n1n0. */
- if (rp != 0)
- {
- rr.s.low = n0;
- rr.s.high = n1;
- *rp = rr.ll;
- }
- }
- else
- {
- /* 0q = NN / dd */
-
- count_leading_zeros (bm, d1);
- if (bm == 0)
- {
- /* From (n1 >= d1) /\ (the most significant bit of d1 is set),
- conclude (the most significant bit of n1 is set) /\ (the
- quotient digit q0 = 0 or 1).
-
- This special case is necessary, not an optimization. */
-
- /* The condition on the next line takes advantage of that
- n1 >= d1 (true due to program flow). */
- if (n1 > d1 || n0 >= d0)
- {
- q0 = 1;
- sub_ddmmss (n1, n0, n1, n0, d1, d0);
- }
- else
- q0 = 0;
-
- q1 = 0;
-
- if (rp != 0)
- {
- rr.s.low = n0;
- rr.s.high = n1;
- *rp = rr.ll;
- }
- }
- else
- {
- u32 m1, m0;
- /* Normalize. */
-
- b = SI_TYPE_SIZE - bm;
-
- d1 = (d1 << bm) | (d0 >> b);
- d0 = d0 << bm;
- n2 = n1 >> b;
- n1 = (n1 << bm) | (n0 >> b);
- n0 = n0 << bm;
-
- udiv_qrnnd (q0, n1, n2, n1, d1);
- umul_ppmm (m1, m0, q0, d0);
-
- if (m1 > n1 || (m1 == n1 && m0 > n0))
- {
- q0--;
- sub_ddmmss (m1, m0, m1, m0, d1, d0);
- }
-
- q1 = 0;
-
- /* Remainder in (n1n0 - m1m0) >> bm. */
- if (rp != 0)
- {
- sub_ddmmss (n1, n0, n1, n0, m1, m0);
- rr.s.low = (n1 << b) | (n0 >> bm);
- rr.s.high = n1 >> bm;
- *rp = rr.ll;
- }
- }
- }
- }
-
- ww.s.low = q0;
- ww.s.high = q1;
- return ww.ll;
+ DIunion ww;
+ DIunion nn, dd;
+ DIunion rr;
+ u32 d0, d1, n0, n1, n2;
+ u32 q0, q1;
+ u32 b, bm;
+
+ nn.ll = n;
+ dd.ll = d;
+
+ d0 = dd.s.low;
+ d1 = dd.s.high;
+ n0 = nn.s.low;
+ n1 = nn.s.high;
+
+ if (d1 == 0) {
+ if (d0 > n1) {
+ /* 0q = nn / 0D */
+
+ count_leading_zeros(bm, d0);
+
+ if (bm != 0) {
+ /* Normalize, i.e. make the most significant bit of the
+ denominator set. */
+
+ d0 = d0 << bm;
+ n1 = (n1 << bm) | (n0 >> (SI_TYPE_SIZE - bm));
+ n0 = n0 << bm;
+ }
+
+ udiv_qrnnd(q0, n0, n1, n0, d0);
+ q1 = 0;
+
+ /* Remainder in n0 >> bm. */
+ } else {
+ /* qq = NN / 0d */
+
+ if (d0 == 0)
+ d0 = 1 / d0; /* Divide intentionally by zero. */
+
+ count_leading_zeros(bm, d0);
+
+ if (bm == 0) {
+ /* From (n1 >= d0) /\ (the most significant bit of d0 is set),
+ conclude (the most significant bit of n1 is set) /\ (the
+ leading quotient digit q1 = 1).
+
+ This special case is necessary, not an optimization.
+ (Shifts counts of SI_TYPE_SIZE are undefined.) */
+
+ n1 -= d0;
+ q1 = 1;
+ } else {
+ /* Normalize. */
+
+ b = SI_TYPE_SIZE - bm;
+
+ d0 = d0 << bm;
+ n2 = n1 >> b;
+ n1 = (n1 << bm) | (n0 >> b);
+ n0 = n0 << bm;
+
+ udiv_qrnnd(q1, n1, n2, n1, d0);
+ }
+
+ /* n1 != d0... */
+
+ udiv_qrnnd(q0, n0, n1, n0, d0);
+
+ /* Remainder in n0 >> bm. */
+ }
+
+ if (rp != 0) {
+ rr.s.low = n0 >> bm;
+ rr.s.high = 0;
+ *rp = rr.ll;
+ }
+ } else {
+ if (d1 > n1) {
+ /* 00 = nn / DD */
+
+ q0 = 0;
+ q1 = 0;
+
+ /* Remainder in n1n0. */
+ if (rp != 0) {
+ rr.s.low = n0;
+ rr.s.high = n1;
+ *rp = rr.ll;
+ }
+ } else {
+ /* 0q = NN / dd */
+
+ count_leading_zeros(bm, d1);
+ if (bm == 0) {
+ /* From (n1 >= d1) /\ (the most significant bit of d1 is set),
+ conclude (the most significant bit of n1 is set) /\ (the
+ quotient digit q0 = 0 or 1).
+
+ This special case is necessary, not an optimization. */
+
+ /* The condition on the next line takes advantage of that
+ n1 >= d1 (true due to program flow). */
+ if (n1 > d1 || n0 >= d0) {
+ q0 = 1;
+ sub_ddmmss(n1, n0, n1, n0, d1, d0);
+ } else
+ q0 = 0;
+
+ q1 = 0;
+
+ if (rp != 0) {
+ rr.s.low = n0;
+ rr.s.high = n1;
+ *rp = rr.ll;
+ }
+ } else {
+ u32 m1, m0;
+ /* Normalize. */
+
+ b = SI_TYPE_SIZE - bm;
+
+ d1 = (d1 << bm) | (d0 >> b);
+ d0 = d0 << bm;
+ n2 = n1 >> b;
+ n1 = (n1 << bm) | (n0 >> b);
+ n0 = n0 << bm;
+
+ udiv_qrnnd(q0, n1, n2, n1, d1);
+ umul_ppmm(m1, m0, q0, d0);
+
+ if (m1 > n1 || (m1 == n1 && m0 > n0)) {
+ q0--;
+ sub_ddmmss(m1, m0, m1, m0, d1, d0);
+ }
+
+ q1 = 0;
+
+ /* Remainder in (n1n0 - m1m0) >> bm. */
+ if (rp != 0) {
+ sub_ddmmss(n1, n0, n1, n0, m1, m0);
+ rr.s.low = (n1 << b) | (n0 >> bm);
+ rr.s.high = n1 >> bm;
+ *rp = rr.ll;
+ }
+ }
+ }
+ }
+
+ ww.s.low = q0;
+ ww.s.high = q1;
+ return ww.ll;
}
-u64
-__udivdi3 (u64 n, u64 d)
+u64 __udivdi3(u64 n, u64 d)
{
- return __udivmoddi4 (n, d, (u64 *) 0);
+ return __udivmoddi4(n, d, (u64 *) 0);
}
-u64
-__umoddi3 (u64 u, u64 v)
+u64 __umoddi3(u64 u, u64 v)
{
- u64 w;
+ u64 w;
- (void) __udivmoddi4 (u ,v, &w);
+ (void)__udivmoddi4(u, v, &w);
- return w;
+ return w;
}
-
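
Note on the __clz_tab[] array near the top of the hunk: it is indexed by a byte value and holds that byte's bit length (1 for 1, 2 for 2..3, up to 8 for 128..255, and 0 for 0). The generic count_leading_zeros() fallback in longlong.h uses it to find the leading-zero count of a 32-bit word with one table lookup, which is what the normalization steps above rely on. The following is only a rough sketch of that idea, written as if appended to this file; clz32 is a made-up name and the real macro in longlong.h may differ per architecture.

/*
 * Sketch only: how __clz_tab is meant to be used.  This mirrors the
 * generic count_leading_zeros() fallback in longlong.h but is not a
 * copy of it.  u32 comes from gcclib.h when this sits next to the
 * code above.
 */
static unsigned int clz32(u32 x)
{
	unsigned int shift;

	/* Locate the highest non-zero byte of x. */
	if (x >= (u32) 1 << 16)
		shift = (x >= (u32) 1 << 24) ? 24 : 16;
	else
		shift = (x >= (u32) 1 << 8) ? 8 : 0;

	/* __clz_tab[b] is the bit length of byte b, so the word's bit
	 * length is __clz_tab[x >> shift] + shift, and the leading-zero
	 * count is 32 minus that.  (For x == 0 this returns 32; the
	 * callers above never pass zero anyway.) */
	return 32 - (__clz_tab[x >> shift] + shift);
}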
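
At the bottom of the hunk, __udivdi3() and __umoddi3() are thin wrappers: both call __udivmoddi4(), which returns the quotient and, when rp is non-NULL, stores the remainder through rp. A minimal, hypothetical consistency check is sketched below, again written as if appended to this file; check_div64 is a made-up name and is not part of this commit.

/* Hypothetical sanity check: the wrappers must agree with the combined
 * routine, and quotient/remainder must satisfy n == q * d + r with
 * r < d (q * d cannot overflow since q = n / d).  u64 from gcclib.h. */
static int check_div64(u64 n, u64 d)
{
	u64 r, q;

	q = __udivmoddi4(n, d, &r);

	return q == __udivdi3(n, d) &&
	       r == __umoddi3(n, d) &&
	       n == q * d + r &&
	       r < d;
}

Exercising such a check with one divisor whose high 32 bits are zero and one whose high bits are non-zero would cover both major branches (d1 == 0 and d1 != 0) of __udivmoddi4() above.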