1/* mpn_mod_1_1p (ap, n, b, cps)
2   Divide (ap,,n) by b.  Return the single-limb remainder.
3
4   Contributed to the GNU project by Torbjorn Granlund.
5   Based on a suggestion by Peter L. Montgomery.
6
7   THE FUNCTIONS IN THIS FILE ARE INTERNAL WITH MUTABLE INTERFACES.  IT IS ONLY
8   SAFE TO REACH THEM THROUGH DOCUMENTED INTERFACES.  IN FACT, IT IS ALMOST
9   GUARANTEED THAT THEY WILL CHANGE OR DISAPPEAR IN A FUTURE GNU MP RELEASE.
10
11Copyright 2008, 2009 Free Software Foundation, Inc.
12
13This file is part of the GNU MP Library.
14
15The GNU MP Library is free software; you can redistribute it and/or modify
16it under the terms of the GNU Lesser General Public License as published by
17the Free Software Foundation; either version 3 of the License, or (at your
18option) any later version.
19
20The GNU MP Library is distributed in the hope that it will be useful, but
21WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
22or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
23License for more details.
24
25You should have received a copy of the GNU Lesser General Public License
26along with the GNU MP Library.  If not, see http://www.gnu.org/licenses/.  */
27
28#include "gmp.h"
29#include "gmp-impl.h"
30#include "longlong.h"
31
32void
33mpn_mod_1_1p_cps (mp_limb_t cps[4], mp_limb_t b)
34{
35  mp_limb_t bi;
36  mp_limb_t B1modb, B2modb;
37  int cnt;
38
39  count_leading_zeros (cnt, b);
40
41  b <<= cnt;
42  invert_limb (bi, b);
43
44  if (UNLIKELY (cnt == 0))
45    B1modb = -b;
46  else
47    B1modb = -b * ((bi >> (GMP_LIMB_BITS-cnt)) | (CNST_LIMB(1) << cnt));
48  ASSERT (B1modb <= b);		/* NB: not fully reduced mod b */
49  udiv_rnd_preinv (B2modb, B1modb, b, bi);
50
51  cps[0] = bi;
52  cps[1] = cnt;
53  cps[2] = B1modb >> cnt;
54  cps[3] = B2modb >> cnt;
55}
56
/* Return the remainder of (ap,n) divided by the limb behind bmodb[].
   bmodb[] is the parameter block built by mpn_mod_1_1p_cps:
     bmodb[0] = pre-inverted normalized divisor
     bmodb[1] = normalization shift count
     bmodb[2] = B   mod b (shifted down)
     bmodb[3] = B^2 mod b (shifted down), B = 2^GMP_LIMB_BITS.
   NOTE(review): bmodb[0] was computed from the *normalized* (left-shifted)
   divisor in mpn_mod_1_1p_cps, yet it is paired with the b argument in
   udiv_qrnnd_preinv below — so callers appear to pass b already shifted
   left by bmodb[1].  Confirm against the callers (e.g. mpn_mod_1).  */
mp_limb_t
mpn_mod_1_1p (mp_srcptr ap, mp_size_t n, mp_limb_t b, mp_limb_t bmodb[4])
{
  mp_limb_t rh, rl, bi, q, ph, pl, r;
  mp_limb_t B1modb, B2modb;
  mp_size_t i;
  int cnt;
  mp_limb_t mask;

  ASSERT (n >= 2);		/* fix tuneup.c if this is changed */

  B1modb = bmodb[2];
  B2modb = bmodb[3];

  /* Seed the 2-limb accumulator from the two most significant limbs:
     (rh,rl) = ap[n-1] * (B mod b) + ap[n-2], congruent to the top two
     limbs of the input mod b.  */
  umul_ppmm (ph, pl, ap[n - 1], B1modb);
  add_ssaaaa (rh, rl, ph, pl, 0, ap[n - 2]);

  /* Fold in one low limb per iteration, keeping (rh,rl) congruent to the
     processed part of (ap,n) mod b.  The umul/add ordering is
     carry-chain critical; do not reorder.  */
  for (i = n - 3; i >= 0; i -= 1)
    {
      /* rr = ap[i]				< B
	    + LO(rr)  * (B mod b)		<= (B-1)(b-1)
	    + HI(rr)  * (B^2 mod b)		<= (B-1)(b-1)
      */
      umul_ppmm (ph, pl, rl, B1modb);
      add_ssaaaa (ph, pl, ph, pl, 0, ap[i]);

      umul_ppmm (rh, rl, rh, B2modb);
      add_ssaaaa (rh, rl, rh, rl, ph, pl);
    }

  bi = bmodb[0];
  cnt = bmodb[1];

  /* Shift the 2-limb residue into the normalized domain.  The cnt == 0
     case is excluded because a shift by GMP_LIMB_BITS would be undefined.  */
  if (LIKELY (cnt != 0))
    rh = (rh << cnt) | (rl >> (GMP_LIMB_BITS - cnt));

  /* Branch-free conditional subtract: ensure rh < b so the 2/1 division
     below has a valid (non-overflowing) numerator.  */
  mask = -(mp_limb_t) (rh >= b);
  rh -= mask & b;

  /* Final 2/1 division by the pre-inverted divisor; the quotient q is
     computed by the macro but deliberately unused.  */
  udiv_qrnnd_preinv (q, r, rh, rl << cnt, b, bi);

  return r >> cnt;	/* undo the normalization shift */
}
100