author     Petr Baudis <pasky@ucw.cz>   2010-08-22 16:44:15 +0200
committer  Petr Baudis <pasky@suse.cz>  2010-11-16 02:50:16 +0100
commit     9fcff7a79b60cd9202fdacafdbd5fb369933c64b (patch)
tree       bb07b9f932cea94dae8e48466ad8d0ba3b688da7
parent     75134e46476263ab116be341531cadb1e6ab87d6 (diff)
Fix jn() precision problems around zero points of j0()
There appears to be a really nasty bug in jn() from fdlibm, which is the foundation for most libm implementations (including glibc libm). The zeroth-order j0() and first-order j1() cylindrical Bessel functions are used to recursively generate the jn() value, but only the zeroth-order Bessel function is used to normalize it. However, each of these functions becomes highly imprecise (approaching "bogus") near its zero points, which makes the jn() value itself bogus. In fact, the zero points of j0() and j1() never coincide, so j1() should be used whenever it is more precise than j0(), that is, simply whenever its value is further from zero.

As an example, 2.4048255576957729 is the first zero of j0(). The proper value as calculated by Mathematica is 0.19899990535769..., yet jn() returns -inf on a 64-bit arch and 0.185007 on a 32-bit arch. With the proposed patch below, the returned value is 0.199000.

The fix is based on work by Steve Kargl and Tobias Burnus.
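As a reference for the idea (not the glibc implementation itself), here is a minimal standalone sketch: Miller's backward recurrence produces values proportional to J_k(x), which are then normalized against whichever of j0(x) or j1(x) lies further from zero. The starting order n + 32, the tiny seed, and the name jn_by_recurrence are illustrative assumptions only; the order 3 in the driver is inferred from the 0.19899990535769... reference value quoted above. On glibc, build with e.g. 'cc demo.c -lm'.

    #define _XOPEN_SOURCE 600   /* for j0(), j1(), jn() in <math.h> */
    #include <math.h>
    #include <stdio.h>

    /* Miller's backward recurrence for J_n(x), normalized against whichever
       of j0(x)/j1(x) is further from zero -- the same idea as the patch.  */
    static double
    jn_by_recurrence (int n, double x)
    {
      int m = n + 32;              /* illustrative starting order */
      double b[m + 2];             /* unnormalized recurrence values */

      b[m + 1] = 0.0;
      b[m] = 1e-30;                /* arbitrary tiny seed */
      for (int k = m; k >= 1; k--)
        /* J_{k-1}(x) = (2k/x) J_k(x) - J_{k+1}(x) */
        b[k - 1] = (2.0 * k / x) * b[k] - b[k + 1];

      /* j0() and j1() have no common zeros, so normalize against the one
         further away from zero instead of always using j0().  */
      double z = j0 (x), w = j1 (x);
      if (fabs (z) >= fabs (w))
        return b[n] * (z / b[0]);
      else
        return b[n] * (w / b[1]);
    }

    int
    main (void)
    {
      double x = 2.4048255576957729;   /* near the first zero of j0() */
      printf ("recurrence jn(3, x) = %.15g\n", jn_by_recurrence (3, x));
      printf ("libm       jn(3, x) = %.15g\n", jn (3, x));
      return 0;
    }

With the j1()-based normalization this sketch should print a value close to 0.199000 at this point, whereas normalizing against j0() alone reproduces the loss of precision described above.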
-rw-r--r--  ChangeLog                            | 10
-rw-r--r--  sysdeps/ieee754/dbl-64/e_jn.c        | 11
-rw-r--r--  sysdeps/ieee754/flt-32/e_jnf.c       | 11
-rw-r--r--  sysdeps/ieee754/ldbl-128/e_jnl.c     | 11
-rw-r--r--  sysdeps/ieee754/ldbl-128ibm/e_jnl.c  | 11
-rw-r--r--  sysdeps/ieee754/ldbl-96/e_jnl.c      | 11
6 files changed, 60 insertions(+), 5 deletions(-)
diff --git a/ChangeLog b/ChangeLog
index 601f6f0..122a2b8 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,13 @@
+2010-05-12 Petr Baudis <pasky@suse.cz>
+
+ [BZ #11589]
+ * sysdeps/ieee754/dbl-64/e_jn.c: Compensate major precision loss
+ around j0() zero points by switching to j1().
+ * sysdeps/ieee754/flt-32/e_jnf.c: Likewise.
+ * sysdeps/ieee754/ldbl-128/e_jnl.c: Likewise.
+ * sysdeps/ieee754/ldbl-128ibm/e_jnl.c: Likewise.
+ * sysdeps/ieee754/ldbl-96/e_jnl.c: Likewise.
+
2010-06-01 Jan Sembera <jsembera@suse.cz>
[BZ #6812]
diff --git a/sysdeps/ieee754/dbl-64/e_jn.c b/sysdeps/ieee754/dbl-64/e_jn.c
index bf4a13d..d9d6f91 100644
--- a/sysdeps/ieee754/dbl-64/e_jn.c
+++ b/sysdeps/ieee754/dbl-64/e_jn.c
@@ -215,7 +215,16 @@ static double zero = 0.00000000000000000000e+00;
}
}
}
- b = (t*__ieee754_j0(x)/b);
+ /* j0() and j1() suffer enormous loss of precision at and
+ * near zero; however, we know that their zero points never
+ * coincide, so just choose the one further away from zero.
+ */
+ z = __ieee754_j0 (x);
+ w = __ieee754_j1 (x);
+ if (fabs (z) >= fabs (w))
+ b = (t * z / b);
+ else
+ b = (t * w / a);
}
}
if(sgn==1) return -b; else return b;
diff --git a/sysdeps/ieee754/flt-32/e_jnf.c b/sysdeps/ieee754/flt-32/e_jnf.c
index de2e53d..dd3d551 100644
--- a/sysdeps/ieee754/flt-32/e_jnf.c
+++ b/sysdeps/ieee754/flt-32/e_jnf.c
@@ -165,7 +165,16 @@ static float zero = 0.0000000000e+00;
}
}
}
- b = (t*__ieee754_j0f(x)/b);
+ /* j0() and j1() suffer enormous loss of precision at and
+ * near zero; however, we know that their zero points never
+ * coincide, so just choose the one further away from zero.
+ */
+ z = __ieee754_j0f (x);
+ w = __ieee754_j1f (x);
+ if (fabsf (z) >= fabsf (w))
+ b = (t * z / b);
+ else
+ b = (t * w / a);
}
}
if(sgn==1) return -b; else return b;
diff --git a/sysdeps/ieee754/ldbl-128/e_jnl.c b/sysdeps/ieee754/ldbl-128/e_jnl.c
index a4a4e24..a7f6772 100644
--- a/sysdeps/ieee754/ldbl-128/e_jnl.c
+++ b/sysdeps/ieee754/ldbl-128/e_jnl.c
@@ -285,7 +285,16 @@ __ieee754_jnl (n, x)
}
}
}
- b = (t * __ieee754_j0l (x) / b);
+ /* j0() and j1() suffer enormous loss of precision at and
+ * near zero; however, we know that their zero points never
+ * coincide, so just choose the one further away from zero.
+ */
+ z = __ieee754_j0l (x);
+ w = __ieee754_j1l (x);
+ if (fabsl (z) >= fabsl (w))
+ b = (t * z / b);
+ else
+ b = (t * w / a);
}
}
if (sgn == 1)
diff --git a/sysdeps/ieee754/ldbl-128ibm/e_jnl.c b/sysdeps/ieee754/ldbl-128ibm/e_jnl.c
index 0eea745..372f942 100644
--- a/sysdeps/ieee754/ldbl-128ibm/e_jnl.c
+++ b/sysdeps/ieee754/ldbl-128ibm/e_jnl.c
@@ -286,7 +286,16 @@ __ieee754_jnl (n, x)
}
}
}
- b = (t * __ieee754_j0l (x) / b);
+ /* j0() and j1() suffer enormous loss of precision at and
+ * near zero; however, we know that their zero points never
+ * coincide, so just choose the one further away from zero.
+ */
+ z = __ieee754_j0l (x);
+ w = __ieee754_j1l (x);
+ if (fabsl (z) >= fabsl (w))
+ b = (t * z / b);
+ else
+ b = (t * w / a);
}
}
if (sgn == 1)
diff --git a/sysdeps/ieee754/ldbl-96/e_jnl.c b/sysdeps/ieee754/ldbl-96/e_jnl.c
index 3d715d3..bedff7d 100644
--- a/sysdeps/ieee754/ldbl-96/e_jnl.c
+++ b/sysdeps/ieee754/ldbl-96/e_jnl.c
@@ -281,7 +281,16 @@ __ieee754_jnl (n, x)
}
}
}
- b = (t * __ieee754_j0l (x) / b);
+ /* j0() and j1() suffer enormous loss of precision at and
+ * near zero; however, we know that their zero points never
+ * coincide, so just choose the one further away from zero.
+ */
+ z = __ieee754_j0l (x);
+ w = __ieee754_j1l (x);
+ if (fabsl (z) >= fabsl (w))
+ b = (t * z / b);
+ else
+ b = (t * w / a);
}
}
if (sgn == 1)