Diff of /src/modules/glm/samplers/KS.cc [01a998] .. [081fbe]

--- a/src/modules/glm/samplers/KS.cc
+++ b/src/modules/glm/samplers/KS.cc
@@ -14,6 +14,13 @@
 
 static bool r_intvl(double u, double lambda)
 {
+    // Rejection algorithm based on an alternating series expansion
+    // of the KS distribution. This is valid for lambda > 4/3
+
+    if (u==0) {
+	return false;
+    }
+
     double z = 1;
     double x = exp(-lambda/2);
     int j = 0;
@@ -26,16 +33,23 @@
 	j++;
 	j2 = (j+1)*(j+1);
 	z += j2 * pow(x, j2 - 1);
-	if (z <= u)
+	if (z < u)
 	    return false;
     }
 }
 
 static bool l_intvl(double u, double lambda)
 {
+    // Rejection algorithm based on an alternating series expansion
+    // of the KS distribution. This is valid for lambda < pi^2
+
     if (u==0) {
-	return false;
+	return false; // Avoid numerical problems with log(u)
     }
+    if (lambda < 1E-3) {
+	return false; //Acceptance probability -> 0 in this range
+    }
+
     double h = 0.5*log(2.0) + 2.5*log(PI) - 2.5*log(lambda) - 
 	PI_SQUARE/(2 * lambda) + 0.5*lambda;
     double logu = log(u);
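
Both r_intvl and l_intvl implement Devroye's alternating-series squeeze: successive partial sums of the expansion bound the target alternately from below and from above, so the uniform draw u can usually be accepted or rejected after the first one or two terms. Only parts of the loops are visible in these hunks; the standalone sketch below reconstructs the right-interval check from the Holmes and Held (2006) pseudo-code, so treat it as an illustration rather than the file's exact code (the function name is illustrative).

#include <cmath>

// Sketch of the alternating-series squeeze for the right interval
// (valid for lambda > 4/3).  After a subtraction step the partial sum
// is a lower bound on the target; after an addition step it is an
// upper bound.
static bool right_interval_sketch(double u, double lambda)
{
    if (u == 0) return false;                 // mirrors the guard added in this patch
    double z = 1.0;                           // running partial sum
    double x = std::exp(-lambda / 2);
    int j = 0;
    while (true) {
        ++j;
        int j2 = (j + 1) * (j + 1);
        z -= j2 * std::pow(x, j2 - 1);        // lower bound
        if (z > u) return true;               // u below the lower bound: accept
        ++j;
        j2 = (j + 1) * (j + 1);
        z += j2 * std::pow(x, j2 - 1);        // upper bound
        if (z < u) return false;              // u above the upper bound: reject
    }
}
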
@@ -51,8 +65,53 @@
 	j++;
 	int j2 = (j+1)*(j+1);
 	z += j2*pow(x,j2-1);
-	if (h + log(z) <= logu)
+	if (h + log(z) < logu)
 	    return false;
+    }
+}
+
+static bool accept(double lambda, RNG *rng)
+{
+    // Accepts or rejects proposal lambda generated from a GIG
+    // distribution.  The rejection algorithm was proposed by Holmes
+    // and Held (2006), who adapted Devroye's (1986) algorithm for
+    // sampling from the KS distribution.
+
+    // We use separate alternating series expansions for lambda <=
+    // 3.1039 and lambda > 3.1039.  This cut-off was chosen after
+    // numerical investigation of the series expansions to optimize
+    // convergence.  With this cut-off, the error is no more than
+    // 3.6E-5 when the expansion is truncated to the first
+    // term. Hence, although both l_intvl and r_intvl contain infinite
+    // loops, they will exit almost immediately.
+
+    double u = rng->uniform();
+    return lambda > 3.1039 ? r_intvl(u, lambda) : l_intvl(u, lambda);
+}
+
+
+static double gig(double delta, RNG *rng)
+{
+    // Draws a sample from the generalized inverse gaussian distribution
+    // GIG(0.5, 1, delta^2) = delta/IG(1, delta). 
+    //
+    // The IG (Inverse Gaussian) distribution is sampled using the
+    // algorithm of Michael, Schucany and Haas (1976) as described by
+    // Devroye (1986).
+    //
+    // Note that as delta -> 0, GIG(0.5, 1, delta^2) tends to a
+    // chisquare distribution on 1 df. We use this approximation for
+    // small delta.
+    
+    double y = rng->normal();
+    y = y * y;
+    if (delta <= 1.0E-6 * y) {
+	// Inversion below is numerically unstable when delta/y is this small
+	return y;
+    }
+    else {
+	y = 1 + (y - sqrt(y * (4*delta + y))) / (2*delta);
+	return (rng->uniform() <= 1/(1 + y)) ? delta/y : delta*y;
     }
 }
 
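
The gig() helper above is the Michael, Schucany and Haas (1976) inverse Gaussian transformation specialised to IG(1, delta), followed by the reciprocal relation GIG(0.5, 1, delta^2) = delta/IG(1, delta) noted in its comment. For reference, a minimal standalone sketch of the general transformation, using <random> in place of the module's RNG class (the function name and the mu/phi parameter names are illustrative):

#include <cmath>
#include <random>

// Sketch: inverse Gaussian IG(mu, phi) (mean mu, shape phi) via the
// transformation-with-multiple-roots method of Michael, Schucany and
// Haas (1976).  gig() corresponds to mu = 1, phi = delta, with the
// final draw mapped through delta/IG(1, delta).
static double rinvgauss_sketch(double mu, double phi, std::mt19937 &rng)
{
    std::normal_distribution<double> norm(0.0, 1.0);
    std::uniform_real_distribution<double> unif(0.0, 1.0);

    double y = norm(rng);
    y = y * y;                                // chi-square(1) variate
    // Smaller root of the quadratic satisfied by the IG variate
    double x = mu + mu * mu * y / (2 * phi)
                  - (mu / (2 * phi)) * std::sqrt(4 * mu * phi * y + mu * mu * y * y);
    // Keep this root with probability mu/(mu + x), otherwise take its conjugate
    return (unif(rng) <= mu / (mu + x)) ? x : mu * mu / x;
}
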
@@ -62,15 +121,9 @@
     {
 	delta = fabs(delta);
 	while (true) {
-	    double y = rng->normal();
-	    y = y * y;
-	    y = 1 + (y - sqrt(y*(4*delta + y)))/(2*delta);
-	    double u = rng->uniform();
-	    double lambda = (u <= 1/(1+y)) ? delta/y : delta*y;
-	    u = rng->uniform();
-	    bool ok = (lambda > 2.25) ? r_intvl(u, lambda) : l_intvl(u, lambda);
-	if (ok)
-	    return lambda;
+	    //Acceptance probability is at least 0.24 for all delta
+	    double lambda = gig(delta, rng);
+	    if (accept(lambda, rng)) return lambda;
 	}
     }
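
The rewritten update() body is now a plain rejection loop: propose lambda from the GIG distribution and keep it only when the KS accept test passes. With the acceptance probability bounded below by 0.24, the number of proposals per draw is geometric with mean at most 1/0.24 (about 4.2), so the loop terminates quickly. A generic sketch of this loop pattern, with propose/accept standing in for the patch's gig() and accept():

#include <functional>
#include <random>

// Sketch: generic rejection-sampling driver.  If the acceptance
// probability is bounded below by p, the expected number of proposals
// per returned draw is at most 1/p.
static double rejection_sample(
    const std::function<double(std::mt19937 &)> &propose,
    const std::function<bool(double, std::mt19937 &)> &accept,
    std::mt19937 &rng)
{
    while (true) {
        double lambda = propose(rng);
        if (accept(lambda, rng)) {
            return lambda;
        }
    }
}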