
[Stable] Generates a list of arguments, as required by rstan::sampling or rstan::vb, by combining the required options with the data and the type of initialisation. Initialisation defaults to "random", but it is expected that create_initial_conditions() will be used.
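
A minimal sketch (not run) of how these pieces fit together; stan_data is assumed to be a data list built beforehand with create_stan_data(), and create_initial_conditions() supplies an initialisation function:

# combine options, data and an initialisation function into one argument list
args <- create_stan_args(
  stan = stan_opts(warmup = 500, chains = 2),
  data = stan_data,
  init = create_initial_conditions(stan_data),
  verbose = TRUE
)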

Usage

create_stan_args(
  stan = stan_opts(),
  data = NULL,
  init = "random",
  verbose = FALSE
)

Arguments

stan

A list of stan options as generated by stan_opts(). Defaults to stan_opts(). Can be used to override data, init, and verbose settings if desired.

data

A list of stan data, as created by create_stan_data().

init

Initial conditions passed to rstan. Defaults to "random" but can also be a function (as supplied by create_initial_conditions()).

verbose

Logical, defaults to FALSE. Should verbose progress messages be returned?

Value

A list of stan arguments.
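
The returned list maps onto the arguments of rstan::sampling() or rstan::vb(). A minimal sketch (not run), assuming an args list built as in the sketch above: a few elements (assumed here to be method, future, and max_execution_time) are used by EpiNow2 itself rather than by rstan, so they are dropped before the call.

# pass the remaining elements straight on to rstan (sketch only)
internal <- c("method", "future", "max_execution_time")
fit <- do.call(rstan::sampling, args[setdiff(names(args), internal)])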

Author

Sam Abbott

Examples

# default settings
create_stan_args()
#> $data
#> NULL
#> 
#> $init
#> [1] "random"
#> 
#> $refresh
#> [1] 0
#> 
#> $object
#> S4 class stanmodel 'estimate_infections' coded as follows:
#> functions {
#> // convolve two vectors as a backwards dot product
#> // y vector should be reversed
#> // limited to the length of x and backwards looking for x indexes
#> vector convolve_with_rev_pmf(vector x, vector y, int len) {
#>     int xlen = num_elements(x);
#>     int ylen = num_elements(y);
#>     vector[len] z;
#>     if (xlen + ylen <= len) {
#>       reject("convolve_with_rev_pmf: len is longer then x and y combined");
#>     }
#>     for (s in 1:len) {
#>       z[s] = dot_product(
#>         x[max(1, (s - ylen + 1)):min(s, xlen)],
#>         y[max(1, ylen - s + 1):min(ylen, ylen + xlen - s)]
#>       );
#>     }
#>    return(z);
#>   }
#> // convolve latent infections to reported (but still unobserved) cases
#> vector convolve_to_report(vector infections,
#>                           vector delay_rev_pmf,
#>                           int seeding_time) {
#>   int t = num_elements(infections);
#>   vector[t - seeding_time] reports;
#>   vector[t] unobs_reports = infections;
#>   int delays = num_elements(delay_rev_pmf);
#>   if (delays) {
#>     unobs_reports = convolve_with_rev_pmf(unobs_reports, delay_rev_pmf, t);
#>     reports = unobs_reports[(seeding_time + 1):t];
#>   } else {
#>     reports = infections[(seeding_time + 1):t];
#>   }
#>   return(reports);
#> }
#> // Calculate the daily probability of reporting using parametric
#> // distributions up to the maximum observed delay.
#> // If sigma is 0 all the probability mass is put on n.
#> // Adapted from https://github.com/epiforecasts/epinowcast
#> // @author Sam Abbott
#> // @author Adrian Lison
#> vector discretised_pmf(real mu, real sigma, int n, int dist) {
#>   vector[n] lpmf;
#>   if (sigma > 0) {
#>     vector[n] upper_lcdf;
#>     if (dist == 0) {
#>       for (i in 1:n) {
#>         upper_lcdf[i] = lognormal_lcdf(i | mu, sigma);
#>       }
#>     } else if (dist == 1) {
#>       real alpha = mu^2 / sigma^2;
#>       real beta = mu / sigma^2;
#>       for (i in 1:n) {
#>         upper_lcdf[i] = gamma_lcdf(i | alpha, beta);
#>       }
#>     } else {
#>       reject("Unknown distribution function provided.");
#>     }
#>     // discretise
#>     lpmf[1] = upper_lcdf[1];
#>     lpmf[2:n] = log_diff_exp(upper_lcdf[2:n], upper_lcdf[1:(n-1)]);
#>     // normalize
#>     lpmf = lpmf - upper_lcdf[n];
#>   } else {
#>     // delta function
#>     lpmf = rep_vector(negative_infinity(), n);
#>     lpmf[n] = 0;
#>   }
#>   return(exp(lpmf));
#> }
#> // reverse a pmf
#> vector reverse_mf(vector pmf) {
#>   int max_pmf = num_elements(pmf);
#>   vector[max_pmf] rev_pmf;
#>   for (d in 1:max_pmf) {
#>     rev_pmf[d] = pmf[max_pmf - d + 1];
#>   }
#>   return rev_pmf;
#> }
#> vector rev_seq(int base, int len) {
#>   vector[len] seq;
#>   for (i in 1:len) {
#>     seq[i] = len + base - i;
#>   }
#>   return(seq);
#> }
#> real rev_pmf_mean(vector rev_pmf, int base) {
#>   int len = num_elements(rev_pmf);
#>   vector[len] rev_pmf_seq = rev_seq(base, len);
#>   return(dot_product(rev_pmf_seq, rev_pmf));
#> }
#> real rev_pmf_var(vector rev_pmf, int base, real mean) {
#>   int len = num_elements(rev_pmf);
#>   vector[len] rev_pmf_seq = rev_seq(base, len);
#>   for (i in 1:len) {
#>     rev_pmf_seq[i] = pow(rev_pmf_seq[i], 2);
#>   }
#>   return(dot_product(rev_pmf_seq, rev_pmf) - pow(mean, 2));
#> }
#> array[] int get_delay_type_max(
#>   int delay_types, array[] int delay_types_p, array[] int delay_types_id,
#>   array[] int delay_types_groups, array[] int delay_max, array[] int delay_np_pmf_groups
#> ) {
#>   array[delay_types] int ret;
#>   for (i in 1:delay_types) {
#>     ret[i] = 1;
#>     for (j in delay_types_groups[i]:(delay_types_groups[i + 1] - 1)) {
#>       if (delay_types_p[j]) { // parametric
#>         ret[i] += delay_max[delay_types_id[j]] - 1;
#>       } else { // nonparametric
#>         ret[i] += delay_np_pmf_groups[delay_types_id[j] + 1] -
#>           delay_np_pmf_groups[delay_types_id[j]] - 1;
#>       }
#>     }
#>   }
#>   return ret;
#> }
#> vector get_delay_rev_pmf(
#>   int delay_id, int len, array[] int delay_types_p, array[] int delay_types_id,
#>   array[] int delay_types_groups, array[] int delay_max,
#>   vector delay_np_pmf, array[] int delay_np_pmf_groups,
#>   array[] real delay_mean, array[] real delay_sigma, array[] int delay_dist,
#>   int left_truncate, int reverse_pmf, int cumulative
#> ) {
#>   // loop over delays
#>   vector[len] pmf = rep_vector(0, len);
#>   int current_len = 1;
#>   int new_len;
#>   for (i in delay_types_groups[delay_id]:(delay_types_groups[delay_id + 1] - 1)) {
#>     if (delay_types_p[i]) { // parametric
#>       vector[delay_max[delay_types_id[i]]] new_variable_pmf =
#>         discretised_pmf(
#>           delay_mean[delay_types_id[i]],
#>           delay_sigma[delay_types_id[i]],
#>           delay_max[delay_types_id[i]],
#>           delay_dist[delay_types_id[i]]
#>       );
#>       new_len = current_len + delay_max[delay_types_id[i]] - 1;
#>       if (current_len == 1) { // first delay
#>         pmf[1:new_len] = new_variable_pmf;
#>       } else { // subsequent delay to be convolved
#>         pmf[1:new_len] = convolve_with_rev_pmf(
#>           pmf[1:current_len], reverse_mf(new_variable_pmf), new_len
#>         );
#>       }
#>     } else { // nonparametric
#>       int start = delay_np_pmf_groups[delay_types_id[i]];
#>       int end = delay_np_pmf_groups[delay_types_id[i] + 1] - 1;
#>       new_len = current_len + end - start;
#>       if (current_len == 1) { // first delay
#>         pmf[1:new_len] = delay_np_pmf[start:end];
#>       } else { // subsequent delay to be convolved
#>         pmf[1:new_len] = convolve_with_rev_pmf(
#>           pmf[1:current_len], reverse_mf(delay_np_pmf[start:end]), new_len
#>         );
#>       }
#>     }
#>     current_len = new_len;
#>   }
#>   if (left_truncate) {
#>     pmf = append_row(
#>       rep_vector(0, left_truncate),
#>       pmf[(left_truncate + 1):len] / sum(pmf[(left_truncate + 1):len])
#>     );
#>   }
#>   if (cumulative) {
#>     pmf = cumulative_sum(pmf);
#>   }
#>   if (reverse_pmf) {
#>     pmf = reverse_mf(pmf);
#>   }
#>   return pmf;
#> }
#> void delays_lp(array[] real delay_mean, array[] real delay_mean_mean, array[] real delay_mean_sd,
#>                array[] real delay_sd, array[] real delay_sd_mean, array[] real delay_sd_sd,
#>                array[] int delay_dist, array[] int weight) {
#>     int mean_delays = num_elements(delay_mean);
#>     int sd_delays = num_elements(delay_sd);
#>     if (mean_delays) {
#>       for (s in 1:mean_delays) {
#>         if (delay_mean_sd[s] > 0) {
#>           // uncertain mean
#>           target += normal_lpdf(delay_mean[s] | delay_mean_mean[s], delay_mean_sd[s]) * weight[s];
#>           // if a distribution with positive support only truncate the prior
#>           if (delay_dist[s]) {
#>             target += -normal_lccdf(0 | delay_mean_mean[s], delay_mean_sd[s]) * weight[s];
#>           }
#>         }
#>       }
#>     }
#>     if (sd_delays) {
#>       for (s in 1:sd_delays) {
#>         if (delay_sd_sd[s] > 0) {
#>           // uncertain sd
#>           target += normal_lpdf(delay_sd[s] | delay_sd_mean[s], delay_sd_sd[s]) * weight[s];
#>           target += -normal_lccdf(0 | delay_sd_mean[s], delay_sd_sd[s]) * weight[s];
#>         }
#>      }
#>   }
#> }
#> // eigenvalues for approximate hilbert space gp
#> // see here for details: https://arxiv.org/pdf/2004.11408.pdf
#> real lambda(real L, int m) {
#>   real lam;
#>   lam = ((m*pi())/(2*L))^2;
#>   return lam;
#> }
#> // eigenfunction for approximate hilbert space gp
#> // see here for details: https://arxiv.org/pdf/2004.11408.pdf
#> vector phi(real L, int m, vector x) {
#>   vector[rows(x)] fi;
#>   fi = 1/sqrt(L) * sin(m*pi()/(2*L) * (x+L));
#>   return fi;
#> }
#> // spectral density of the exponential quadratic kernel
#> real spd_se(real alpha, real rho, real w) {
#>   real S;
#>   S = (alpha^2) * sqrt(2*pi()) * rho * exp(-0.5*(rho^2)*(w^2));
#>   return S;
#> }
#> // spectral density of the Matern 3/2 kernel
#> real spd_matern(real alpha, real rho, real w) {
#>   real S;
#>   S = 4*alpha^2 * (sqrt(3)/rho)^3 * 1/((sqrt(3)/rho)^2 + w^2)^2;
#>   return S;
#> }
#> // setup gaussian process noise dimensions
#> int setup_noise(int ot_h, int t, int horizon, int estimate_r,
#>                 int stationary, int future_fixed, int fixed_from) {
#>   int noise_time = estimate_r > 0 ? (stationary > 0 ? ot_h : ot_h - 1) : t;
#>   int noise_terms =  future_fixed > 0 ? (noise_time - horizon + fixed_from) : noise_time;
#>   return(noise_terms);
#> }
#> // setup approximate gaussian process
#> matrix setup_gp(int M, real L, int dimension) {
#>   vector[dimension] time;
#>   matrix[dimension, M] PHI;
#>   real half_dim = dimension / 2.0;
#>   for (s in 1:dimension) {
#>     time[s] = (s - half_dim) / half_dim;
#>   }
#>   for (m in 1:M){
#>     PHI[,m] = phi(L, m, time);
#>   }
#>   return(PHI);
#> }
#> // update gaussian process using spectral densities
#> vector update_gp(matrix PHI, int M, real L, real alpha,
#>                  real rho, vector eta, int type) {
#>   vector[M] diagSPD;    // spectral density
#>   vector[M] SPD_eta;    // spectral density * noise
#>   int noise_terms = rows(PHI);
#>   vector[noise_terms] noise = rep_vector(1e-6, noise_terms);
#>   real unit_rho = rho / noise_terms;
#>   // GP in noise - spectral densities
#>   if (type == 0) {
#>     for(m in 1:M){
#>       diagSPD[m] =  sqrt(spd_se(alpha, unit_rho, sqrt(lambda(L, m))));
#>     }
#>   }else if (type == 1) {
#>     for(m in 1:M){
#>       diagSPD[m] =  sqrt(spd_matern(alpha, unit_rho, sqrt(lambda(L, m))));
#>     }
#>   }
#>   SPD_eta = diagSPD .* eta;
#>   noise = noise + PHI[,] * SPD_eta;
#>   return(noise);
#> }
#> // priors for gaussian process
#> void gaussian_process_lp(real rho, real alpha, vector eta,
#>                          real ls_meanlog, real ls_sdlog,
#>                          real ls_min, real ls_max, real alpha_sd) {
#>   if (ls_sdlog > 0) {
#>     rho ~ lognormal(ls_meanlog, ls_sdlog) T[ls_min, ls_max];
#>   } else {
#>     rho ~ inv_gamma(1.499007, 0.057277 * ls_max) T[ls_min, ls_max];
#>   }
#>   alpha ~ normal(0, alpha_sd);
#>   eta ~ std_normal();
#> }
#> // update a vector of Rts
#> vector update_Rt(int t, real log_R, vector noise, array[] int bps,
#>                  array[] real bp_effects, int stationary) {
#>   // define control parameters
#>   int bp_n = num_elements(bp_effects);
#>   int bp_c = 0;
#>   int gp_n = num_elements(noise);
#>   // define result vectors
#>   vector[t] bp = rep_vector(0, t);
#>   vector[t] gp = rep_vector(0, t);
#>   vector[t] R;
#>   // initialise breakpoints
#>   if (bp_n) {
#>     for (s in 1:t) {
#>       if (bps[s]) {
#>         bp_c += bps[s];
#>         bp[s] = bp_effects[bp_c];
#>       }
#>     }
#>     bp = cumulative_sum(bp);
#>   }
#>   //initialise gaussian process
#>   if (gp_n) {
#>     if (stationary) {
#>       gp[1:gp_n] = noise;
#>       // fix future gp based on last estimated
#>       if (t > gp_n) {
#>         gp[(gp_n + 1):t] = rep_vector(noise[gp_n], t - gp_n);
#>       }
#>     }else{
#>       gp[2:(gp_n + 1)] = noise;
#>       gp = cumulative_sum(gp);
#>     }
#>   }
#>   // Calculate Rt
#>   R = rep_vector(log_R, t) + bp + gp;
#>   R = exp(R);
#>   return(R);
#> }
#> // Rt priors
#> void rt_lp(vector log_R, array[] real initial_infections, array[] real initial_growth,
#>            array[] real bp_effects, array[] real bp_sd, int bp_n, int seeding_time,
#>            real r_logmean, real r_logsd, real prior_infections,
#>            real prior_growth) {
#>   // prior on R
#>   log_R ~ normal(r_logmean, r_logsd);
#>   //breakpoint effects on Rt
#>   if (bp_n > 0) {
#>     bp_sd[1] ~ normal(0, 0.1) T[0,];
#>     bp_effects ~ normal(0, bp_sd[1]);
#>   }
#>   // initial infections
#>   initial_infections ~ normal(prior_infections, 0.2);
#>   if (seeding_time > 1) {
#>     initial_growth ~ normal(prior_growth, 0.2);
#>   }
#> }
#> // calculate infectiousness (weighted sum of the generation time and infections)
#> // for a single time point
#> real update_infectiousness(vector infections, vector gt_rev_pmf,
#>                            int seeding_time, int index){
#>   int gt_max = num_elements(gt_rev_pmf);
#>   // work out where to start the convolution of past infections with the
#>   // generation time distribution: (current_time - maximal generation time) if
#>   // that is >= 1, otherwise 1
#>   int inf_start = max(1, (index + seeding_time - gt_max + 1));
#>   // work out where to end the convolution: current_time
#>   int inf_end = (index + seeding_time);
#>   // number of indices of the generation time to sum over (inf_end - inf_start + 1)
#>   int pmf_accessed = min(gt_max, index + seeding_time);
#>   // calculate the elements of the convolution
#>   real new_inf = dot_product(
#>     infections[inf_start:inf_end], tail(gt_rev_pmf, pmf_accessed)
#>   );
#>   return(new_inf);
#> }
#> // generate infections by using Rt = Rt-1 * sum(reversed generation time pmf * infections)
#> vector generate_infections(vector oR, int uot, vector gt_rev_pmf,
#>                            array[] real initial_infections, array[] real initial_growth,
#>                            int pop, int ht) {
#>   // time indices and storage
#>   int ot = num_elements(oR);
#>   int nht = ot - ht;
#>   int t = ot + uot;
#>   vector[ot] R = oR;
#>   real exp_adj_Rt;
#>   vector[t] infections = rep_vector(0, t);
#>   vector[ot] cum_infections;
#>   vector[ot] infectiousness;
#>   // Initialise infections using daily growth
#>   infections[1] = exp(initial_infections[1]);
#>   if (uot > 1) {
#>     real growth = exp(initial_growth[1]);
#>     for (s in 2:uot) {
#>       infections[s] = infections[s - 1] * growth;
#>     }
#>   }
#>   // calculate cumulative infections
#>   if (pop) {
#>     cum_infections[1] = sum(infections[1:uot]);
#>   }
#>   // iteratively update infections
#>   for (s in 1:ot) {
#>     infectiousness[s] = update_infectiousness(infections, gt_rev_pmf, uot, s);
#>     if (pop && s > nht) {
#>       exp_adj_Rt = exp(-R[s] * infectiousness[s] / (pop - cum_infections[nht]));
#>       exp_adj_Rt = exp_adj_Rt > 1 ? 1 : exp_adj_Rt;
#>       infections[s + uot] = (pop - cum_infections[s]) * (1 - exp_adj_Rt);
#>     }else{
#>       infections[s + uot] = R[s] * infectiousness[s];
#>     }
#>     if (pop && s < ot) {
#>       cum_infections[s + 1] = cum_infections[s] + infections[s + uot];
#>     }
#>   }
#>   return(infections);
#> }
#> // backcalculate infections using mean shifted cases and non-parametric noise
#> vector deconvolve_infections(vector shifted_cases, vector noise, int fixed,
#>                              int prior) {
#>   int t = num_elements(shifted_cases);
#>   vector[t] infections = rep_vector(1e-5, t);
#>   if(!fixed) {
#>     vector[t] exp_noise = exp(noise);
#>     if (prior == 1) {
#>       infections = infections + shifted_cases .* exp_noise;
#>     }else if (prior == 0) {
#>      infections = infections + exp_noise;
#>     }else if (prior == 2) {
#>       infections[1] = infections[1] + shifted_cases[1] * exp_noise[1];
#>       for (i in 2:t) {
#>         infections[i] = infections[i - 1] * exp_noise[i];
#>       }
#>     }
#>   }else{
#>     infections = infections + shifted_cases;
#>   }
#>   return(infections);
#> }
#> // apply day of the week effect
#> vector day_of_week_effect(vector reports, array[] int day_of_week, vector effect) {
#>   int t = num_elements(reports);
#>   int wl = num_elements(effect);
#>   // scale day of week effect
#>   vector[wl] scaled_effect = wl * effect;
#>   vector[t] scaled_reports;
#>   for (s in 1:t) {
#>     // add reporting effects (adjust for simplex scale)
#>     scaled_reports[s] = reports[s] * scaled_effect[day_of_week[s]];
#>    }
#>   return(scaled_reports);
#> }
#> // Scale observations by fraction reported and update log density of
#> // fraction reported
#> vector scale_obs(vector reports, real frac_obs) {
#>   int t = num_elements(reports);
#>   vector[t] scaled_reports;
#>   scaled_reports = reports * frac_obs;
#>   return(scaled_reports);
#> }
#> // Truncate observed data by some truncation distribution
#> vector truncate(vector reports, vector trunc_rev_cmf, int reconstruct) {
#>   int t = num_elements(reports);
#>   vector[t] trunc_reports = reports;
#>   // Calculate cmf of truncation delay
#>   int trunc_max = min(t, num_elements(trunc_rev_cmf));
#>   int first_t = t - trunc_max + 1;
#>   // Apply cdf of truncation delay to truncation max last entries in reports
#>   if (reconstruct) {
#>     trunc_reports[first_t:t] ./= trunc_rev_cmf[1:trunc_max];
#>   } else {
#>     trunc_reports[first_t:t] .*= trunc_rev_cmf[1:trunc_max];
#>   }
#>   return(trunc_reports);
#> }
#> // Truncation distribution priors
#> void truncation_lp(array[] real truncation_mean, array[] real truncation_sd,
#>                    array[] real trunc_mean_mean, array[] real trunc_mean_sd,
#>                    array[] real trunc_sd_mean, array[] real trunc_sd_sd) {
#>   int truncation = num_elements(truncation_mean);
#>   if (truncation) {
#>     if (trunc_mean_sd[1] > 0) {
#>       // uncertain mean
#>       truncation_mean ~ normal(trunc_mean_mean, trunc_mean_sd);
#>     }
#>     if (trunc_sd_sd[1] > 0) {
#>       // uncertain sd
#>       truncation_sd ~ normal(trunc_sd_mean, trunc_sd_sd);
#>     }
#>   }
#> }
#> // update log density for reported cases
#> void report_lp(array[] int cases, vector reports,
#>                array[] real rep_phi, real phi_mean, real phi_sd,
#>                int model_type, real weight) {
#>   if (model_type) {
#>     real sqrt_phi; // the reciprocal overdispersion parameter (phi)
#>     rep_phi[model_type] ~ normal(phi_mean, phi_sd) T[0,];
#>     sqrt_phi = 1 / sqrt(rep_phi[model_type]);
#>     if (weight == 1) {
#>       cases ~ neg_binomial_2(reports, sqrt_phi);
#>     } else {
#>       target += neg_binomial_2_lpmf(cases | reports, sqrt_phi) * weight;
#>     }
#>   } else {
#>     if (weight == 1) {
#>       cases ~ poisson(reports);
#>     } else {
#>       target += poisson_lpmf(cases | reports) * weight;
#>     }
#>   }
#> }
#> // update log likelihood (as above but not vectorised and returning log likelihood)
#> vector report_log_lik(array[] int cases, vector reports,
#>                       array[] real rep_phi, int model_type, real weight) {
#>   int t = num_elements(reports);
#>   vector[t] log_lik;
#>   // defer to poisson if phi is large, to avoid overflow
#>   if (model_type == 0) {
#>     for (i in 1:t) {
#>       log_lik[i] = poisson_lpmf(cases[i] | reports[i]) * weight;
#>     }
#>   } else {
#>     real sqrt_phi = 1 / sqrt(rep_phi[model_type]);
#>     for (i in 1:t) {
#>       log_lik[i] = neg_binomial_2_lpmf(cases[i] | reports[i], sqrt_phi) * weight;
#>     }
#>   }
#>   return(log_lik);
#> }
#> // sample reported cases from the observation model
#> array[] int report_rng(vector reports, array[] real rep_phi, int model_type) {
#>   int t = num_elements(reports);
#>   array[t] int sampled_reports;
#>   real sqrt_phi = 1e5;
#>   if (model_type) {
#>     sqrt_phi = 1 / sqrt(rep_phi[model_type]);
#>   }
#> 
#>   for (s in 1:t) {
#>     if (reports[s] < 1e-8) {
#>       sampled_reports[s] = 0;
#>     } else {
#>       // defer to poisson if phi is large, to avoid overflow
#>       if (sqrt_phi > 1e4) {
#>         sampled_reports[s] = poisson_rng(reports[s] > 1e8 ? 1e8 : reports[s]);
#>       } else {
#>         sampled_reports[s] = neg_binomial_2_rng(reports[s] > 1e8 ? 1e8 : reports[s], sqrt_phi);
#>       }
#>     }
#>   }
#>   return(sampled_reports);
#> }
#> // calculate Rt directly from inferred infections
#> vector calculate_Rt(vector infections, int seeding_time,
#>                     vector gt_rev_pmf, int smooth) {
#>   int t = num_elements(infections);
#>   int ot = t - seeding_time;
#>   vector[ot] R;
#>   vector[ot] sR;
#>   vector[ot] infectiousness = rep_vector(1e-5, ot);
#>   // calculate Rt using Cori et al. approach
#>   for (s in 1:ot) {
#>     infectiousness[s] += update_infectiousness(
#>       infections, gt_rev_pmf, seeding_time, s
#>     );
#>     R[s] = infections[s + seeding_time] / infectiousness[s];
#>   }
#>   if (smooth) {
#>     for (s in 1:ot) {
#>       real window = 0;
#>       sR[s] = 0;
#>       for (i in max(1, s - smooth):min(ot, s + smooth)) {
#>         sR[s] += R[i];
#>         window += 1;
#>       }
#>       sR[s] = sR[s] / window;
#>     }
#>   }else{
#>     sR = R;
#>   }
#>   return(sR);
#> }
#> // Convert an estimate of Rt to growth
#> array[] real R_to_growth(vector R, real gt_mean, real gt_var) {
#>   int t = num_elements(R);
#>   array[t] real r;
#>   if (gt_var > 0) {
#>     real k = gt_var / pow(gt_mean, 2);
#>     for (s in 1:t) {
#>       r[s] = (pow(R[s], k) - 1) / (k * gt_mean);
#>     }
#>   } else {
#>     // limit as gt_sd -> 0
#>     for (s in 1:t) {
#>       r[s] = log(R[s]) / gt_mean;
#>     }
#>   }
#>   return(r);
#> }
#> }
#> data {
#>   int t;                                            // unobserved time
#>   int seeding_time;                                 // time period used for seeding and not observed
#>   int horizon;                                      // forecast horizon
#>   int future_time;                                  // time in future for Rt
#>   array[t - horizon - seeding_time] int<lower = 0> cases; // observed cases
#>   vector<lower = 0>[t] shifted_cases;               // prior infections (for backcalculation)
#>   int<lower = 0> delay_n;                     // number of delay distributions
#>   int<lower = 0> delay_n_p;                   // number of parametric delay distributions
#>   int<lower = 0> delay_n_np;                  // number of nonparametric delay distributions
#>   array[delay_n_p] real delay_mean_mean;            // prior mean of mean delay distribution
#>   array[delay_n_p] real<lower = 0> delay_mean_sd;   // prior sd of mean delay distribution
#>   array[delay_n_p] real<lower = 0> delay_sd_mean;   // prior sd of sd of delay distribution
#>   array[delay_n_p] real<lower = 0> delay_sd_sd;     // prior sd of sd of delay distribution
#>   array[delay_n_p] int<lower = 1> delay_max;        // maximum delay distribution
#>   array[delay_n_p] int<lower = 0> delay_dist;       // 0 = lognormal; 1 = gamma
#>   int<lower = 0> delay_np_pmf_max;            // number of nonparametric pmf elements
#>   vector<lower = 0, upper = 1>[delay_np_pmf_max] delay_np_pmf; // ragged array of fixed PMFs
#>   array[delay_n_np + 1] int<lower = 1> delay_np_pmf_groups;              // links to ragged array
#>   array[delay_n_p] int<lower = 0> delay_weight;
#>   int<lower = 0> delay_types;                     // number of delay types
#>   array[delay_n] int<lower = 0> delay_types_p;          // whether delay types are parametric
#>   array[delay_n] int<lower = 0> delay_types_id;          // whether delay types are parametric
#>   array[delay_types + 1] int<lower = 0> delay_types_groups; // index of each delay (parametric or non)
#>   real L;				                     // boundary value for infections gp
#>   int<lower=1> M;			               // basis functions for infections gp
#>   real ls_meanlog;                   // meanlog for gp lengthscale prior
#>   real ls_sdlog;                     // sdlog for gp lengthscale prior
#>   real<lower=0> ls_min;              // Lower bound for the lengthscale
#>   real<lower=0> ls_max;              // Upper bound for the lengthscale
#>   real alpha_sd;                     // standard deviation of the alpha gp kernel parameter
#>   int gp_type;                       // type of gp, 0 = squared exponential, 1 = 3/2 matern
#>   int stationary;                    // is underlying gaussian process first or second order
#>   int fixed;                         //  should a gaussian process be used
#>   int estimate_r;                    // should the reproduction no be estimated (1 = yes)
#>   real prior_infections;             // prior for initial infections
#>   real prior_growth;                 // prior on initial growth rate
#>   real <lower = 0> r_mean;           // prior mean of reproduction number
#>   real <lower = 0> r_sd;             // prior standard deviation of reproduction number
#>   int bp_n;                          // no of breakpoints (0 = no breakpoints)
#>   array[t - seeding_time] int breakpoints; // when do breakpoints occur
#>   int future_fixed;                  // is underlying future Rt assumed to be fixed
#>   int fixed_from;                    // Reference date for when Rt estimation should be fixed
#>   int pop;                           // Initial susceptible population
#>   int<lower = 0> gt_id;              // id of generation time
#>   int backcalc_prior;                // Prior type to use for backcalculation
#>   int rt_half_window;                // Half the moving average window used when calculating Rt
#>   array[t - seeding_time] int day_of_week; // day of the week indicator (1 - 7)
#>   int model_type;                    // type of model: 0 = poisson otherwise negative binomial
#>   real phi_mean;                     // Mean and sd of the normal prior for the
#>   real phi_sd;                       // reporting process
#>   int week_effect;                   // length of week effect
#>   int obs_scale;                     // logical controlling scaling of observations
#>   real obs_scale_mean;               // mean scaling factor for observations
#>   real obs_scale_sd;                 // standard deviation of observation scaling
#>   real obs_weight;                   // weight given to observation in log density
#>   int likelihood;                    // Should the likelihood be included in the model
#>   int return_likelihood; // Should the likelihood be returned by the model
#>   int<lower = 0> trunc_id; // id of truncation
#>   int<lower = 0> delay_id; // id of delay
#> }
#> transformed data{
#>   // observations
#>   int ot = t - seeding_time - horizon;  // observed time
#>   int ot_h = ot + horizon;  // observed time + forecast horizon
#>   // gaussian process
#>   int noise_terms = setup_noise(ot_h, t, horizon, estimate_r, stationary, future_fixed, fixed_from);
#>   matrix[noise_terms, M] PHI = setup_gp(M, L, noise_terms);  // basis function
#>   // Rt
#>   real r_logmean = log(r_mean^2 / sqrt(r_sd^2 + r_mean^2));
#>   real r_logsd = sqrt(log(1 + (r_sd^2 / r_mean^2)));
#>   array[delay_types] int delay_type_max = get_delay_type_max(
#>     delay_types, delay_types_p, delay_types_id,
#>     delay_types_groups, delay_max, delay_np_pmf_groups
#>   );
#> }
#> parameters{
#>   // gaussian process
#>   array[fixed ? 0 : 1] real<lower = ls_min,upper=ls_max> rho;  // length scale of noise GP
#>   array[fixed ? 0 : 1] real<lower = 0> alpha;    // scale of of noise GP
#>   vector[fixed ? 0 : M] eta;               // unconstrained noise
#>   // Rt
#>   vector[estimate_r] log_R;                // baseline reproduction number estimate (log)
#>   array[estimate_r] real initial_infections ;    // seed infections
#>   array[estimate_r && seeding_time > 1 ? 1 : 0] real initial_growth; // seed growth rate
#>   array[bp_n > 0 ? 1 : 0] real<lower = 0> bp_sd; // standard deviation of breakpoint effect
#>   array[bp_n] real bp_effects;                   // Rt breakpoint effects
#>   // observation model
#>   array[delay_n_p] real delay_mean;         // mean of delays
#>   array[delay_n_p] real<lower = 0> delay_sd;  // sd of delays
#>   simplex[week_effect] day_of_week_simplex;// day of week reporting effect
#>   array[obs_scale] real<lower = 0, upper = 1> frac_obs;     // fraction of cases that are ultimately observed
#>   array[model_type] real<lower = 0> rep_phi;     // overdispersion of the reporting process
#> }
#> transformed parameters {
#>   vector[fixed ? 0 : noise_terms] noise;                    // noise  generated by the gaussian process
#>   vector<lower = 0, upper = 10 * r_mean>[estimate_r > 0 ? ot_h : 0] R; // reproduction number
#>   vector[t] infections;                                     // latent infections
#>   vector[ot_h] reports;                                     // estimated reported cases
#>   vector[ot] obs_reports;                                   // observed estimated reported cases
#>   vector[delay_type_max[gt_id]] gt_rev_pmf;
#>   // GP in noise - spectral densities
#>   if (!fixed) {
#>     noise = update_gp(PHI, M, L, alpha[1], rho[1], eta, gp_type);
#>   }
#>   // Estimate latent infections
#>   if (estimate_r) {
#>     gt_rev_pmf = get_delay_rev_pmf(
#>       gt_id, delay_type_max[gt_id], delay_types_p, delay_types_id,
#>       delay_types_groups, delay_max, delay_np_pmf,
#>       delay_np_pmf_groups, delay_mean, delay_sd, delay_dist,
#>       1, 1, 0
#>     );
#>     R = update_Rt(
#>       ot_h, log_R[estimate_r], noise, breakpoints, bp_effects, stationary
#>     );
#>     infections = generate_infections(
#>       R, seeding_time, gt_rev_pmf, initial_infections, initial_growth, pop,
#>       future_time
#>     );
#>   } else {
#>     // via deconvolution
#>     infections = deconvolve_infections(
#>       shifted_cases, noise, fixed, backcalc_prior
#>     );
#>   }
#>   // convolve from latent infections to mean of observations
#>   if (delay_id) {
#>     vector[delay_type_max[delay_id]] delay_rev_pmf = get_delay_rev_pmf(
#>       delay_id, delay_type_max[delay_id], delay_types_p, delay_types_id,
#>       delay_types_groups, delay_max, delay_np_pmf,
#>       delay_np_pmf_groups, delay_mean, delay_sd, delay_dist,
#>       0, 1, 0
#>     );
#>     reports = convolve_to_report(infections, delay_rev_pmf, seeding_time);
#>   } else {
#>     reports = infections[(seeding_time + 1):t];
#>   }
#>   // weekly reporting effect
#>   if (week_effect > 1) {
#>    reports = day_of_week_effect(reports, day_of_week, day_of_week_simplex);
#>   }
#>   // scaling of reported cases by fraction observed
#>  if (obs_scale) {
#>    reports = scale_obs(reports, frac_obs[1]);
#>  }
#>  // truncate near time cases to observed reports
#>  if (trunc_id) {
#>     vector[delay_type_max[trunc_id]] trunc_rev_cmf = get_delay_rev_pmf(
#>       trunc_id, delay_type_max[trunc_id], delay_types_p, delay_types_id,
#>       delay_types_groups, delay_max, delay_np_pmf,
#>       delay_np_pmf_groups, delay_mean, delay_sd, delay_dist,
#>       0, 1, 1
#>     );
#>     obs_reports = truncate(reports[1:ot], trunc_rev_cmf, 0);
#>  } else {
#>    obs_reports = reports[1:ot];
#>  }
#> }
#> model {
#>   // priors for noise GP
#>   if (!fixed) {
#>     gaussian_process_lp(
#>       rho[1], alpha[1], eta, ls_meanlog, ls_sdlog, ls_min, ls_max, alpha_sd
#>     );
#>   }
#>   // penalised priors for delay distributions
#>   delays_lp(
#>     delay_mean, delay_mean_mean,
#>     delay_mean_sd, delay_sd, delay_sd_mean, delay_sd_sd,
#>     delay_dist, delay_weight
#>   );
#>   if (estimate_r) {
#>     // priors on Rt
#>     rt_lp(
#>       log_R, initial_infections, initial_growth, bp_effects, bp_sd, bp_n,
#>       seeding_time, r_logmean, r_logsd, prior_infections, prior_growth
#>     );
#>   }
#>   // prior observation scaling
#>   if (obs_scale) {
#>     frac_obs[1] ~ normal(obs_scale_mean, obs_scale_sd) T[0, 1];
#>   }
#>   // observed reports from mean of reports (update likelihood)
#>   if (likelihood) {
#>     report_lp(
#>       cases, obs_reports, rep_phi, phi_mean, phi_sd, model_type, obs_weight
#>     );
#>   }
#> }
#> generated quantities {
#>   array[ot_h] int imputed_reports;
#>   vector[estimate_r > 0 ? 0: ot_h] gen_R;
#>   array[ot_h] real r;
#>   real gt_mean;
#>   real gt_var;
#>   vector[return_likelihood ? ot : 0] log_lik;
#>   if (estimate_r){
#>     // estimate growth from estimated Rt
#>     gt_mean = rev_pmf_mean(gt_rev_pmf, 1);
#>     gt_var = rev_pmf_var(gt_rev_pmf, 1, gt_mean);
#>     r = R_to_growth(R, gt_mean, gt_var);
#>   } else {
#>     // sample generation time
#>     array[delay_n_p] real delay_mean_sample =
#>       normal_rng(delay_mean_mean, delay_mean_sd);
#>     array[delay_n_p] real delay_sd_sample =
#>       normal_rng(delay_sd_mean, delay_sd_sd);
#>     vector[delay_type_max[gt_id]] sampled_gt_rev_pmf = get_delay_rev_pmf(
#>       gt_id, delay_type_max[gt_id], delay_types_p, delay_types_id,
#>       delay_types_groups, delay_max, delay_np_pmf,
#>       delay_np_pmf_groups, delay_mean_sample, delay_sd_sample,
#>       delay_dist, 1, 1, 0
#>     );
#>     gt_mean = rev_pmf_mean(sampled_gt_rev_pmf, 1);
#>     gt_var = rev_pmf_var(sampled_gt_rev_pmf, 1, gt_mean);
#>     // calculate Rt using infections and generation time
#>     gen_R = calculate_Rt(
#>       infections, seeding_time, sampled_gt_rev_pmf, rt_half_window
#>     );
#>     // estimate growth from calculated Rt
#>     r = R_to_growth(gen_R, gt_mean, gt_var);
#>   }
#>   // simulate reported cases
#>   imputed_reports = report_rng(reports, rep_phi, model_type);
#>   // log likelihood of model
#>   if (return_likelihood) {
#>     log_lik = report_log_lik(
#>       cases, obs_reports, rep_phi, model_type, obs_weight
#>     );
#>   }
#> } 
#> 
#> $method
#> [1] "sampling"
#> 
#> $cores
#> [1] 1
#> 
#> $warmup
#> [1] 250
#> 
#> $chains
#> [1] 4
#> 
#> $save_warmup
#> [1] FALSE
#> 
#> $seed
#> [1] 35453657
#> 
#> $future
#> [1] FALSE
#> 
#> $max_execution_time
#> [1] Inf
#> 
#> $control
#> $control$adapt_delta
#> [1] 0.95
#> 
#> $control$max_treedepth
#> [1] 15
#> 
#> 
#> $iter
#> [1] 750
#> 

# increasing warmup
create_stan_args(stan = stan_opts(warmup = 1000))
#> $data
#> NULL
#> 
#> $init
#> [1] "random"
#> 
#> $refresh
#> [1] 0
#> 
#> $object
#> S4 class stanmodel 'estimate_infections' coded as follows:
#> functions {
#> // convolve two vectors as a backwards dot product
#> // y vector should be reversed
#> // limited to the length of x and backwards looking for x indexes
#> vector convolve_with_rev_pmf(vector x, vector y, int len) {
#>     int xlen = num_elements(x);
#>     int ylen = num_elements(y);
#>     vector[len] z;
#>     if (xlen + ylen <= len) {
#>       reject("convolve_with_rev_pmf: len is longer then x and y combined");
#>     }
#>     for (s in 1:len) {
#>       z[s] = dot_product(
#>         x[max(1, (s - ylen + 1)):min(s, xlen)],
#>         y[max(1, ylen - s + 1):min(ylen, ylen + xlen - s)]
#>       );
#>     }
#>    return(z);
#>   }
#> // convolve latent infections to reported (but still unobserved) cases
#> vector convolve_to_report(vector infections,
#>                           vector delay_rev_pmf,
#>                           int seeding_time) {
#>   int t = num_elements(infections);
#>   vector[t - seeding_time] reports;
#>   vector[t] unobs_reports = infections;
#>   int delays = num_elements(delay_rev_pmf);
#>   if (delays) {
#>     unobs_reports = convolve_with_rev_pmf(unobs_reports, delay_rev_pmf, t);
#>     reports = unobs_reports[(seeding_time + 1):t];
#>   } else {
#>     reports = infections[(seeding_time + 1):t];
#>   }
#>   return(reports);
#> }
#> // Calculate the daily probability of reporting using parametric
#> // distributions up to the maximum observed delay.
#> // If sigma is 0 all the probability mass is put on n.
#> // Adapted from https://github.com/epiforecasts/epinowcast
#> // @author Sam Abbott
#> // @author Adrian Lison
#> vector discretised_pmf(real mu, real sigma, int n, int dist) {
#>   vector[n] lpmf;
#>   if (sigma > 0) {
#>     vector[n] upper_lcdf;
#>     if (dist == 0) {
#>       for (i in 1:n) {
#>         upper_lcdf[i] = lognormal_lcdf(i | mu, sigma);
#>       }
#>     } else if (dist == 1) {
#>       real alpha = mu^2 / sigma^2;
#>       real beta = mu / sigma^2;
#>       for (i in 1:n) {
#>         upper_lcdf[i] = gamma_lcdf(i | alpha, beta);
#>       }
#>     } else {
#>       reject("Unknown distribution function provided.");
#>     }
#>     // discretise
#>     lpmf[1] = upper_lcdf[1];
#>     lpmf[2:n] = log_diff_exp(upper_lcdf[2:n], upper_lcdf[1:(n-1)]);
#>     // normalize
#>     lpmf = lpmf - upper_lcdf[n];
#>   } else {
#>     // delta function
#>     lpmf = rep_vector(negative_infinity(), n);
#>     lpmf[n] = 0;
#>   }
#>   return(exp(lpmf));
#> }
#> // reverse a mf
#> vector reverse_mf(vector pmf) {
#>   int max_pmf = num_elements(pmf);
#>   vector[max_pmf] rev_pmf;
#>   for (d in 1:max_pmf) {
#>     rev_pmf[d] = pmf[max_pmf - d + 1];
#>   }
#>   return rev_pmf;
#> }
#> vector rev_seq(int base, int len) {
#>   vector[len] seq;
#>   for (i in 1:len) {
#>     seq[i] = len + base - i;
#>   }
#>   return(seq);
#> }
#> real rev_pmf_mean(vector rev_pmf, int base) {
#>   int len = num_elements(rev_pmf);
#>   vector[len] rev_pmf_seq = rev_seq(base, len);
#>   return(dot_product(rev_pmf_seq, rev_pmf));
#> }
#> real rev_pmf_var(vector rev_pmf, int base, real mean) {
#>   int len = num_elements(rev_pmf);
#>   vector[len] rev_pmf_seq = rev_seq(base, len);
#>   for (i in 1:len) {
#>     rev_pmf_seq[i] = pow(rev_pmf_seq[i], 2);
#>   }
#>   return(dot_product(rev_pmf_seq, rev_pmf) - pow(mean, 2));
#> }
#> array[] int get_delay_type_max(
#>   int delay_types, array[] int delay_types_p, array[] int delay_types_id,
#>   array[] int delay_types_groups, array[] int delay_max, array[] int delay_np_pmf_groups
#> ) {
#>   array[delay_types] int ret;
#>   for (i in 1:delay_types) {
#>     ret[i] = 1;
#>     for (j in delay_types_groups[i]:(delay_types_groups[i + 1] - 1)) {
#>       if (delay_types_p[j]) { // parametric
#>         ret[i] += delay_max[delay_types_id[j]] - 1;
#>       } else { // nonparametric
#>         ret[i] += delay_np_pmf_groups[delay_types_id[j] + 1] -
#>           delay_np_pmf_groups[delay_types_id[j]] - 1;
#>       }
#>     }
#>   }
#>   return ret;
#> }
#> vector get_delay_rev_pmf(
#>   int delay_id, int len, array[] int delay_types_p, array[] int delay_types_id,
#>   array[] int delay_types_groups, array[] int delay_max,
#>   vector delay_np_pmf, array[] int delay_np_pmf_groups,
#>   array[] real delay_mean, array[] real delay_sigma, array[] int delay_dist,
#>   int left_truncate, int reverse_pmf, int cumulative
#> ) {
#>   // loop over delays
#>   vector[len] pmf = rep_vector(0, len);
#>   int current_len = 1;
#>   int new_len;
#>   for (i in delay_types_groups[delay_id]:(delay_types_groups[delay_id + 1] - 1)) {
#>     if (delay_types_p[i]) { // parametric
#>       vector[delay_max[delay_types_id[i]]] new_variable_pmf =
#>         discretised_pmf(
#>           delay_mean[delay_types_id[i]],
#>           delay_sigma[delay_types_id[i]],
#>           delay_max[delay_types_id[i]],
#>           delay_dist[delay_types_id[i]]
#>       );
#>       new_len = current_len + delay_max[delay_types_id[i]] - 1;
#>       if (current_len == 1) { // first delay
#>         pmf[1:new_len] = new_variable_pmf;
#>       } else { // subsequent delay to be convolved
#>         pmf[1:new_len] = convolve_with_rev_pmf(
#>           pmf[1:current_len], reverse_mf(new_variable_pmf), new_len
#>         );
#>       }
#>     } else { // nonparametric
#>       int start = delay_np_pmf_groups[delay_types_id[i]];
#>       int end = delay_np_pmf_groups[delay_types_id[i] + 1] - 1;
#>       new_len = current_len + end - start;
#>       if (current_len == 1) { // first delay
#>         pmf[1:new_len] = delay_np_pmf[start:end];
#>       } else { // subsequent delay to be convolved
#>         pmf[1:new_len] = convolve_with_rev_pmf(
#>           pmf[1:current_len], reverse_mf(delay_np_pmf[start:end]), new_len
#>         );
#>       }
#>     }
#>     current_len = new_len;
#>   }
#>   if (left_truncate) {
#>     pmf = append_row(
#>       rep_vector(0, left_truncate),
#>       pmf[(left_truncate + 1):len] / sum(pmf[(left_truncate + 1):len])
#>     );
#>   }
#>   if (cumulative) {
#>     pmf = cumulative_sum(pmf);
#>   }
#>   if (reverse_pmf) {
#>     pmf = reverse_mf(pmf);
#>   }
#>   return pmf;
#> }
#> void delays_lp(array[] real delay_mean, array[] real delay_mean_mean, array[] real delay_mean_sd,
#>                array[] real delay_sd, array[] real delay_sd_mean, array[] real delay_sd_sd,
#>                array[] int delay_dist, array[] int weight) {
#>     int mean_delays = num_elements(delay_mean);
#>     int sd_delays = num_elements(delay_sd);
#>     if (mean_delays) {
#>       for (s in 1:mean_delays) {
#>         if (delay_mean_sd[s] > 0) {
#>           // uncertain mean
#>           target += normal_lpdf(delay_mean[s] | delay_mean_mean[s], delay_mean_sd[s]) * weight[s];
#>           // if a distribution with postive support only truncate the prior
#>           if (delay_dist[s]) {
#>             target += -normal_lccdf(0 | delay_mean_mean[s], delay_mean_sd[s]) * weight[s];
#>           }
#>         }
#>       }
#>     }
#>     if (sd_delays) {
#>       for (s in 1:sd_delays) {
#>         if (delay_sd_sd[s] > 0) {
#>           // uncertain sd
#>           target += normal_lpdf(delay_sd[s] | delay_sd_mean[s], delay_sd_sd[s]) * weight[s];
#>           target += -normal_lccdf(0 | delay_sd_mean[s], delay_sd_sd[s]) * weight[s];
#>         }
#>      }
#>   }
#> }
#> // eigenvalues for approximate hilbert space gp
#> // see here for details: https://arxiv.org/pdf/2004.11408.pdf
#> real lambda(real L, int m) {
#>   real lam;
#>   lam = ((m*pi())/(2*L))^2;
#>   return lam;
#> }
#> // eigenfunction for approximate hilbert space gp
#> // see here for details: https://arxiv.org/pdf/2004.11408.pdf
#> vector phi(real L, int m, vector x) {
#>   vector[rows(x)] fi;
#>   fi = 1/sqrt(L) * sin(m*pi()/(2*L) * (x+L));
#>   return fi;
#> }
#> // spectral density of the exponential quadratic kernal
#> real spd_se(real alpha, real rho, real w) {
#>   real S;
#>   S = (alpha^2) * sqrt(2*pi()) * rho * exp(-0.5*(rho^2)*(w^2));
#>   return S;
#> }
#> // spectral density of the Matern 3/2 kernel
#> real spd_matern(real alpha, real rho, real w) {
#>   real S;
#>   S = 4*alpha^2 * (sqrt(3)/rho)^3 * 1/((sqrt(3)/rho)^2 + w^2)^2;
#>   return S;
#> }
#> // setup gaussian process noise dimensions
#> int setup_noise(int ot_h, int t, int horizon, int estimate_r,
#>                 int stationary, int future_fixed, int fixed_from) {
#>   int noise_time = estimate_r > 0 ? (stationary > 0 ? ot_h : ot_h - 1) : t;
#>   int noise_terms =  future_fixed > 0 ? (noise_time - horizon + fixed_from) : noise_time;
#>   return(noise_terms);
#> }
#> // setup approximate gaussian process
#> matrix setup_gp(int M, real L, int dimension) {
#>   vector[dimension] time;
#>   matrix[dimension, M] PHI;
#>   real half_dim = dimension / 2.0;
#>   for (s in 1:dimension) {
#>     time[s] = (s - half_dim) / half_dim;
#>   }
#>   for (m in 1:M){
#>     PHI[,m] = phi(L, m, time);
#>   }
#>   return(PHI);
#> }
#> // update gaussian process using spectral densities
#> vector update_gp(matrix PHI, int M, real L, real alpha,
#>                  real rho, vector eta, int type) {
#>   vector[M] diagSPD;    // spectral density
#>   vector[M] SPD_eta;    // spectral density * noise
#>   int noise_terms = rows(PHI);
#>   vector[noise_terms] noise = rep_vector(1e-6, noise_terms);
#>   real unit_rho = rho / noise_terms;
#>   // GP in noise - spectral densities
#>   if (type == 0) {
#>     for(m in 1:M){
#>       diagSPD[m] =  sqrt(spd_se(alpha, unit_rho, sqrt(lambda(L, m))));
#>     }
#>   }else if (type == 1) {
#>     for(m in 1:M){
#>       diagSPD[m] =  sqrt(spd_matern(alpha, unit_rho, sqrt(lambda(L, m))));
#>     }
#>   }
#>   SPD_eta = diagSPD .* eta;
#>   noise = noise + PHI[,] * SPD_eta;
#>   return(noise);
#> }
#> // priors for gaussian process
#> void gaussian_process_lp(real rho, real alpha, vector eta,
#>                          real ls_meanlog, real ls_sdlog,
#>                          real ls_min, real ls_max, real alpha_sd) {
#>   if (ls_sdlog > 0) {
#>     rho ~ lognormal(ls_meanlog, ls_sdlog) T[ls_min, ls_max];
#>   } else {
#>     rho ~ inv_gamma(1.499007, 0.057277 * ls_max) T[ls_min, ls_max];
#>   }
#>   alpha ~ normal(0, alpha_sd);
#>   eta ~ std_normal();
#> }
#> // update a vector of Rts
#> vector update_Rt(int t, real log_R, vector noise, array[] int bps,
#>                  array[] real bp_effects, int stationary) {
#>   // define control parameters
#>   int bp_n = num_elements(bp_effects);
#>   int bp_c = 0;
#>   int gp_n = num_elements(noise);
#>   // define result vectors
#>   vector[t] bp = rep_vector(0, t);
#>   vector[t] gp = rep_vector(0, t);
#>   vector[t] R;
#>   // initialise breakpoints
#>   if (bp_n) {
#>     for (s in 1:t) {
#>       if (bps[s]) {
#>         bp_c += bps[s];
#>         bp[s] = bp_effects[bp_c];
#>       }
#>     }
#>     bp = cumulative_sum(bp);
#>   }
#>   //initialise gaussian process
#>   if (gp_n) {
#>     if (stationary) {
#>       gp[1:gp_n] = noise;
#>       // fix future gp based on last estimated
#>       if (t > gp_n) {
#>         gp[(gp_n + 1):t] = rep_vector(noise[gp_n], t - gp_n);
#>       }
#>     }else{
#>       gp[2:(gp_n + 1)] = noise;
#>       gp = cumulative_sum(gp);
#>     }
#>   }
#>   // Calculate Rt
#>   R = rep_vector(log_R, t) + bp + gp;
#>   R = exp(R);
#>   return(R);
#> }
#> // Rt priors
#> void rt_lp(vector log_R, array[] real initial_infections, array[] real initial_growth,
#>            array[] real bp_effects, array[] real bp_sd, int bp_n, int seeding_time,
#>            real r_logmean, real r_logsd, real prior_infections,
#>            real prior_growth) {
#>   // prior on R
#>   log_R ~ normal(r_logmean, r_logsd);
#>   //breakpoint effects on Rt
#>   if (bp_n > 0) {
#>     bp_sd[1] ~ normal(0, 0.1) T[0,];
#>     bp_effects ~ normal(0, bp_sd[1]);
#>   }
#>   // initial infections
#>   initial_infections ~ normal(prior_infections, 0.2);
#>   if (seeding_time > 1) {
#>     initial_growth ~ normal(prior_growth, 0.2);
#>   }
#> }
#> // calculate infectiousness (weighted sum of the generation time and infections)
#> // for a single time point
#> real update_infectiousness(vector infections, vector gt_rev_pmf,
#>                            int seeding_time, int index){
#>   int gt_max = num_elements(gt_rev_pmf);
#>   // work out where to start the convolution of past infections with the
#>   // generation time distribution: (current_time - maximal generation time) if
#>   // that is >= 1, otherwise 1
#>   int inf_start = max(1, (index + seeding_time - gt_max + 1));
#>   // work out where to end the convolution: current_time
#>   int inf_end = (index + seeding_time);
#>   // number of indices of the generation time to sum over (inf_end - inf_start + 1)
#>   int pmf_accessed = min(gt_max, index + seeding_time);
#>   // calculate the elements of the convolution
#>   real new_inf = dot_product(
#>     infections[inf_start:inf_end], tail(gt_rev_pmf, pmf_accessed)
#>   );
#>   return(new_inf);
#> }
#> // generate infections by using Rt = Rt-1 * sum(reversed generation time pmf * infections)
#> vector generate_infections(vector oR, int uot, vector gt_rev_pmf,
#>                            array[] real initial_infections, array[] real initial_growth,
#>                            int pop, int ht) {
#>   // time indices and storage
#>   int ot = num_elements(oR);
#>   int nht = ot - ht;
#>   int t = ot + uot;
#>   vector[ot] R = oR;
#>   real exp_adj_Rt;
#>   vector[t] infections = rep_vector(0, t);
#>   vector[ot] cum_infections;
#>   vector[ot] infectiousness;
#>   // Initialise infections using daily growth
#>   infections[1] = exp(initial_infections[1]);
#>   if (uot > 1) {
#>     real growth = exp(initial_growth[1]);
#>     for (s in 2:uot) {
#>       infections[s] = infections[s - 1] * growth;
#>     }
#>   }
#>   // calculate cumulative infections
#>   if (pop) {
#>     cum_infections[1] = sum(infections[1:uot]);
#>   }
#>   // iteratively update infections
#>   for (s in 1:ot) {
#>     infectiousness[s] = update_infectiousness(infections, gt_rev_pmf, uot, s);
#>     if (pop && s > nht) {
#>       exp_adj_Rt = exp(-R[s] * infectiousness[s] / (pop - cum_infections[nht]));
#>       exp_adj_Rt = exp_adj_Rt > 1 ? 1 : exp_adj_Rt;
#>       infections[s + uot] = (pop - cum_infections[s]) * (1 - exp_adj_Rt);
#>     }else{
#>       infections[s + uot] = R[s] * infectiousness[s];
#>     }
#>     if (pop && s < ot) {
#>       cum_infections[s + 1] = cum_infections[s] + infections[s + uot];
#>     }
#>   }
#>   return(infections);
#> }
#> // backcalculate infections using mean shifted cases and non-parametric noise
#> vector deconvolve_infections(vector shifted_cases, vector noise, int fixed,
#>                              int prior) {
#>   int t = num_elements(shifted_cases);
#>   vector[t] infections = rep_vector(1e-5, t);
#>   if(!fixed) {
#>     vector[t] exp_noise = exp(noise);
#>     if (prior == 1) {
#>       infections = infections + shifted_cases .* exp_noise;
#>     }else if (prior == 0) {
#>      infections = infections + exp_noise;
#>     }else if (prior == 2) {
#>       infections[1] = infections[1] + shifted_cases[1] * exp_noise[1];
#>       for (i in 2:t) {
#>         infections[i] = infections[i - 1] * exp_noise[i];
#>       }
#>     }
#>   }else{
#>     infections = infections + shifted_cases;
#>   }
#>   return(infections);
#> }
#> // apply day of the week effect
#> vector day_of_week_effect(vector reports, array[] int day_of_week, vector effect) {
#>   int t = num_elements(reports);
#>   int wl = num_elements(effect);
#>   // scale day of week effect
#>   vector[wl] scaled_effect = wl * effect;
#>   vector[t] scaled_reports;
#>   for (s in 1:t) {
#>     // add reporting effects (adjust for simplex scale)
#>     scaled_reports[s] = reports[s] * scaled_effect[day_of_week[s]];
#>    }
#>   return(scaled_reports);
#> }
#> // Scale observations by fraction reported and update log density of
#> // fraction reported
#> vector scale_obs(vector reports, real frac_obs) {
#>   int t = num_elements(reports);
#>   vector[t] scaled_reports;
#>   scaled_reports = reports * frac_obs;
#>   return(scaled_reports);
#> }
#> // Truncate observed data by some truncation distribution
#> vector truncate(vector reports, vector trunc_rev_cmf, int reconstruct) {
#>   int t = num_elements(reports);
#>   vector[t] trunc_reports = reports;
#>   // Calculate cmf of truncation delay
#>   int trunc_max = min(t, num_elements(trunc_rev_cmf));
#>   int first_t = t - trunc_max + 1;
#>   // Apply cdf of truncation delay to truncation max last entries in reports
#>   if (reconstruct) {
#>     trunc_reports[first_t:t] ./= trunc_rev_cmf[1:trunc_max];
#>   } else {
#>     trunc_reports[first_t:t] .*= trunc_rev_cmf[1:trunc_max];
#>   }
#>   return(trunc_reports);
#> }
#> // Truncation distribution priors
#> void truncation_lp(array[] real truncation_mean, array[] real truncation_sd,
#>                    array[] real trunc_mean_mean, array[] real trunc_mean_sd,
#>                    array[] real trunc_sd_mean, array[] real trunc_sd_sd) {
#>   int truncation = num_elements(truncation_mean);
#>   if (truncation) {
#>     if (trunc_mean_sd[1] > 0) {
#>       // uncertain mean
#>       truncation_mean ~ normal(trunc_mean_mean, trunc_mean_sd);
#>     }
#>     if (trunc_sd_sd[1] > 0) {
#>       // uncertain sd
#>       truncation_sd ~ normal(trunc_sd_mean, trunc_sd_sd);
#>     }
#>   }
#> }
#> // update log density for reported cases
#> void report_lp(array[] int cases, vector reports,
#>                array[] real rep_phi, real phi_mean, real phi_sd,
#>                int model_type, real weight) {
#>   if (model_type) {
#>     real sqrt_phi; // the reciprocal overdispersion parameter (phi)
#>     rep_phi[model_type] ~ normal(phi_mean, phi_sd) T[0,];
#>     sqrt_phi = 1 / sqrt(rep_phi[model_type]);
#>     if (weight == 1) {
#>       cases ~ neg_binomial_2(reports, sqrt_phi);
#>     } else {
#>       target += neg_binomial_2_lpmf(cases | reports, sqrt_phi) * weight;
#>     }
#>   } else {
#>     if (weight == 1) {
#>       cases ~ poisson(reports);
#>     } else {
#>       target += poisson_lpmf(cases | reports) * weight;
#>     }
#>   }
#> }
#> // update log likelihood (as above but not vectorised and returning log likelihood)
#> vector report_log_lik(array[] int cases, vector reports,
#>                       array[] real rep_phi, int model_type, real weight) {
#>   int t = num_elements(reports);
#>   vector[t] log_lik;
#>   // defer to poisson if phi is large, to avoid overflow
#>   if (model_type == 0) {
#>     for (i in 1:t) {
#>       log_lik[i] = poisson_lpmf(cases[i] | reports[i]) * weight;
#>     }
#>   } else {
#>     real sqrt_phi = 1 / sqrt(rep_phi[model_type]);
#>     for (i in 1:t) {
#>       log_lik[i] = neg_binomial_2_lpmf(cases[i] | reports[i], sqrt_phi) * weight;
#>     }
#>   }
#>   return(log_lik);
#> }
#> // sample reported cases from the observation model
#> array[] int report_rng(vector reports, array[] real rep_phi, int model_type) {
#>   int t = num_elements(reports);
#>   array[t] int sampled_reports;
#>   real sqrt_phi = 1e5;
#>   if (model_type) {
#>     sqrt_phi = 1 / sqrt(rep_phi[model_type]);
#>   }
#> 
#>   for (s in 1:t) {
#>     if (reports[s] < 1e-8) {
#>       sampled_reports[s] = 0;
#>     } else {
#>       // defer to poisson if phi is large, to avoid overflow
#>       if (sqrt_phi > 1e4) {
#>         sampled_reports[s] = poisson_rng(reports[s] > 1e8 ? 1e8 : reports[s]);
#>       } else {
#>         sampled_reports[s] = neg_binomial_2_rng(reports[s] > 1e8 ? 1e8 : reports[s], sqrt_phi);
#>       }
#>     }
#>   }
#>   return(sampled_reports);
#> }
#> // calculate Rt directly from inferred infections
#> vector calculate_Rt(vector infections, int seeding_time,
#>                     vector gt_rev_pmf, int smooth) {
#>   int t = num_elements(infections);
#>   int ot = t - seeding_time;
#>   vector[ot] R;
#>   vector[ot] sR;
#>   vector[ot] infectiousness = rep_vector(1e-5, ot);
#>   // calculate Rt using Cori et al. approach
#>   for (s in 1:ot) {
#>     infectiousness[s] += update_infectiousness(
#>       infections, gt_rev_pmf, seeding_time, s
#>     );
#>     R[s] = infections[s + seeding_time] / infectiousness[s];
#>   }
#>   if (smooth) {
#>     for (s in 1:ot) {
#>       real window = 0;
#>       sR[s] = 0;
#>       for (i in max(1, s - smooth):min(ot, s + smooth)) {
#>         sR[s] += R[i];
#>         window += 1;
#>       }
#>       sR[s] = sR[s] / window;
#>     }
#>   }else{
#>     sR = R;
#>   }
#>   return(sR);
#> }
#> // Convert an estimate of Rt to growth
#> array[] real R_to_growth(vector R, real gt_mean, real gt_var) {
#>   int t = num_elements(R);
#>   array[t] real r;
#>   if (gt_var > 0) {
#>     real k = gt_var / pow(gt_mean, 2);
#>     for (s in 1:t) {
#>       r[s] = (pow(R[s], k) - 1) / (k * gt_mean);
#>     }
#>   } else {
#>     // limit as gt_sd -> 0
#>     for (s in 1:t) {
#>       r[s] = log(R[s]) / gt_mean;
#>     }
#>   }
#>   return(r);
#> }
#> }
#> data {
#>   int t;                                            // unobserved time
#>   int seeding_time;                                 // time period used for seeding and not observed
#>   int horizon;                                      // forecast horizon
#>   int future_time;                                  // time in future for Rt
#>   array[t - horizon - seeding_time] int<lower = 0> cases; // observed cases
#>   vector<lower = 0>[t] shifted_cases;               // prior infections (for backcalculation)
#>   int<lower = 0> delay_n;                     // number of delay distributions
#>   int<lower = 0> delay_n_p;                   // number of parametric delay distributions
#>   int<lower = 0> delay_n_np;                  // number of nonparametric delay distributions
#>   array[delay_n_p] real delay_mean_mean;            // prior mean of mean delay distribution
#>   array[delay_n_p] real<lower = 0> delay_mean_sd;   // prior sd of mean delay distribution
#>   array[delay_n_p] real<lower = 0> delay_sd_mean;   // prior sd of sd of delay distribution
#>   array[delay_n_p] real<lower = 0> delay_sd_sd;     // prior sd of sd of delay distribution
#>   array[delay_n_p] int<lower = 1> delay_max;        // maximum delay distribution
#>   array[delay_n_p] int<lower = 0> delay_dist;       // 0 = lognormal; 1 = gamma
#>   int<lower = 0> delay_np_pmf_max;            // number of nonparametric pmf elements
#>   vector<lower = 0, upper = 1>[delay_np_pmf_max] delay_np_pmf; // ragged array of fixed PMFs
#>   array[delay_n_np + 1] int<lower = 1> delay_np_pmf_groups;              // links to ragged array
#>   array[delay_n_p] int<lower = 0> delay_weight;
#>   int<lower = 0> delay_types;                     // number of delay types
#>   array[delay_n] int<lower = 0> delay_types_p;          // whether delay types are parametric
#>   array[delay_n] int<lower = 0> delay_types_id;          // id of each delay within its type
#>   array[delay_types + 1] int<lower = 0> delay_types_groups; // index of each delay (parametric or non)
#>   real L;				                     // boundary value for infections gp
#>   int<lower=1> M;			               // basis functions for infections gp
#>   real ls_meanlog;                   // meanlog for gp lengthscale prior
#>   real ls_sdlog;                     // sdlog for gp lengthscale prior
#>   real<lower=0> ls_min;              // Lower bound for the lengthscale
#>   real<lower=0> ls_max;              // Upper bound for the lengthscale
#>   real alpha_sd;                     // standard deviation of the alpha gp kernel parameter
#>   int gp_type;                       // type of gp, 0 = squared exponential, 1 = 3/2 matern
#>   int stationary;                    // is underlying gaussian process first or second order
#>   int fixed;                         //  should a gaussian process be used
#>   int estimate_r;                    // should the reproduction number be estimated (1 = yes)
#>   real prior_infections;             // prior for initial infections
#>   real prior_growth;                 // prior on initial growth rate
#>   real <lower = 0> r_mean;           // prior mean of reproduction number
#>   real <lower = 0> r_sd;             // prior standard deviation of reproduction number
#>   int bp_n;                          // no of breakpoints (0 = no breakpoints)
#>   array[t - seeding_time] int breakpoints; // when do breakpoints occur
#>   int future_fixed;                  // is underlying future Rt assumed to be fixed
#>   int fixed_from;                    // Reference date for when Rt estimation should be fixed
#>   int pop;                           // Initial susceptible population
#>   int<lower = 0> gt_id;              // id of generation time
#>   int backcalc_prior;                // Prior type to use for backcalculation
#>   int rt_half_window;                // Half the moving average window used when calculating Rt
#>   array[t - seeding_time] int day_of_week; // day of the week indicator (1 - 7)
#>   int model_type;                    // type of model: 0 = poisson otherwise negative binomial
#>   real phi_mean;                     // mean of the normal prior for the reporting process
#>   real phi_sd;                       // sd of the normal prior for the reporting process
#>   int week_effect;                   // length of week effect
#>   int obs_scale;                     // logical controlling scaling of observations
#>   real obs_scale_mean;               // mean scaling factor for observations
#>   real obs_scale_sd;                 // standard deviation of observation scaling
#>   real obs_weight;                   // weight given to observation in log density
#>   int likelihood;                    // Should the likelihood be included in the model
#>   int return_likelihood; // Should the likelihood be returned by the model
#>   int<lower = 0> trunc_id; // id of truncation
#>   int<lower = 0> delay_id; // id of delay
#> }
#> transformed data{
#>   // observations
#>   int ot = t - seeding_time - horizon;  // observed time
#>   int ot_h = ot + horizon;  // observed time + forecast horizon
#>   // gaussian process
#>   int noise_terms = setup_noise(ot_h, t, horizon, estimate_r, stationary, future_fixed, fixed_from);
#>   matrix[noise_terms, M] PHI = setup_gp(M, L, noise_terms);  // basis function
#>   // Rt
#>   real r_logmean = log(r_mean^2 / sqrt(r_sd^2 + r_mean^2));
#>   real r_logsd = sqrt(log(1 + (r_sd^2 / r_mean^2)));
#>   array[delay_types] int delay_type_max = get_delay_type_max(
#>     delay_types, delay_types_p, delay_types_id,
#>     delay_types_groups, delay_max, delay_np_pmf_groups
#>   );
#> }
#> parameters{
#>   // gaussian process
#>   array[fixed ? 0 : 1] real<lower = ls_min,upper=ls_max> rho;  // length scale of noise GP
#>   array[fixed ? 0 : 1] real<lower = 0> alpha;    // scale of of noise GP
#>   vector[fixed ? 0 : M] eta;               // unconstrained noise
#>   // Rt
#>   vector[estimate_r] log_R;                // baseline reproduction number estimate (log)
#>   array[estimate_r] real initial_infections;    // seed infections
#>   array[estimate_r && seeding_time > 1 ? 1 : 0] real initial_growth; // seed growth rate
#>   array[bp_n > 0 ? 1 : 0] real<lower = 0> bp_sd; // standard deviation of breakpoint effect
#>   array[bp_n] real bp_effects;                   // Rt breakpoint effects
#>   // observation model
#>   array[delay_n_p] real delay_mean;         // mean of delays
#>   array[delay_n_p] real<lower = 0> delay_sd;  // sd of delays
#>   simplex[week_effect] day_of_week_simplex; // day of week reporting effect
#>   array[obs_scale] real<lower = 0, upper = 1> frac_obs;     // fraction of cases that are ultimately observed
#>   array[model_type] real<lower = 0> rep_phi;     // overdispersion of the reporting process
#> }
#> transformed parameters {
#>   vector[fixed ? 0 : noise_terms] noise;                    // noise generated by the gaussian process
#>   vector<lower = 0, upper = 10 * r_mean>[estimate_r > 0 ? ot_h : 0] R; // reproduction number
#>   vector[t] infections;                                     // latent infections
#>   vector[ot_h] reports;                                     // estimated reported cases
#>   vector[ot] obs_reports;                                   // observed estimated reported cases
#>   vector[delay_type_max[gt_id]] gt_rev_pmf;
#>   // GP in noise - spectral densities
#>   if (!fixed) {
#>     noise = update_gp(PHI, M, L, alpha[1], rho[1], eta, gp_type);
#>   }
#>   // Estimate latent infections
#>   if (estimate_r) {
#>     gt_rev_pmf = get_delay_rev_pmf(
#>       gt_id, delay_type_max[gt_id], delay_types_p, delay_types_id,
#>       delay_types_groups, delay_max, delay_np_pmf,
#>       delay_np_pmf_groups, delay_mean, delay_sd, delay_dist,
#>       1, 1, 0
#>     );
#>     R = update_Rt(
#>       ot_h, log_R[estimate_r], noise, breakpoints, bp_effects, stationary
#>     );
#>     infections = generate_infections(
#>       R, seeding_time, gt_rev_pmf, initial_infections, initial_growth, pop,
#>       future_time
#>     );
#>   } else {
#>     // via deconvolution
#>     infections = deconvolve_infections(
#>       shifted_cases, noise, fixed, backcalc_prior
#>     );
#>   }
#>   // convolve from latent infections to mean of observations
#>   if (delay_id) {
#>     vector[delay_type_max[delay_id]] delay_rev_pmf = get_delay_rev_pmf(
#>       delay_id, delay_type_max[delay_id], delay_types_p, delay_types_id,
#>       delay_types_groups, delay_max, delay_np_pmf,
#>       delay_np_pmf_groups, delay_mean, delay_sd, delay_dist,
#>       0, 1, 0
#>     );
#>     reports = convolve_to_report(infections, delay_rev_pmf, seeding_time);
#>   } else {
#>     reports = infections[(seeding_time + 1):t];
#>   }
#>   // weekly reporting effect
#>   if (week_effect > 1) {
#>     reports = day_of_week_effect(reports, day_of_week, day_of_week_simplex);
#>   }
#>   // scaling of reported cases by fraction observed
#>   if (obs_scale) {
#>     reports = scale_obs(reports, frac_obs[1]);
#>   }
#>   // truncate near time cases to observed reports
#>   if (trunc_id) {
#>     vector[delay_type_max[trunc_id]] trunc_rev_cmf = get_delay_rev_pmf(
#>       trunc_id, delay_type_max[trunc_id], delay_types_p, delay_types_id,
#>       delay_types_groups, delay_max, delay_np_pmf,
#>       delay_np_pmf_groups, delay_mean, delay_sd, delay_dist,
#>       0, 1, 1
#>     );
#>     obs_reports = truncate(reports[1:ot], trunc_rev_cmf, 0);
#>   } else {
#>     obs_reports = reports[1:ot];
#>   }
#> }
#> model {
#>   // priors for noise GP
#>   if (!fixed) {
#>     gaussian_process_lp(
#>       rho[1], alpha[1], eta, ls_meanlog, ls_sdlog, ls_min, ls_max, alpha_sd
#>     );
#>   }
#>   // penalised priors for delay distributions
#>   delays_lp(
#>     delay_mean, delay_mean_mean,
#>     delay_mean_sd, delay_sd, delay_sd_mean, delay_sd_sd,
#>     delay_dist, delay_weight
#>   );
#>   if (estimate_r) {
#>     // priors on Rt
#>     rt_lp(
#>       log_R, initial_infections, initial_growth, bp_effects, bp_sd, bp_n,
#>       seeding_time, r_logmean, r_logsd, prior_infections, prior_growth
#>     );
#>   }
#>   // prior observation scaling
#>   if (obs_scale) {
#>     frac_obs[1] ~ normal(obs_scale_mean, obs_scale_sd) T[0, 1];
#>   }
#>   // observed reports from mean of reports (update likelihood)
#>   if (likelihood) {
#>     report_lp(
#>       cases, obs_reports, rep_phi, phi_mean, phi_sd, model_type, obs_weight
#>     );
#>   }
#> }
#> generated quantities {
#>   array[ot_h] int imputed_reports;
#>   vector[estimate_r > 0 ? 0 : ot_h] gen_R;
#>   array[ot_h] real r;
#>   real gt_mean;
#>   real gt_var;
#>   vector[return_likelihood ? ot : 0] log_lik;
#>   if (estimate_r){
#>     // estimate growth from estimated Rt
#>     gt_mean = rev_pmf_mean(gt_rev_pmf, 1);
#>     gt_var = rev_pmf_var(gt_rev_pmf, 1, gt_mean);
#>     r = R_to_growth(R, gt_mean, gt_var);
#>   } else {
#>     // sample generation time
#>     array[delay_n_p] real delay_mean_sample =
#>       normal_rng(delay_mean_mean, delay_mean_sd);
#>     array[delay_n_p] real delay_sd_sample =
#>       normal_rng(delay_sd_mean, delay_sd_sd);
#>     vector[delay_type_max[gt_id]] sampled_gt_rev_pmf = get_delay_rev_pmf(
#>       gt_id, delay_type_max[gt_id], delay_types_p, delay_types_id,
#>       delay_types_groups, delay_max, delay_np_pmf,
#>       delay_np_pmf_groups, delay_mean_sample, delay_sd_sample,
#>       delay_dist, 1, 1, 0
#>     );
#>     gt_mean = rev_pmf_mean(sampled_gt_rev_pmf, 1);
#>     gt_var = rev_pmf_var(sampled_gt_rev_pmf, 1, gt_mean);
#>     // calculate Rt using infections and generation time
#>     gen_R = calculate_Rt(
#>       infections, seeding_time, sampled_gt_rev_pmf, rt_half_window
#>     );
#>     // estimate growth from calculated Rt
#>     r = R_to_growth(gen_R, gt_mean, gt_var);
#>   }
#>   // simulate reported cases
#>   imputed_reports = report_rng(reports, rep_phi, model_type);
#>   // log likelihood of model
#>   if (return_likelihood) {
#>     log_lik = report_log_lik(
#>       cases, obs_reports, rep_phi, model_type, obs_weight
#>     );
#>   }
#> } 
#> 
#> $method
#> [1] "sampling"
#> 
#> $cores
#> [1] 1
#> 
#> $warmup
#> [1] 1000
#> 
#> $chains
#> [1] 4
#> 
#> $save_warmup
#> [1] FALSE
#> 
#> $seed
#> [1] 36003562
#> 
#> $future
#> [1] FALSE
#> 
#> $max_execution_time
#> [1] Inf
#> 
#> $control
#> $control$adapt_delta
#> [1] 0.95
#> 
#> $control$max_treedepth
#> [1] 15
#> 
#> 
#> $iter
#> [1] 1500
#>
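
As a further, hedged illustration, the sampler settings shown in the output above can be overridden by supplying a modified stan_opts() object. The argument names used below (chains, warmup, samples) are assumptions based on stan_opts() and may differ between package versions; the sketch is not run as part of the documented example.

# sketch: override sampler settings via stan_opts() (argument names assumed)
args <- create_stan_args(
  stan = stan_opts(chains = 2, warmup = 500, samples = 1000),
  verbose = TRUE
)
# fields of the returned list mirror the default output shown above
args$chains
args$warmup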