[Stable] Generates a list of arguments as required by rstan::sampling or rstan::vb by combining the required options with the data and the type of initialisation. Initialisation defaults to "random", but it is expected that create_initial_conditions() will be used to supply initial conditions.

create_stan_args(
  stan = stan_opts(),
  data = NULL,
  init = "random",
  verbose = FALSE
)

Arguments

stan

A list of stan options as generated by stan_opts(). Defaults to stan_opts(). Can be used to override data, init, and verbose settings if desired.

data

A list of stan data as created by create_stan_data().

init

Initial conditions passed to rstan. Defaults to "random" but can also be a function (as supplied by create_initial_conditions()).

verbose

Logical, defaults to FALSE. Should verbose progress messages be returned?

Value

A list of stan arguments.

Examples

# default settings
create_stan_args()
#> $data
#> NULL
#> 
#> $init
#> [1] "random"
#> 
#> $refresh
#> [1] 0
#> 
#> $object
#> S4 class stanmodel 'estimate_infections' coded as follows:
#> functions {
#> 
#> // discretised truncated gamma pmf
#> vector discretised_gamma_pmf(int[] y, real mu, real sigma, int max_val) {
#>   int n = num_elements(y);
#>   vector[n] pmf;
#>   real trunc_pmf;
#>   // calculate alpha and beta for gamma distribution
#>   real small = 1e-5;
#>   real large = 1e8;
#>   real c_sigma = fmax(small, sigma);
#>   real c_mu = fmax(small, mu);
#>   real alpha = ((c_mu) / c_sigma)^2;
#>   real beta = (c_mu) / (c_sigma^2);
#>   // account for numerical issues
#>   alpha = fmax(small, alpha);
#>   alpha = fmin(large, alpha);
#>   beta = fmax(small, beta);
#>   beta = fmin(large, beta);
#>   // calculate pmf
#>   trunc_pmf = gamma_cdf(max_val + 1, alpha, beta) - gamma_cdf(1, alpha, beta);
#>   for (i in 1:n){
#>     pmf[i] = (gamma_cdf(y[i] + 1, alpha, beta) - gamma_cdf(y[i], alpha, beta)) /
#>     trunc_pmf;
#>   }
#>   return(pmf);
#> }
#> 
#> // discretised truncated lognormal pmf
#> vector discretised_lognormal_pmf(int[] y, real mu, real sigma, int max_val) {
#>   int n = num_elements(y);
#>   vector[n] pmf;
#>   real small = 1e-5;
#>   real c_sigma = fmax(small, sigma);
#>   real c_mu = fmax(small, mu);
#>   vector[n] adj_y = to_vector(y) + small;
#>   vector[n] upper_y = (log(adj_y + 1) - c_mu) / c_sigma;
#>   vector[n] lower_y = (log(adj_y) - c_mu) / c_sigma;
#>   real max_cdf = normal_cdf((log(max_val + small) - c_mu) / c_sigma, 0.0, 1.0);
#>   real min_cdf = normal_cdf((log(small) - c_mu) / c_sigma, 0.0, 1.0);
#>   real trunc_cdf = max_cdf - min_cdf;
#>   for (i in 1:n) {
#>     pmf[i] = (normal_cdf(upper_y[i], 0.0, 1.0) - normal_cdf(lower_y[i], 0.0, 1.0)) /
#>     trunc_cdf;
#>   }
#>   return(pmf);
#> }
#> 
#> // reverse a pmf
#> vector reverse_mf(vector pmf, int max_pmf) {
#>   vector[max_pmf] rev_pmf;
#>   for (d in 1:max_pmf) {
#>     rev_pmf[d] = pmf[max_pmf - d + 1];
#>   }
#>   return rev_pmf;
#> }
#> 
#> // discretised delta pmf
#> vector discretised_delta_pmf(int[] y) {
#>   int n = num_elements(y);
#>   vector[n] pmf;
#>   pmf[y[1]] = 1;
#>   if (n > 1) {
#>     for (i in 2:n) {
#>       pmf[y[i]] = 0;
#>     }
#>   }
#>   return(pmf);
#> }
#> 
#> // convolve a pdf and case vector
#> vector convolve(vector cases, vector rev_pmf) {
#>     int t = num_elements(cases);
#>     int max_pmf = num_elements(rev_pmf);
#>     vector[t] conv_cases = rep_vector(1e-5, t);
#>     for (s in 1:t) {
#>         conv_cases[s] += dot_product(cases[max(1, (s - max_pmf + 1)):s],
#>                                      tail(rev_pmf, min(max_pmf, s)));
#>     }
#>    return(conv_cases);
#>   }
#> 
#> 
#> // convolve latent infections to reported (but still unobserved) cases
#> vector convolve_to_report(vector infections,
#>                           real[] delay_mean,
#>                           real[] delay_sd,
#>                           int[] max_delay,
#>                           int seeding_time) {
#>   int t = num_elements(infections);
#>   vector[t - seeding_time] reports;
#>   vector[t] unobs_reports = infections;
#>   int delays = num_elements(delay_mean);
#>   if (delays) {
#>     for (s in 1:delays) {
#>       vector[max_delay[s]] pmf = rep_vector(1e-5, max_delay[s]);
#>       int delay_indexes[max_delay[s]];
#>       for (i in 1:max_delay[s]) {
#>         delay_indexes[i] = max_delay[s] - i;
#>       }
#>       pmf = pmf + discretised_lognormal_pmf(delay_indexes, delay_mean[s],
#>                                             delay_sd[s], max_delay[s]);
#>       unobs_reports = convolve(unobs_reports, pmf);
#>     }
#>     reports = unobs_reports[(seeding_time + 1):t];
#>   }else{
#>     reports = infections[(seeding_time + 1):t];
#>   }
#>   return(reports);
#> }
#> 
#> void delays_lp(real[] delay_mean, real[] delay_mean_mean, real[] delay_mean_sd,
#>                real[] delay_sd, real[] delay_sd_mean, real[] delay_sd_sd, int weight){
#>     int delays = num_elements(delay_mean);
#>     if (delays) {
#>       for (s in 1:delays) {
#>        target += normal_lpdf(delay_mean[s] | delay_mean_mean[s], delay_mean_sd[s]) * weight;
#>        target += normal_lpdf(delay_sd[s] | delay_sd_mean[s], delay_sd_sd[s]) * weight;
#>      }
#>   }
#> }
#> 
#> // eigenvalues for approximate hilbert space gp
#> // see here for details: https://arxiv.org/pdf/2004.11408.pdf
#> real lambda(real L, int m) {
#>   real lam;
#>   lam = ((m*pi())/(2*L))^2;
#>   return lam;
#> }
#> // eigenfunction for approximate hilbert space gp
#> // see here for details: https://arxiv.org/pdf/2004.11408.pdf
#> vector phi(real L, int m, vector x) {
#>   vector[rows(x)] fi;
#>   fi = 1/sqrt(L) * sin(m*pi()/(2*L) * (x+L));
#>   return fi;
#> }
#> // spectral density of the exponential quadratic kernel
#> real spd_se(real alpha, real rho, real w) {
#>   real S;
#>   S = (alpha^2) * sqrt(2*pi()) * rho * exp(-0.5*(rho^2)*(w^2));
#>   return S;
#> }
#> // spectral density of the Matern 3/2 kernel
#> real spd_matern(real alpha, real rho, real w) {
#>   real S;
#>   S = 4*alpha^2 * (sqrt(3)/rho)^3 * 1/((sqrt(3)/rho)^2 + w^2)^2;
#>   return S;
#> }
#> // setup gaussian process noise dimensions
#> int setup_noise(int ot_h, int t, int horizon, int estimate_r,
#>                 int stationary, int future_fixed, int fixed_from) {
#>   int noise_time = estimate_r > 0 ? (stationary > 0 ? ot_h : ot_h - 1) : t;
#>   int noise_terms =  future_fixed > 0 ? (noise_time - horizon + fixed_from) : noise_time;
#>   return(noise_terms);
#> }
#> // setup approximate gaussian process
#> matrix setup_gp(int M, real L, int dimension) {
#>   vector[dimension] time;
#>   matrix[dimension, M] PHI;
#>   real half_dim = dimension / 2.0;
#>   for (s in 1:dimension) {
#>     time[s] = (s - half_dim) / half_dim;
#>   }
#>   for (m in 1:M){
#>     PHI[,m] = phi(L, m, time);
#>   }
#>   return(PHI);
#> }
#> // update gaussian process using spectral densities
#> vector update_gp(matrix PHI, int M, real L, real alpha,
#>                  real rho, vector eta, int type) {
#>   vector[M] diagSPD;    // spectral density
#>   vector[M] SPD_eta;    // spectral density * noise
#>   int noise_terms = rows(PHI);
#>   vector[noise_terms] noise = rep_vector(1e-6, noise_terms);
#>   real unit_rho = rho / noise_terms;
#>   // GP in noise - spectral densities
#>   if (type == 0) {
#>     for(m in 1:M){
#>       diagSPD[m] =  sqrt(spd_se(alpha, unit_rho, sqrt(lambda(L, m))));
#>     }
#>   }else if (type == 1) {
#>     for(m in 1:M){
#>       diagSPD[m] =  sqrt(spd_matern(alpha, unit_rho, sqrt(lambda(L, m))));
#>     }
#>   }
#>   SPD_eta = diagSPD .* eta;
#>   noise = noise + PHI[,] * SPD_eta;
#>   return(noise);
#> }
#> // priors for gaussian process
#> void gaussian_process_lp(real rho, real alpha, vector eta,
#>                          real ls_meanlog, real ls_sdlog,
#>                          real ls_min, real ls_max, real alpha_sd) {
#>   if (ls_sdlog > 0) {
#>     rho ~ lognormal(ls_meanlog, ls_sdlog) T[ls_min, ls_max];
#>   } else {
#>     rho ~ inv_gamma(1.499007, 0.057277 * ls_max) T[ls_min, ls_max];
#>   }
#>   alpha ~ normal(0, alpha_sd);
#>   eta ~ std_normal();
#> }
#> 
#> // update a vector of Rts
#> vector update_Rt(vector input_R, real log_R, vector noise, int[] bps,
#>                  real[] bp_effects, int stationary) {
#>   // define control parameters
#>   int t = num_elements(input_R);
#>   int bp_n = num_elements(bp_effects);
#>   int bp_c = 0;
#>   int gp_n = num_elements(noise);
#>   // define result vectors
#>   vector[t] bp = rep_vector(0, t);
#>   vector[t] gp = rep_vector(0, t);
#>   vector[t] R;
#>   // initialise breakpoints
#>   if (bp_n) {
#>     for (s in 1:t) {
#>       if (bps[s]) {
#>         bp_c += bps[s];
#>         bp[s] = bp_effects[bp_c];
#>       }
#>     }
#>     bp = cumulative_sum(bp);
#>   }
#>   //initialise gaussian process
#>   if (gp_n) {
#>     if (stationary) {
#>       gp[1:gp_n] = noise;
#>       // fix future gp based on last estimated
#>       if (t > gp_n) {
#>         gp[(gp_n + 1):t] = rep_vector(noise[gp_n], t - gp_n);
#>       }
#>     }else{
#>       gp[2:(gp_n + 1)] = noise;
#>       gp = cumulative_sum(gp);
#>     }
#>   }
#>   // Calculate Rt
#>   R = rep_vector(log_R, t) + bp + gp;
#>   R = exp(R);
#>   return(R);
#> }
#> // Rt priors
#> void rt_lp(vector log_R, real[] initial_infections, real[] initial_growth,
#>            real[] bp_effects, real[] bp_sd, int bp_n, int seeding_time,
#>            real r_logmean, real r_logsd, real prior_infections,
#>            real prior_growth) {
#>   // prior on R
#>   log_R ~ normal(r_logmean, r_logsd);
#>   //breakpoint effects on Rt
#>   if (bp_n > 0) {
#>     bp_sd[1] ~ normal(0, 0.1) T[0,];
#>     bp_effects ~ normal(0, bp_sd[1]);
#>   }
#>   // initial infections
#>   initial_infections ~ normal(prior_infections, 0.2);
#>   if (seeding_time > 1) {
#>     initial_growth ~ normal(prior_growth, 0.2);
#>   }
#> }
#> 
#> // calculate infectiousness (weighted sum of the generation time and infections)
#> // for a single time point
#> real update_infectiousness(vector infections, vector gt_pmf,
#>                            int seeding_time, int max_gt, int index){
#>   // work out where to start the convolution of past infections with the
#>   // generation time distribution: (current_time - maximal generation time) if
#>   // that is >= 1, otherwise 1
#>   int inf_start = max(1, (index + seeding_time - max_gt));
#>   // work out where to end the convolution: (current_time - 1)
#>   int inf_end = (index + seeding_time - 1);
#>   // number of indices of the generation time to sum over (inf_end - inf_start + 1)
#>   int pmf_accessed = min(max_gt, index + seeding_time - 1);
#>   // calculate the elements of the convolution
#>   real new_inf = dot_product(infections[inf_start:inf_end], tail(gt_pmf, pmf_accessed));
#>   return(new_inf);
#> }
#> // generate infections by using Rt = Rt-1 * sum(reversed generation time pmf * infections)
#> vector generate_infections(vector oR, int uot,
#>                            real gt_mean, real gt_sd, int max_gt,
#>                            real[] initial_infections, real[] initial_growth,
#>                            int pop, int ht) {
#>   // time indices and storage
#>   int ot = num_elements(oR);
#>   int nht = ot - ht;
#>   int t = ot + uot;
#>   vector[ot] R = oR;
#>   real exp_adj_Rt;
#>   vector[t] infections = rep_vector(1e-5, t);
#>   vector[ot] cum_infections = rep_vector(0, ot);
#>   vector[ot] infectiousness = rep_vector(1e-5, ot);
#>   // generation time pmf
#>   vector[max_gt] gt_pmf = rep_vector(1e-5, max_gt);
#>   // revert indices (this is for later doing the convolution with recent cases)
#>   int gt_indexes[max_gt];
#>   for (i in 1:(max_gt)) {
#>     gt_indexes[i] = max_gt - i + 1;
#>   }
#>   if (gt_sd > 0) {
#>     // SD > 0: use discretised gamma
#>     gt_pmf = gt_pmf + discretised_gamma_pmf(gt_indexes, gt_mean, gt_sd, max_gt);
#>   } else {
#>     // SD == 0: use discretised delta
#>     gt_pmf = gt_pmf + discretised_delta_pmf(gt_indexes);
#>   }
#>   // Initialise infections using daily growth
#>   infections[1] = exp(initial_infections[1]);
#>   if (uot > 1) {
#>     for (s in 2:uot) {
#>       infections[s] = exp(initial_infections[1] + initial_growth[1] * (s - 1));
#>     }
#>   }
#>   // calculate cumulative infections
#>   if (pop) {
#>     cum_infections[1] = sum(infections[1:uot]);
#>   }
#>   // iteratively update infections
#>   for (s in 1:ot) {
#>     infectiousness[s] += update_infectiousness(infections, gt_pmf, uot, max_gt, s);
#>     if (pop && s > nht) {
#>       exp_adj_Rt = exp(-R[s] * infectiousness[s] / (pop - cum_infections[nht]));
#>       exp_adj_Rt = exp_adj_Rt > 1 ? 1 : exp_adj_Rt;
#>       infections[s + uot] = (pop - cum_infections[s]) * (1 - exp_adj_Rt);
#>     }else{
#>       infections[s + uot] += R[s] * infectiousness[s];
#>     }
#>     if (pop && s < ot) {
#>       cum_infections[s + 1] = cum_infections[s] + infections[s + uot];
#>     }
#>   }
#>   return(infections);
#> }
#> // backcalculate infections using mean shifted cases and non-parametric noise
#> vector deconvolve_infections(vector shifted_cases, vector noise, int fixed,
#>                              int prior) {
#>   int t = num_elements(shifted_cases);
#>   vector[t] infections = rep_vector(1e-5, t);
#>   if(!fixed) {
#>     vector[t] exp_noise = exp(noise);
#>     if (prior == 1) {
#>       infections = infections + shifted_cases .* exp_noise;
#>     }else if (prior == 0) {
#>      infections = infections + exp_noise;
#>     }else if (prior == 2) {
#>       infections[1] = infections[1] + shifted_cases[1] * exp_noise[1];
#>       for (i in 2:t) {
#>         infections[i] = infections[i - 1] * exp_noise[i];
#>       }
#>     }
#>   }else{
#>     infections = infections + shifted_cases;
#>   }
#>   return(infections);
#> }
#> // Update the log density for the generation time distribution mean and sd
#> void generation_time_lp(real[] gt_mean, real gt_mean_mean, real gt_mean_sd,
#>                         real[] gt_sd, real gt_sd_mean, real gt_sd_sd, int weight) {
#>   if (gt_mean_sd > 0) {
#>     target += normal_lpdf(gt_mean[1] | gt_mean_mean, gt_mean_sd) * weight;
#>   }
#>   if (gt_sd_sd > 0) {
#>     target += normal_lpdf(gt_sd[1] | gt_sd_mean, gt_sd_sd) * weight;
#>   }
#> }
#> 
#> 
#> // apply day of the week effect
#> vector day_of_week_effect(vector reports, int[] day_of_week, vector effect) {
#>   int t = num_elements(reports);
#>   int wl = num_elements(effect);
#>   // scale day of week effect
#>   vector[wl] scaled_effect = wl * effect;
#>   vector[t] scaled_reports;
#>   for (s in 1:t) {
#>     // add reporting effects (adjust for simplex scale)
#>     scaled_reports[s] = reports[s] * scaled_effect[day_of_week[s]];
#>    }
#>   return(scaled_reports);
#> }
#> // Scale observations by fraction reported and update log density of
#> // fraction reported
#> vector scale_obs(vector reports, real frac_obs) {
#>   int t = num_elements(reports);
#>   vector[t] scaled_reports;
#>   scaled_reports = reports * frac_obs;
#>   return(scaled_reports);
#> }
#> // Calculate a truncation CMF
#> vector truncation_cmf(real trunc_mean, real trunc_sd, int trunc_max) {
#>     int  trunc_indexes[trunc_max];
#>     vector[trunc_max] cmf;
#>     for (i in 1:(trunc_max)) {
#>       trunc_indexes[i] = i - 1;
#>     }
#>     cmf = discretised_lognormal_pmf(trunc_indexes, trunc_mean, trunc_sd, trunc_max);
#>     cmf[1] = cmf[1] + 1e-8;
#>     cmf = cumulative_sum(cmf);
#>     cmf = reverse_mf(cmf, trunc_max);
#>     return(cmf);
#> }
#> // Truncate observed data by some truncation distribution
#> vector truncate(vector reports, real[] truncation_mean, real[] truncation_sd,
#>                 int[] truncation_max, int reconstruct) {
#>   int t = num_elements(reports);
#>   int truncation = num_elements(truncation_mean);
#>   vector[t] trunc_reports = reports;
#>   if (truncation) {
#>     // Calculate cmf of truncation delay
#>     int trunc_max = truncation_max[1] > t ? t : truncation_max[1];
#>     int  trunc_indexes[trunc_max];
#>     vector[trunc_max] cmf;
#>     int first_t = t - trunc_max + 1;
#>     cmf = truncation_cmf(truncation_mean[1], truncation_sd[1], trunc_max);
#>     // Apply cdf of truncation delay to truncation max last entries in reports
#>     if (reconstruct) {
#>       trunc_reports[first_t:t] = trunc_reports[first_t:t] ./ cmf;
#>     }else{
#>       trunc_reports[first_t:t] = trunc_reports[first_t:t] .* cmf;
#>     }
#>   }
#>   return(trunc_reports);
#> }
#> // Truncation distribution priors
#> void truncation_lp(real[] truncation_mean, real[] truncation_sd,
#>                    real[] trunc_mean_mean, real[] trunc_mean_sd,
#>                    real[] trunc_sd_mean, real[] trunc_sd_sd) {
#>   int truncation = num_elements(truncation_mean);
#>   if (truncation) {
#>     truncation_mean ~ normal(trunc_mean_mean, trunc_mean_sd);
#>     truncation_sd ~ normal(trunc_sd_mean, trunc_sd_sd);
#>   }
#> }
#> // update log density for reported cases
#> void report_lp(int[] cases, vector reports,
#>                real[] rep_phi, real phi_mean, real phi_sd,
#>                int model_type, real weight) {
#>   real sqrt_phi = 1e5;
#>   if (model_type) {
#>     // the reciprocal overdispersion parameter (phi)
#>     rep_phi[model_type] ~ normal(phi_mean, phi_sd) T[0,];
#>     sqrt_phi = 1 / sqrt(rep_phi[model_type]);
#>     // defer to poisson if phi is large, to avoid overflow or
#>     // if poisson specified
#>   }
#>   if (sqrt_phi > 1e4) {
#>     if (weight == 1) {
#>       cases ~ poisson(reports);
#>     }else{
#>       target += poisson_lpmf(cases | reports) * weight;
#>     }
#>   } else {
#>     if (weight == 1) {
#>       cases ~ neg_binomial_2(reports, sqrt_phi);
#>     }else{
#>       target += neg_binomial_2_lpmf(cases | reports, sqrt_phi);
#>     }
#>   }
#>   
#> }
#> // update log likelihood (as above but not vectorised and returning log likelihood)
#> vector report_log_lik(int[] cases, vector reports,
#>                       real[] rep_phi, int model_type, real weight) {
#>   int t = num_elements(reports);
#>   vector[t] log_lik;
#>   real sqrt_phi = 1e5;
#>   if (model_type) {
#>   // the reciprocal overdispersion parameter (phi)
#>   sqrt_phi = 1 / sqrt(rep_phi[model_type]);
#>   }
#> 
#>   // defer to poisson if phi is large, to avoid overflow
#>   if (sqrt_phi > 1e4) {
#>     for (i in 1:t) {
#>       log_lik[i] = poisson_lpmf(cases[i] | reports[i]) * weight;
#>     }
#>   } else {
#>     for (i in 1:t) {
#>       log_lik[i] = neg_binomial_2_lpmf(cases[i] | reports[i], sqrt_phi) * weight;
#>     }
#>     }
#>   return(log_lik);
#> }
#> // sample reported cases from the observation model
#> int[] report_rng(vector reports, real[] rep_phi, int model_type) {
#>   int t = num_elements(reports);
#>   int sampled_reports[t];
#>   real sqrt_phi = 1e5;
#>   if (model_type) {
#>     sqrt_phi = 1 / sqrt(rep_phi[model_type]);
#>   }
#>     
#>   for (s in 1:t) {
#>     // defer to poisson if phi is large, to avoid overflow
#>     if (sqrt_phi > 1e4) {
#>       sampled_reports[s] = poisson_rng(reports[s] > 1e8 ? 1e8 : reports[s]);
#>     } else {
#>       sampled_reports[s] = neg_binomial_2_rng(reports[s] > 1e8 ? 1e8 : reports[s], sqrt_phi);
#>     }
#>   }
#>   return(sampled_reports);
#> }
#> 
#> // calculate Rt directly from inferred infections
#> vector calculate_Rt(vector infections, int seeding_time,
#>                     real gt_mean, real gt_sd, int max_gt,
#>                     int smooth) {
#>   vector[max_gt] gt_pmf;
#>   int gt_indexes[max_gt];
#>   int t = num_elements(infections);
#>   int ot = t - seeding_time;
#>   vector[ot] R;
#>   vector[ot] sR;
#>   vector[ot] infectiousness = rep_vector(1e-5, ot);
#>   // calculate PMF of the generation time
#>   for (i in 1:(max_gt)) {
#>     gt_indexes[i] = max_gt - i + 1;
#>   }
#>   if (gt_sd > 0) {
#>     gt_pmf = discretised_gamma_pmf(gt_indexes, gt_mean, gt_sd, max_gt);
#>   } else {
#>     gt_pmf = discretised_delta_pmf(gt_indexes);
#>   }
#>   // calculate Rt using Cori et al. approach
#>   for (s in 1:ot) {
#>     infectiousness[s] += update_infectiousness(infections, gt_pmf, seeding_time, max_gt, s);
#>     R[s] = infections[s + seeding_time] / infectiousness[s];
#>   }
#>   if (smooth) {
#>     for (s in 1:ot) {
#>       real window = 0;
#>       sR[s] = 0;
#>       for (i in max(1, s - smooth):min(ot, s + smooth)) {
#>         sR[s] += R[i];
#>         window += 1;
#>       }
#>       sR[s] = sR[s] / window;
#>     }
#>   }else{
#>     sR = R;
#>   }
#>   return(sR);
#> }
#> // Convert an estimate of Rt to growth
#> real[] R_to_growth(vector R, real gt_mean, real gt_sd) {
#>   int t = num_elements(R);
#>   real r[t];
#>   if (gt_sd > 0) {
#>     real k = pow(gt_sd / gt_mean, 2);
#>     for (s in 1:t) {
#>       r[s] = (pow(R[s], k) - 1) / (k * gt_mean);
#>     }
#>   } else {
#>     // limit as gt_sd -> 0
#>     for (s in 1:t) {
#>       r[s] = log(R[s]) / gt_mean;
#>     }
#>   }
#>   return(r);
#> }
#> }
#> 
#> 
#> data {
#> 
#>   int t;                                            // unobserved time
#>   int seeding_time;                                 // time period used for seeding and not observed
#>   int horizon;                                      // forecast horizon
#>   int future_time;                                  // time in future for Rt
#>   int<lower = 0> cases[t - horizon - seeding_time]; // observed cases
#>   vector<lower = 0>[t] shifted_cases;               // prior infections (for backcalculation)
#> 
#>   int delays;                  // no. of delay distributions
#>   real delay_mean_sd[delays];  // prior sd of mean incubation period
#>   real delay_mean_mean[delays];// prior mean of mean incubation period
#>   real delay_sd_mean[delays];  // prior sd of sd of incubation period
#>   real delay_sd_sd[delays];    // prior sd of sd of incubation period
#>   int max_delay[delays];       // maximum incubation period
#> 
#>   real L;				                     // boundary value for infections gp
#>   int<lower=1> M;			               // basis functions for infections gp
#>   real ls_meanlog;                   // meanlog for gp lengthscale prior
#>   real ls_sdlog;                     // sdlog for gp lengthscale prior
#>   real<lower=0> ls_min;              // Lower bound for the lengthscale
#>   real<lower=0> ls_max;              // Upper bound for the lengthscale
#>   real alpha_sd;                     // standard deviation of the alpha gp kernel parameter
#>   int gp_type;                       // type of gp, 0 = squared exponential, 1 = 3/2 matern
#>   int stationary;                    // is underlying gaussian process first or second order
#>   int fixed;                         //  should a gaussian process be used
#> 
#>   real gt_mean_sd;                   // prior sd of mean generation time
#>   real gt_mean_mean;                 // prior mean of mean generation time
#>   real gt_sd_mean;                   // prior mean of sd of generation time
#>   real gt_sd_sd;                     // prior sd of sd of generation time
#>   int max_gt;                        // maximum generation time
#> 
#>   int estimate_r;                    // should the reproduction no be estimated (1 = yes)
#>   real prior_infections;             // prior for initial infections
#>   real prior_growth;                 // prior on initial growth rate
#>   real <lower = 0> r_mean;           // prior mean of reproduction number
#>   real <lower = 0> r_sd;             // prior standard deviation of reproduction number
#>   int bp_n;                          // no of breakpoints (0 = no breakpoints)
#>   int breakpoints[t - seeding_time]; // when do breakpoints occur
#>   int future_fixed;                  // is underlying future Rt assumed to be fixed
#>   int fixed_from;                    // Reference date for when Rt estimation should be fixed
#>   int pop;                           // Initial susceptible population
#> 
#>   int backcalc_prior;                // Prior type to use for backcalculation
#>   int rt_half_window;                // Half the moving average window used when calculating Rt
#> 
#>   int day_of_week[t - seeding_time]; // day of the week indicator (1 - 7)
#>   int model_type;                    // type of model: 0 = poisson otherwise negative binomial
#>   real phi_mean;                      // Mean and sd of the normal prior for the
#>   real phi_sd;                        // reporting process
#>   int week_effect;                   // length of week effect
#>   int truncation;                    // 1/0 indicating if truncation should be adjusted for
#>   real trunc_mean_mean[truncation];  // truncation mean of mean
#>   real trunc_mean_sd[truncation];    // truncation sd of mean
#>   real trunc_sd_mean[truncation];    // truncation mean of sd
#>   real trunc_sd_sd[truncation];      // truncation sd of sd
#>   int max_truncation[truncation];    // maximum truncation supported
#>   int obs_scale;                     // logical controlling scaling of observations
#>   real obs_scale_mean;               // mean scaling factor for observations
#>   real obs_scale_sd;                 // standard deviation of observation scaling
#>   real obs_weight;                   // weight given to observation in log density
#>   int likelihood;                    // Should the likelihood be included in the model
#>   int return_likelihood; // Should the likelihood be returned by the model
#> }
#> 
#> transformed data{
#>   // observations
#>   int ot = t - seeding_time - horizon;  // observed time
#>   int ot_h = ot + horizon;  // observed time + forecast horizon
#>   // gaussian process
#>   int noise_terms = setup_noise(ot_h, t, horizon, estimate_r, stationary, future_fixed, fixed_from);
#>   matrix[noise_terms, M] PHI = setup_gp(M, L, noise_terms);  // basis function
#>   // Rt
#>   real r_logmean = log(r_mean^2 / sqrt(r_sd^2 + r_mean^2));
#>   real r_logsd = sqrt(log(1 + (r_sd^2 / r_mean^2)));
#> }
#> 
#> parameters{
#>   // gaussian process
#>   real<lower = ls_min,upper=ls_max> rho[fixed ? 0 : 1];  // length scale of noise GP
#>   real<lower = 0> alpha[fixed ? 0 : 1];    // scale of of noise GP
#>   vector[fixed ? 0 : M] eta;               // unconstrained noise
#>   // Rt
#>   vector[estimate_r] log_R;                // baseline reproduction number estimate (log)
#>   real initial_infections[estimate_r] ;    // seed infections
#>   real initial_growth[estimate_r && seeding_time > 1 ? 1 : 0]; // seed growth rate
#>   real<lower = 0, upper = max_gt> gt_mean[estimate_r && gt_mean_sd > 0]; // mean of generation time (if uncertain)
#>   real<lower = 0> gt_sd[estimate_r && gt_sd_sd > 0];       // sd of generation time (if uncertain)
#>   real<lower = 0> bp_sd[bp_n > 0 ? 1 : 0]; // standard deviation of breakpoint effect
#>   real bp_effects[bp_n];                   // Rt breakpoint effects
#>   // observation model
#>   real delay_mean[delays];                 // mean of delays
#>   real<lower = 0> delay_sd[delays];        // sd of delays
#>   simplex[week_effect] day_of_week_simplex;// day of week reporting effect
#>   real<lower = 0> frac_obs[obs_scale];     // fraction of cases that are ultimately observed
#>   real truncation_mean[truncation];        // mean of truncation
#>   real<lower = 0> truncation_sd[truncation]; // sd of truncation
#>   real<lower = 0> rep_phi[model_type];     // overdispersion of the reporting process
#> }
#> 
#> transformed parameters {
#>   vector[fixed ? 0 : noise_terms] noise;                    // noise  generated by the gaussian process
#>   vector<lower = 0, upper = 10 * r_mean>[estimate_r > 0 ? ot_h : 0] R; // reproduction number
#>   vector[t] infections;                                     // latent infections
#>   vector[ot_h] reports;                                     // estimated reported cases
#>   vector[ot] obs_reports;                                   // observed estimated reported cases
#>   // GP in noise - spectral densities
#>   if (!fixed) {
#>     noise = update_gp(PHI, M, L, alpha[1], rho[1], eta, gp_type);
#>   }
#>   // Estimate latent infections
#>   if (estimate_r) {
#>     // via Rt
#>     real set_gt_mean = (gt_mean_sd > 0 ? gt_mean[1] : gt_mean_mean);
#>     real set_gt_sd = (gt_sd_sd > 0 ? gt_sd[1] : gt_sd_mean);
#>     R = update_Rt(R, log_R[estimate_r], noise, breakpoints, bp_effects, stationary);
#>     infections = generate_infections(R, seeding_time, set_gt_mean, set_gt_sd, max_gt,
#>                                      initial_infections, initial_growth,
#>                                      pop, future_time);
#>   } else {
#>     // via deconvolution
#>     infections = deconvolve_infections(shifted_cases, noise, fixed, backcalc_prior);
#>   }
#>   // convolve from latent infections to mean of observations
#>   reports = convolve_to_report(infections, delay_mean, delay_sd, max_delay, seeding_time);
#>  // weekly reporting effect
#>  if (week_effect > 1) {
#>    reports = day_of_week_effect(reports, day_of_week, day_of_week_simplex);
#>   }
#>   // scaling of reported cases by fraction observed
#>  if (obs_scale) {
#>    reports = scale_obs(reports, frac_obs[1]);
#>  }
#>  // truncate near time cases to observed reports
#>  obs_reports = truncate(reports[1:ot], truncation_mean, truncation_sd,
#>                         max_truncation, 0);
#> }
#> 
#> model {
#>   // priors for noise GP
#>   if (!fixed) {
#>     gaussian_process_lp(rho[1], alpha[1], eta, ls_meanlog,
#>                         ls_sdlog, ls_min, ls_max, alpha_sd);
#>   }
#>   // penalised priors for delay distributions
#>   delays_lp(delay_mean, delay_mean_mean, delay_mean_sd, delay_sd, delay_sd_mean, delay_sd_sd, t);
#>   // priors for truncation
#>   truncation_lp(truncation_mean, truncation_sd, trunc_mean_mean, trunc_mean_sd,
#>                 trunc_sd_mean, trunc_sd_sd);
#>   if (estimate_r) {
#>     // priors on Rt
#>     rt_lp(log_R, initial_infections, initial_growth, bp_effects, bp_sd, bp_n, seeding_time,
#>           r_logmean, r_logsd, prior_infections, prior_growth);
#>     // penalised_prior on generation interval
#>     generation_time_lp(gt_mean, gt_mean_mean, gt_mean_sd, gt_sd, gt_sd_mean, gt_sd_sd, ot);
#>   }
#>   // prior observation scaling
#>   if (obs_scale) {
#>     frac_obs[1] ~ normal(obs_scale_mean, obs_scale_sd) T[0,];
#>   }
#>   // observed reports from mean of reports (update likelihood)
#>   if (likelihood) {
#>     report_lp(cases, obs_reports, rep_phi, phi_mean, phi_sd,
#>               model_type, obs_weight);
#>   }
#> }
#> 
#> generated quantities {
#>   int imputed_reports[ot_h];
#>   vector[estimate_r > 0 ? 0: ot_h] gen_R;
#>   real r[ot_h];
#>   vector[return_likelihood > 1 ? ot : 0] log_lik;
#>   if (estimate_r){
#>     // estimate growth from estimated Rt
#>     real set_gt_mean = (gt_mean_sd > 0 ? gt_mean[1] : gt_mean_mean);
#>     real set_gt_sd = (gt_sd_sd > 0 ? gt_sd[1] : gt_sd_mean);
#>     r = R_to_growth(R, set_gt_mean, set_gt_sd);
#>   }else{
#>     // sample generation time
#>     real gt_mean_sample = (gt_mean_sd > 0 ? normal_rng(gt_mean_mean, gt_mean_sd) : gt_mean_mean);
#>     real gt_sd_sample = (gt_sd_sd > 0 ? normal_rng(gt_sd_mean, gt_sd_sd) : gt_sd_mean);
#>     // calculate Rt using infections and generation time
#>     gen_R = calculate_Rt(infections, seeding_time, gt_mean_sample, gt_sd_sample,
#>                          max_gt, rt_half_window);
#>     // estimate growth from calculated Rt
#>     r = R_to_growth(gen_R, gt_mean_sample, gt_sd_sample);
#>   }
#>   // simulate reported cases
#>   imputed_reports = report_rng(reports, rep_phi, model_type);
#>   // log likelihood of model
#>   if (return_likelihood) {
#>     log_lik = report_log_lik(cases, obs_reports, rep_phi, model_type,
#>                              obs_weight);
#>   }
#> } 
#> 
#> $method
#> [1] "sampling"
#> 
#> $cores
#> [1] 1
#> 
#> $warmup
#> [1] 250
#> 
#> $chains
#> [1] 4
#> 
#> $save_warmup
#> [1] FALSE
#> 
#> $seed
#> [1] 32139516
#> 
#> $future
#> [1] FALSE
#> 
#> $max_execution_time
#> [1] Inf
#> 
#> $control
#> $control$adapt_delta
#> [1] 0.95
#> 
#> $control$max_treedepth
#> [1] 15
#> 
#> 
#> $iter
#> [1] 750
#> 
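
# using approximate inference via rstan::vb instead of MCMC sampling
# (a sketch: assumes stan_opts() exposes a method argument; output not shown)
create_stan_args(stan = stan_opts(method = "vb"))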

# increasing warmup
create_stan_args(stan = stan_opts(warmup = 1000))
#> $data
#> NULL
#> 
#> $init
#> [1] "random"
#> 
#> $refresh
#> [1] 0
#> 
#> $object
#> S4 class stanmodel 'estimate_infections' coded as follows:
#> [Stan model code identical to the default settings example above]
#> 
#> $method
#> [1] "sampling"
#> 
#> $cores
#> [1] 1
#> 
#> $warmup
#> [1] 1000
#> 
#> $chains
#> [1] 4
#> 
#> $save_warmup
#> [1] FALSE
#> 
#> $seed
#> [1] 9246620
#> 
#> $future
#> [1] FALSE
#> 
#> $max_execution_time
#> [1] Inf
#> 
#> $control
#> $control$adapt_delta
#> [1] 0.95
#> 
#> $control$max_treedepth
#> [1] 15
#> 
#> 
#> $iter
#> [1] 1500
#>
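
# supplying initial conditions as a function rather than "random"
# (a sketch: assumes `stan_data` is a list previously built with
# create_stan_data() and that create_initial_conditions() accepts it; not run)
# create_stan_args(
#   data = stan_data,
#   init = create_initial_conditions(stan_data)
# )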