This is a sample page to test out code highlighting.
R Markdown
This is an R Markdown document. Markdown is a simple formatting syntax for authoring HTML, PDF, and MS Word documents. For more details on using R Markdown see http://rmarkdown.rstudio.com.
When you click the Knit button a document will be generated that includes both content as well as the output of any embedded R code chunks within the document. You can embed an R code chunk like this:
blogdown::shortcode('tweet', '852205086956818432')
Anyone know of an R package for interfacing with Alexa Skills? @thosjleeper @xieyihui @drob @JennyBryan @HoloMarkeD ?
— Jeff Leek (@jtleek) April 12, 2017
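The chunk above calls a Hugo shortcode, so its output is the embedded tweet itself. A more conventional chunk simply evaluates R code and prints the result; here is a minimal sketch, using the built-in cars dataset purely for illustration:
# Illustrative chunk: summarize and plot the built-in cars dataset
summary(cars)
plot(cars$speed, cars$dist, xlab = "Speed (mph)", ylab = "Stopping distance (ft)")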
Stan
/* Variable naming:
   obs  = observed
   cen  = (right) censored
   N    = number of samples
   M    = number of covariates
   bg   = established risk (or protective) factors
   biom = candidate biomarkers (candidate risk factors)
   tau  = scale parameter
*/
// Tomi Peltola, tomi.peltola@aalto.fi

functions {
  vector sqrt_vec(vector x) {
    vector[dims(x)[1]] res;

    for (m in 1:dims(x)[1]) {
      res[m] <- sqrt(x[m]);
    }

    return res;
  }

  vector hs_prior_lp(real r1_global, real r2_global,
                     vector r1_local, vector r2_local, real nu) {
    r1_global ~ normal(0.0, 1.0);
    r2_global ~ inv_gamma(0.5, 0.5);

    r1_local ~ normal(0.0, 1.0);
    r2_local ~ inv_gamma(0.5 * nu, 0.5 * nu);

    return (r1_global * sqrt(r2_global)) * r1_local .* sqrt_vec(r2_local);
  }

  vector bg_prior_lp(real r_global, vector r_local) {
    r_global ~ normal(0.0, 10.0);
    r_local ~ inv_chi_square(1.0);

    return r_global * sqrt_vec(r_local);
  }
}

data {
  int<lower=0> Nobs;
  int<lower=0> Ncen;
  int<lower=0> M_bg;
  int<lower=0> M_biom;
  vector[Nobs] yobs;
  vector[Ncen] ycen;
  matrix[Nobs, M_bg] Xobs_bg;
  matrix[Ncen, M_bg] Xcen_bg;
  matrix[Nobs, M_biom] Xobs_biom;
  matrix[Ncen, M_biom] Xcen_biom;
  real<lower=1> nu;
}

transformed data {
  real<lower=0> tau_mu;
  real<lower=0> tau_al;

  tau_mu <- 10.0;
  tau_al <- 10.0;
}

parameters {
  real<lower=0> tau_s_bg_raw;
  vector<lower=0>[M_bg] tau_bg_raw;

  real<lower=0> tau_s1_biom_raw;
  real<lower=0> tau_s2_biom_raw;
  vector<lower=0>[M_biom] tau1_biom_raw;
  vector<lower=0>[M_biom] tau2_biom_raw;

  real alpha_raw;
  vector[M_bg] beta_bg_raw;
  vector[M_biom] beta_biom_raw;

  real mu;
}

transformed parameters {
  vector[M_biom] beta_biom;
  vector[M_bg] beta_bg;
  real alpha;

  beta_biom <- hs_prior_lp(tau_s1_biom_raw, tau_s2_biom_raw,
                           tau1_biom_raw, tau2_biom_raw, nu) .* beta_biom_raw;
  beta_bg <- bg_prior_lp(tau_s_bg_raw, tau_bg_raw) .* beta_bg_raw;
  alpha <- exp(tau_al * alpha_raw);
}

model {
  yobs ~ weibull(alpha, exp(-(mu + Xobs_bg * beta_bg + Xobs_biom * beta_biom) / alpha));
  increment_log_prob(weibull_ccdf_log(ycen, alpha,
                     exp(-(mu + Xcen_bg * beta_bg + Xcen_biom * beta_biom) / alpha)));

  beta_biom_raw ~ normal(0.0, 1.0);
  beta_bg_raw ~ normal(0.0, 1.0);

  alpha_raw ~ normal(0.0, 1.0);

  mu ~ normal(0.0, tau_mu);
}
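The program above can be compiled and sampled from R. The following is a minimal sketch with rstan, assuming the model has been saved as weibull_hs.stan and that the outcome vectors and covariate matrices have already been built; all object names and the choice nu = 3 are placeholders rather than part of the original model:
library(rstan)

# Placeholder data list; element names must match the Stan data block above
stan_data <- list(
  Nobs = nrow(Xobs_bg), Ncen = nrow(Xcen_bg),
  M_bg = ncol(Xobs_bg), M_biom = ncol(Xobs_biom),
  yobs = yobs, ycen = ycen,
  Xobs_bg = Xobs_bg, Xcen_bg = Xcen_bg,
  Xobs_biom = Xobs_biom, Xcen_biom = Xcen_biom,
  nu = 3  # degrees of freedom for the horseshoe-type prior; illustrative value
)

# Compile and sample, then summarize the regression coefficients
fit <- stan(file = "weibull_hs.stan", data = stan_data,
            chains = 4, iter = 2000, seed = 1031)
print(fit, pars = c("alpha", "mu", "beta_bg", "beta_biom"))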
data {
  int<lower=1> N;
  int<lower=1> J;
  int<lower=1> K;
  int condition[N];
  int idx[N];
  vector[N] prob;
  vector[N] prompt;
  vector[3] a;
}

parameters {
  simplex[3] phi;
  real<lower=0> mu[J];
  real<lower=0> sigma[J];
  matrix[K, J] mu_idx;
  matrix<lower=0>[K, J] sigma_idx;
  real<lower=0> sigma_mu[J];
  real<lower=0> sigma_sigma[J];
}

transformed parameters {
  vector[N] y;
  vector[N] alpha;
  vector[N] beta;
  vector[K] e_mu;
  vector[K] e_sigma;

  for (n in 1:N) {
    y[n] = Phi((prompt[n] - mu_idx[idx[n], condition[n]]) /
               sigma_idx[idx[n], condition[n]]);
    alpha[n] = sigma_idx[idx[n], condition[n]] * y[n];
    beta[n] = sigma_idx[idx[n], condition[n]] * (1.0 - y[n]);
  }

  for (k in 1:K) {
    e_mu[k] = mu_idx[k, 1] - mu[1];
    e_sigma[k] = sigma_idx[k, 1] - sigma[1];
  }
}

model {
  phi ~ dirichlet(a);
  mu ~ normal(0, 1);
  sigma ~ normal(0, 1);
  sigma_mu ~ normal(0, 1);
  sigma_sigma ~ normal(0, 1);

  for (k in 1:K) {
    for (j in 1:J) {
      mu_idx[k, j] ~ normal(mu[j], sigma_mu[j]);
      sigma_idx[k, j] ~ normal(sigma[j], sigma_sigma[j]);
    }
  }

  for (n in 1:N) {
    if (prob[n] == 0) {
      target += bernoulli_lpmf(1 | phi[1]);
    } else if (prob[n] == 1) {
      target += bernoulli_lpmf(1 | phi[2]);
    } else {
      target += bernoulli_lpmf(1 | phi[3]) +
                beta_lpdf(prob[n] | alpha[n], beta[n]);
    }
  }
}
SAS
data _null_;
put 'Hello, world!';
run;
DATA comp1;
  SET idre.sales;
  IF Job_Title='Sales Rep. IV' THEN Bonus=1000;
  IF Job_Title='Sales Manager' THEN Bonus=1500;
  IF Job_Title='Senior Sales Manager' THEN Bonus=2000;
  IF Job_Title='Chief Sales Officer' THEN Bonus=2500;
RUN;
Python
from __future__ import print_function
import numpy as np
import statsmodels.api as sm
import matplotlib.pyplot as plt
from statsmodels.sandbox.regression.predstd import wls_prediction_std

np.random.seed(1031)

# Simulate data from a known linear model with a nonlinear design matrix
nsample = 5000
sig = 0.5
x = np.linspace(0, 20, nsample)
X = np.column_stack((x, np.sin(x), (x - 5)**2, np.ones(nsample)))
beta = [0.5, 0.5, -0.02, 5.]
y_true = np.dot(X, beta)
y = y_true + sig * np.random.normal(size=nsample)

# Fit ordinary least squares and inspect the estimates
res = sm.OLS(y, X).fit()
print(res.summary())
print('Parameters: ', res.params)
print('Standard errors: ', res.bse)

# Prediction standard errors and interval bounds
prstd, iv_l, iv_u = wls_prediction_std(res)

# Plot the data, the true curve, the OLS fit, and the prediction interval
fig, ax = plt.subplots(figsize=(8, 6))
ax.plot(x, y, 'o', label="data", lw=2, color='gray')
ax.plot(x, y_true, 'b-', label="True")
ax.plot(x, res.fittedvalues, 'r--.', label="OLS")
ax.plot(x, iv_u, 'r--')
ax.plot(x, iv_l, 'r--')
ax.legend(loc='best')
plt.show()
R
fit <- lm(mpg ~ cyl + disp, mtcars)
# show the theoretical model
equatiomatic::extract_eq(fit)
Stata
sysuse auto2, clear
mfp: glm price mpg
twoway (fpfitci price mpg, estcmd(glm) fcolor(dkorange%20) alcolor(%40)) || scatter price mpg, mcolor(dkorange) scale(0.75)
graph export "mfp.png", replace
version
clear
set obs 100
/**
Generate variables from a normal distribution like before
*/
generate x = rnormal(0, 1)
generate y = rnormal(0, 1)
/**
We set up our model here, saving the MCMC simulations under a separate
filename so they are not overwritten by the bayespredict results below
*/
bayesmh y x, likelihood(normal({var})) prior({var}, normal(0, 10)) ///
	prior({y:}, normal(0, 10)) rseed(1031) saving(coutput_mcmc, replace) mcmcsize(1000)
/**
We use the bayespredict command to make predictions from the model
*/
bayespredict (mean:@mean({_resid})) (var:@variance({_resid})), ///
rseed(1031) saving(coutput_pred, replace)
/**
Then we calculate the posterior predictive P-values
*/
bayesstats ppvalues {mean} using coutput_pred
R
library(EmpiricalCalibration)

# Estimate for the exposure of interest (groundTruth == 1) and its traditional p-value
data(sccs)
drugOfInterest <- sccs[sccs$groundTruth == 1, ]
drugOfInterest
exp(drugOfInterest$logRr)
computeTraditionalP(drugOfInterest$logRr, drugOfInterest$seLogRr)

# Negative control estimates (groundTruth == 0), used to fit the empirical null
data(sccs)
negatives <- sccs[sccs$groundTruth == 0, ]
head(negatives)
plotForest(negatives$logRr, negatives$seLogRr, negatives$drugName)

null <- fitNull(negatives$logRr, negatives$seLogRr)
null
plotCalibration(negatives$logRr, negatives$seLogRr)
plotCalibrationEffect(negatives$logRr, negatives$seLogRr, null = null)

# Calibrated p-value for the exposure of interest
p <- calibrateP(null, drugOfInterest$logRr, drugOfInterest$seLogRr)
p
plotCalibrationEffect(negatives$logRr,
                      negatives$seLogRr,
                      drugOfInterest$logRr,
                      drugOfInterest$seLogRr,
                      null)

# Same calibration with an MCMC fit of the null, propagating its uncertainty
null <- fitMcmcNull(negatives$logRr, negatives$seLogRr)
null
plotMcmcTrace(null)
p <- calibrateP(null, drugOfInterest$logRr, drugOfInterest$seLogRr)
p
plotCalibrationEffect(negatives$logRr,
                      negatives$seLogRr,
                      drugOfInterest$logRr,
                      drugOfInterest$seLogRr,
                      null,
                      showCis = TRUE)

citation("EmpiricalCalibration")
Bash
brew upgrade
Updating Homebrew...
==> Auto-updated Homebrew!
Updated 2 taps (homebrew/core and homebrew/cask).
==> Updated Formulae
Updated 5 formulae.
==> Updated Casks
Updated 3 casks.
R session information
R version 4.0.4 (2021-02-15)
Platform: x86_64-apple-darwin17.0 (64-bit)
Running under: macOS Big Sur 10.16
Matrix products: default
BLAS: /Library/Frameworks/R.framework/Versions/4.0/Resources/lib/libRblas.dylib
LAPACK: /Library/Frameworks/R.framework/Versions/4.0/Resources/lib/libRlapack.dylib
Random number generation:
RNG: Mersenne-Twister
Normal: Inversion
Sample: Rejection
locale:
[1] en_US.UTF-8/en_US.UTF-8/en_US.UTF-8/C/en_US.UTF-8/en_US.UTF-8
attached base packages:
[1] stats graphics grDevices utils datasets methods base
other attached packages:
[1] rstan_2.21.2 StanHeaders_2.21.0-7 Statamarkdown_0.5.2 kableExtra_1.3.4 ggplot2_3.3.3 concurve_2.7.7
loaded via a namespace (and not attached):
[1] colorspace_2.0-0 ggsignif_0.6.0 ellipsis_0.3.1 rio_0.5.16 flextable_0.6.3
[6] base64enc_0.1-3 rstudioapi_0.13 ggpubr_0.4.0 fansi_0.4.2 xml2_1.3.2
[11] codetools_0.2-18 splines_4.0.4 knitr_1.31 jsonlite_1.7.2 broom_0.7.5
[16] km.ci_0.5-2 bcaboot_0.2-1 compiler_4.0.4 httr_1.4.2 backports_1.2.1
[21] assertthat_0.2.1 Matrix_1.3-2 cli_2.3.0 htmltools_0.5.1.1 prettyunits_1.1.1
[26] tools_4.0.4 gtable_0.3.0 glue_1.4.2 dplyr_1.0.4 V8_3.4.0
[31] Rcpp_1.0.6 carData_3.0-4 cellranger_1.1.0 jquerylib_0.1.3 vctrs_0.3.6
[36] debugme_1.1.0 svglite_1.2.3.2 nlme_3.1-152 blogdown_1.1 xfun_0.21
[41] stringr_1.4.0 ps_1.5.0 openxlsx_4.2.3 rvest_0.3.6 lifecycle_1.0.0
[46] sys_3.4 rstatix_0.7.0 zoo_1.8-8 scales_1.1.1 hms_1.0.0
[51] credentials_1.3.0 ProfileLikelihood_1.1 parallel_4.0.4 inline_0.3.17 metafor_2.4-0
[56] yaml_2.2.1 curl_4.3 reticulate_1.18 gridExtra_2.3 KMsurv_0.1-5
[61] gdtools_0.2.3 loo_2.4.1 sass_0.3.1 stringi_1.5.3 boot_1.3-27
[66] pkgbuild_1.2.0 zip_2.1.1 matrixStats_0.58.0 rlang_0.4.10 pkgconfig_2.0.3
[71] systemfonts_1.0.1 evaluate_0.14 lattice_0.20-41 purrr_0.3.4 tidyselect_1.1.0
[76] processx_3.4.5 magrittr_2.0.1 bookdown_0.21 R6_2.5.0 generics_0.1.0
[81] DBI_1.1.1 pillar_1.5.0 haven_2.3.1 foreign_0.8-81 withr_2.4.1
[86] survival_3.2-7 abind_1.4-5 tibble_3.0.6 crayon_1.4.1 car_3.0-10
[91] survMisc_0.5.5 uuid_0.1-4 utf8_1.1.4 rmarkdown_2.7 officer_0.3.16
[96] grid_4.0.4 readxl_1.3.1 data.table_1.13.6 callr_3.5.1 forcats_0.5.1
[101] digest_0.6.27 webshot_0.5.2 pbmcapply_1.5.0 xtable_1.8-4 tidyr_1.1.2
[106] openssl_1.4.3 RcppParallel_5.0.2 stats4_4.0.4 munsell_0.5.0 viridisLite_0.3.0
[111] survminer_0.4.8 bslib_0.2.4 askpass_1.1