Examples
Practical examples demonstrating algebraic.mle capabilities.
Example 1: Fitting Common Distributions
Compare Weibull and Normal fits to simulated data.
library(algebraic.mle)

set.seed(142334)

# Sample size and generating parameters
n <- 100
shape <- 2
scale <- 10
noise_sd <- 0.1

# Weibull draws contaminated with a little Gaussian measurement noise
x <- rweibull(n, shape = shape, scale = scale) + rnorm(n, 0, noise_sd)
Fitting a Normal Distribution
#' Fit a normal model by maximum likelihood.
#'
#' Uses the closed-form normal MLEs (sample mean and the biased
#' variance estimator) and a numeric Hessian for the variance matrix.
#'
#' @param data Numeric vector of observations.
#' @return An `mle` object with named parameters `mu` and `sigma2`.
fit_normal <- function(data) {
  # Joint log-likelihood; theta = (mean, variance)
  ll <- function(theta) {
    sum(dnorm(data, mean = theta[1], sd = sqrt(theta[2]), log = TRUE))
  }
  # The normal MLEs are available analytically -- no optimizer needed
  mu_hat <- mean(data)
  est <- c(mu = mu_hat, sigma2 = mean((data - mu_hat)^2))
  # Observed Fisher information = negative Hessian at the optimum
  info <- -numDeriv::hessian(ll, est)
  mle(theta.hat = est,
      loglike = ll(est),
      sigma = MASS::ginv(info),
      nobs = length(data))
}
# Fit the normal model to the simulated data and inspect the estimates
normal_fit <- fit_normal(x)
summary(normal_fit)
Fitting a Weibull Distribution
#' Fit a Weibull model by maximum likelihood (numerical optimization).
#'
#' @param data Numeric vector of positive observations.
#' @return An `mle` object with named parameters `shape` and `scale`.
fit_weibull <- function(data) {
  loglik <- function(theta) {
    sum(dweibull(data, shape = theta[1], scale = theta[2], log = TRUE))
  }
  sol <- optim(
    par = c(shape = 1, scale = 1),
    fn = loglik,
    hessian = TRUE,
    method = "L-BFGS-B",
    lower = c(0.01, 0.01),        # keep both parameters strictly positive
    control = list(fnscale = -1)  # flip sign: optim minimizes by default
  )
  # Name the estimates explicitly: downstream code indexes by name,
  # e.g. confint(fit)["shape", ] and params(fit)["scale"], which fails
  # if theta.hat is an unnamed vector.
  mle(theta.hat = setNames(sol$par, c("shape", "scale")),
      loglike = sol$value,
      sigma = MASS::ginv(-sol$hessian),
      nobs = length(data))
}
# Fit the Weibull model to the same data for comparison
weibull_fit <- fit_weibull(x)
summary(weibull_fit)
Compare Models
# Lower AIC indicates the better-fitting model
cat("Normal AIC:", aic(normal_fit), "\n")
cat("Weibull AIC:", aic(weibull_fit), "\n")
# Weibull should win since data was generated from Weibull
Example 2: Working with Confidence Intervals
# Reuse the Weibull fit from Example 1
fit <- weibull_fit
# Default 95% CI
confint(fit)
# 90% CI
confint(fit, level = 0.90)
# 99% CI
confint(fit, level = 0.99)
# Extract just the shape parameter CI
# NOTE(review): row indexing by "shape" requires the fit's parameter
# vector to carry names -- verify the fitting function sets them.
shape_ci <- confint(fit)["shape", ]
cat("Shape 95% CI:", shape_ci, "\n")
Example 3: Extracting Marginal Distributions
When you have a multivariate MLE, extract univariate marginals:
# Extract the univariate marginal for the shape parameter
shape_marginal <- marginal(weibull_fit, "shape")
# The marginal supports the same accessors as the full fit:
params(shape_marginal)   # point estimate
confint(shape_marginal)  # confidence interval
se(shape_marginal)       # standard error
Example 4: Combining MLEs (Weighted Estimator)
Combine estimates from multiple samples or studies:
# Simulate two independent samples
set.seed(1)
x1 <- rweibull(50, shape = 2, scale = 10)
x2 <- rweibull(100, shape = 2, scale = 10)
# Fit each
fit1 <- fit_weibull(x1)
fit2 <- fit_weibull(x2)
# Combine using inverse-variance weighting
# (simplified example - full implementation would use mle_weighted)
theta1 <- params(fit1)
theta2 <- params(fit2)
var1 <- diag(vcov(fit1))
var2 <- diag(vcov(fit2))
# Inverse variance weights
w1 <- 1/var1
w2 <- 1/var2
# Weighted average
theta_combined <- (w1 * theta1 + w2 * theta2) / (w1 + w2)
cat("Combined estimate:", theta_combined, "\n")
Example 5: Survival Analysis with Right-Censoring
Fit an exponential model to right-censored survival data:
# Simulate right-censored data
set.seed(42)
n <- 100
true_rate <- 0.1
# True failure times
T_true <- rexp(n, rate = true_rate)
# Censoring times (random censoring)
C <- rexp(n, rate = 0.05)
# Observed times and indicators
time <- pmin(T_true, C)
status <- as.numeric(T_true <= C) # 1 = event, 0 = censored
cat("Observed events:", sum(status), "\n")
cat("Censored:", sum(1 - status), "\n")
# Log-likelihood for right-censored exponential
#' Log-likelihood of an exponential model under right censoring.
#'
#' Events contribute log f(t) = log(rate) - rate * t; censored
#' observations contribute log S(t) = -rate * t. Reads `time` and
#' `status` from the enclosing (global) environment.
#'
#' @param rate Exponential rate parameter (> 0).
#' @return The total log-likelihood (a scalar).
loglik_censored <- function(rate) {
  event_part <- status * log(rate)  # density term only for events
  survival_part <- -rate * time     # every observation contributes -rate*t
  sum(event_part + survival_part)
}
# Maximize the censored log-likelihood over a bounded interval.
# Brent's method is optim()'s recommended choice for 1-D problems.
opt_result <- optim(
  par = 0.05,
  fn = loglik_censored,
  method = "Brent",
  lower = 0.001,
  upper = 1,
  hessian = TRUE,                # needed for the variance estimate
  control = list(fnscale = -1)   # flip sign: optim minimizes by default
)

# Wrap the raw optim() result in an mle object
fit_surv <- mle_numerical(opt_result)
summary(fit_surv)

# Sanity-check the estimate against the generating value
cat("True rate:", true_rate, "\n")
cat("Estimated rate:", params(fit_surv), "\n")
cat("95% CI:", confint(fit_surv), "\n")
Example 6: Profile Likelihood
Compute profile likelihood for a parameter of interest:
#' Profile log-likelihood for the Weibull scale parameter.
#'
#' Holds the scale fixed and maximizes the log-likelihood of the
#' global data `x` over the shape parameter (searched on [0.1, 10]).
#'
#' @param scale_fixed Fixed value of the Weibull scale parameter.
#' @return The maximized log-likelihood at that scale (a scalar).
profile_scale <- function(scale_fixed) {
  # Conditional log-likelihood with scale pinned at scale_fixed
  conditional_ll <- function(shape_free) {
    sum(dweibull(x, shape = shape_free, scale = scale_fixed, log = TRUE))
  }
  best <- optim(
    par = 1,
    fn = conditional_ll,
    method = "Brent",
    lower = 0.1,
    upper = 10,
    control = list(fnscale = -1)  # maximize
  )
  best$value
}
# Evaluate the profile log-likelihood over a grid of scale values.
scale_range <- seq(8, 12, by = 0.1)
# vapply (not sapply) guarantees a numeric vector even if the grid
# were empty, so the result type is stable in non-interactive use.
profile_ll <- vapply(scale_range, profile_scale, numeric(1))

# Plot the profile curve and mark the unconstrained MLE of the scale
plot(scale_range, profile_ll, type = "l",
     xlab = "Scale", ylab = "Profile Log-Likelihood",
     main = "Profile Likelihood for Scale Parameter")
abline(v = params(weibull_fit)["scale"], col = "red", lty = 2)
More Examples
For additional examples — including:
- Multi-parameter transformations via the delta method
- Bootstrap bias correction
- Hierarchical/mixed models
— see the vignettes on the pkgdown site.