# ==== File: forecast/tests/testthat/test-tbats.R ====
# A unit test for tbats function
if (require(testthat)) {
  test_that("Test simple cases for tbats", {
    expect_error(tbats(data.frame(x1 = 1, x2 = 2), use.parallel = FALSE))
    expect_warning(tbats(c(1:5, NA, 7:9), use.parallel = FALSE))
    expect_true(all(forecast(tbats(rep(1, 100), use.parallel = FALSE))$mean == 1))
  })
  test_that("Test tbats() and forecasts", {
    # Fit tbats models
    tbatsfit1 <- tbats(subset(wineind, end = 50), use.parallel = FALSE)
    tbatsfit2 <- tbats(WWWusage, use.parallel = FALSE)
    tbatsfit3 <- tbats(as.numeric(woolyrnq), seasonal.periods = frequency(woolyrnq), use.parallel = FALSE)
    tbatsfit4 <- tbats(airmiles, use.box.cox = FALSE, use.parallel = FALSE)
    # Test tbats.components
    tbats.components(tbatsfit1)
    tbats.components(tbatsfit2)
    tbats.components(tbatsfit3)
    tbats.components(tbatsfit4)
    # Test accuracy.tbats() function
    expect_output(print(accuracy(tbatsfit1)), regexp = "ME")
    expect_output(print(accuracy(tbatsfit2)), regexp = "ME")
    expect_output(print(accuracy(tbatsfit3)), regexp = "ME")
    expect_output(print(accuracy(tbatsfit4)), regexp = "ME")
    # Test summary.tbats()
    expect_output(print(summary(tbatsfit1)), regexp = "Length")
    expect_output(print(summary(tbatsfit2)), regexp = "Length")
    expect_output(print(summary(tbatsfit3)), regexp = "Length")
    expect_output(print(summary(tbatsfit4)), regexp = "Length")
    # Test fitted length
    expect_true(length(fitted(tbatsfit1)) == 50)
    expect_true(length(fitted(tbatsfit2)) == length(WWWusage))
    expect_true(length(fitted(tbatsfit3)) == length(woolyrnq))
    expect_true(length(fitted(tbatsfit4)) == length(airmiles))
    # Test length of forecast
    expect_true(length(forecast(tbatsfit1)$mean) == 2 * frequency(wineind))
    expect_true(length(forecast(tbatsfit2)$mean) == 10)
    # expect_true(length(forecast(tbatsfit3)$mean) == 2 * frequency(woolyrnq))
    expect_true(length(forecast(tbatsfit4)$mean) == 10)
    # Test inappropriate levels
    expect_error(forecast(tbatsfit1, level = -10))
    expect_error(forecast(tbatsfit1, level = 110))
    # Test forecasts with fan = TRUE
    expect_true(all(forecast(tbatsfit1, fan = TRUE)$mean == forecast(tbatsfit1)$mean))
  })
  # test_that("Test tbats() with parallel", {
  #   # Tests will not run on Travis in parallel
  #   expect_output(print(tbats(woolyrnq, num.cores = 1)), regexp = "TBATS")
  #   expect_output(print(tbats(elecsales, num.cores = 1, use.trend = FALSE)), regexp = "BATS")
  # })
}

# ==== File: forecast/tests/testthat/test-msts.R ====
# A unit test for msts.R
if (require(testthat)) {
  test_that("tests for msts() and print.msts()", {
    x <- msts(taylor, seasonal.periods = c(48, 336), ts.frequency = 48, start = 2000 + 22 / 52)
    expect_output(print(x), regexp = "Multi-Seasonal Time Series")
  })
}

# ==== File: forecast/tests/testthat/test-calendar.R ====
# A unit test for calendar.R
if (require(testthat)) {
  test_that("Tests for bizdays()", {
    expect_error(bizdays(1:20))
    b1 <- bizdays(woolyrnq, FinCenter = "New York")
    b2 <- bizdays(woolyrnq, FinCenter = "London")
    b3 <- bizdays(woolyrnq, FinCenter = "Zurich")
    if (packageVersion("timeDate") >= "4021.105") {
      expect_equal(sum(abs(b1 - b2)), 145L)
      expect_equal(sum(abs(b1 - b3)), 176L)
    }
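    # Added illustration (not in the original test file): whatever the
    # installed timeDate version, bizdays() should return one business-day
    # count per period of the input series.
    expect_length(b1, length(woolyrnq))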
    expect_equal(sum(abs(b2 - b3)), 117L)
    b1 <- bizdays(gas, FinCenter = "NERC")
    b2 <- bizdays(gas, FinCenter = "Toronto")
    if (packageVersion("timeDate") >= "4021.105") {
      expect_equal(sum(abs(b1 - b2)), 211L)
    }
  })
  test_that("Tests for easter()", {
    expect_true(length(easter(woolyrnq)) == length(woolyrnq))
    expect_true(length(easter(wineind)) == length(wineind))
  })
}

# ==== File: forecast/tests/testthat/test-arfima.R ====
# A unit test for arfima.R
if (require(testthat)) {
  arfima1 <- arfima(WWWusage, estim = "mle")
  arfima2 <- arfima(WWWusage, estim = "ls")
  arfimabc <- arfima(WWWusage, estim = "mle", lambda = 0.75, biasadj = FALSE)
  arfimabc2 <- arfima(WWWusage, estim = "mle", lambda = 0.75, biasadj = TRUE)
  test_that("test accuracy(), fitted(), and residuals().", {
    expect_true(all(arimaorder(arfima1) == arimaorder(arfima2)))
    fitarfima <- fitted(arfima1)
    residarfima <- residuals(arfima2)
    expect_true(length(fitarfima) == length(residarfima))
    expect_true(all(getResponse(arfima1) == WWWusage))
    expect_false(identical(arfimabc$fitted, arfimabc2$fitted))
    expect_error(accuracy(arfima1), NA)
    expect_equal(mean(residuals(arfima1)), accuracy(arfima1)[, "ME"])
  })
  test_that("test forecast.fracdiff()", {
    expect_true(all(forecast(arfima1, fan = TRUE)$mean == forecast(arfima1, fan = FALSE)$mean))
    expect_error(forecast(arfimabc, level = -10))
    expect_error(forecast(arfimabc, level = 110))
    expect_false(identical(forecast(arfimabc, biasadj = FALSE), forecast(arfimabc, biasadj = TRUE)))
    expect_output(print(summary(forecast(arfimabc))), regexp = "Forecast method: ARFIMA")
  })
}

# ==== File: forecast/tests/testthat/test-acf.R ====
# A unit test for Acf() function
if (require(testthat)) {
  test_that("tests for acf", {
    out <- Acf(wineind, lag.max = 10, type = "partial", plot = FALSE)
    expect_length(out$lag, 10)
    expect_identical(out$acf, Pacf(wineind, lag.max = 10, plot = FALSE)$acf)
    expect_equal(dim(Acf(wineind, lag.max = 10, type = "correlation", plot = FALSE)$acf), c(11L, 1L, 1L))
    expect_equal(Acf(wineind, lag.max = 10, type = "correlation", plot = TRUE)$acf[1, 1, 1], 1)
  })
}

# ==== File: forecast/tests/testthat/test-armaroots.R ====
# A unit test for armaroots.R
if (require(testthat)) {
  test_that("Tests for plot.Arima()", {
    arimafit <- Arima(lynx, c(2, 0, 2), include.mean = FALSE)
    plot(arimafit)
    plot(arimafit, type = "ma")
    plot(arimafit, type = "ar")
    expect_warning(plot(Arima(lynx, c(0, 1, 0))))
  })
}

# ==== File: forecast/tests/testthat/test-refit.R ====
# A unit test for re-fitting models
if (require(testthat)) {
  test_that("tests for re-fitting models", {
    # arima
    fit <- Arima(mdeaths, c(1, 0, 0), c(2, 0, 0), include.mean = FALSE, include.drift = TRUE)
    refit <- Arima(fdeaths, model = fit)
    expect_true(identical(fit$coef, refit$coef))
    expect_false(identical(fit$x, refit$x))
    expect_false(identical(fit$fitted, refit$fitted))
    expect_false(identical(fit$residuals, refit$residuals))
    refit_same <- Arima(mdeaths, model = fit)
    expect_true(identical(fit$coef, refit_same$coef))
    expect_true(identical(fit$x, refit_same$x))
    expect_true(all.equal(fit$fitted, refit_same$fitted))
    expect_true(all.equal(fit$residuals, refit_same$residuals))
    # arfima
    fit <- arfima(mdeaths)
    refit <- arfima(fdeaths, model = fit)
    expect_true(identical(fit$ar, refit$ar))
    expect_true(identical(fit$ma, refit$ma))
    expect_false(identical(fit$x, refit$x))
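    # (Refitting with `model = fit` reuses the parameters estimated on
    # mdeaths for the new series, so parameter slots match while the
    # data-dependent slots checked below must differ.)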
    expect_false(identical(fit$fitted, refit$fitted))
    expect_false(identical(fit$residuals, refit$residuals))
    refit_same <- arfima(mdeaths, model = fit)
    expect_true(identical(fit$ar, refit_same$ar))
    expect_true(identical(fit$ma, refit_same$ma))
    expect_true(identical(fit$x, refit_same$x))
    expect_true(identical(fit$fitted, refit_same$fitted))
    expect_true(identical(fit$residuals, refit_same$residuals))
    # dshw
    fit <- dshw(mdeaths, period1 = 4, period2 = 12)
    refit <- dshw(fdeaths, model = fit)
    expect_false(identical(fit$x, refit$x))
    expect_false(identical(fit$fitted, refit$fitted))
    expect_false(identical(fit$residuals, refit$residuals))
    refit_same <- dshw(mdeaths, model = fit)
    expect_true(identical(fit$model, refit_same$model))
    expect_true(identical(fit$x, refit_same$x))
    expect_true(identical(fit$fitted, refit_same$fitted))
    expect_true(identical(fit$residuals, refit_same$residuals))
    # ets
    fit <- ets(mdeaths)
    refit <- ets(fdeaths, model = fit, use.initial.values = TRUE)
    expect_true(identical(fit$fit, refit$fit))
    expect_false(identical(fit$x, refit$x))
    expect_false(identical(fit$fitted, refit$fitted))
    expect_false(identical(fit$residuals, refit$residuals))
    refit_same <- ets(mdeaths, model = fit, use.initial.values = TRUE)
    expect_true(identical(fit$x, refit_same$x))
    expect_true(identical(fit$fitted, refit_same$fitted))
    expect_true(identical(residuals(fit), residuals(refit_same)))
    # stlm
    fit <- stlm(mdeaths)
    refit <- stlm(fdeaths, model = fit)
    expect_true(identical(fit$model$par, refit$model$par))
    expect_false(identical(fit$x, refit$x))
    expect_false(identical(fit$fitted, refit$fitted))
    expect_false(identical(fit$residuals, refit$residuals))
    refit_same <- stlm(mdeaths, model = fit)
    expect_true(identical(fit$model$par, refit_same$model$par))
    expect_true(identical(fit$x, refit_same$x))
    expect_true(identical(fit$fitted, refit_same$fitted))
    expect_true(identical(fit$residuals, refit_same$residuals))
    # bats
    fit <- bats(mdeaths)
    refit <- bats(fdeaths, model = fit)
    expect_true(identical(fit$parameters, refit$parameters))
    expect_false(identical(fit$x, refit$x))
    expect_false(identical(fit$fitted.values, refit$fitted.values))
    expect_false(identical(residuals(fit), residuals(refit)))
    refit_same <- bats(mdeaths, model = fit)
    expect_true(identical(fit$model$par, refit_same$model$par))
    expect_true(identical(fit$x, refit_same$x))
    expect_true(identical(fit$fitted.values, refit_same$fitted.values))
    expect_true(identical(residuals(fit), residuals(refit_same)))
    # tbats
    fit <- tbats(mdeaths)
    refit <- tbats(fdeaths, model = fit)
    expect_true(identical(fit$parameters, refit$parameters))
    expect_false(identical(fit$x, refit$x))
    expect_false(identical(fit$fitted.values, refit$fitted.values))
    expect_false(identical(residuals(fit), residuals(refit)))
    refit_same <- tbats(mdeaths, model = fit)
    expect_true(identical(fit$model$par, refit_same$model$par))
    expect_true(identical(fit$x, refit_same$x))
    expect_true(identical(fit$fitted.values, refit_same$fitted.values))
    expect_true(identical(residuals(fit), residuals(refit_same)))
    # nnetar
    fit <- nnetar(mdeaths)
    refit <- nnetar(fdeaths, model = fit)
    expect_false(identical(fit$x, refit$x))
    expect_false(identical(fit$fitted, refit$fitted))
    expect_false(identical(fit$residuals, refit$residuals))
    refit_same <- nnetar(mdeaths, model = fit)
    expect_true(identical(fit$x, refit_same$x))
    expect_true(identical(fit$fitted, refit_same$fitted))
    expect_true(identical(residuals(fit), residuals(refit_same)))
    # forecast.ts
    fit <- forecast(mdeaths)
    refit <- forecast(fdeaths, model = fit, use.initial.values = TRUE)
    expect_false(identical(fit$x, refit$x))
    expect_false(identical(fit$fitted, refit$fitted))
    expect_false(identical(fit$residuals, refit$residuals))
    refit_same <- forecast(mdeaths, model = fit, use.initial.values = TRUE)
    expect_true(identical(fit$x, refit_same$x))
    expect_true(identical(fit$fitted, refit_same$fitted))
    expect_true(identical(residuals(fit), residuals(refit_same)))
  })
}

# ==== File: forecast/tests/testthat/test-wrangle.R ====
# A unit test for wrangling functions
if (require(testthat)) {
  mv_y <- ts(cbind(rnorm(120, 0, 3) + 1:120 + 20 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + 1:120 + 16 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12)
  mv_x <- ts(cbind(rnorm(120, 0, 8) + (1:120) / 2 + 42 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + (1:120) * -1 + 20 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12)
  v_y <- ts(rnorm(120, 0, 8) + (1:120) / 2 + 12 * sin(2 * pi * (1:120) / 12), frequency = 12)
  v_x <- ts(rnorm(120, 0, 1) + (1:120) * (-1) + 28 * sin(2 * pi * (1:120) / 12), frequency = 12)
  test_that("tests on retaining matrix attributes", {
    data <- datamat(mv_y, mv_x, v_y, v_x)
    expect_true(is.ts(data[, 1]))
    expect_true(identical(tsp(data[, 1]), tsp(data[, 2])))
    expect_true(NCOL(data) == 8)
    expect_true(NCOL(data[, 1]) == 2)
    expect_true("matrix" %in% class(data[, 1]))
    expect_true(class(data) == "data.frame")
  })
  test_that("flatten data.frames", {
    mvdata <- datamat(mv_y, mv_x)
    vdata <- datamat(v_y, v_x)
    data <- datamat(mvdata, vdata, flatten = TRUE)
    expect_true(class(data) == "data.frame")
    expect_true(!"data.frame" %in% class(data[, 1]))
  })
}

# ==== File: forecast/tests/testthat/test-newarima2.R ====
# A unit test for functions in newarima2.R
if (require(testthat)) {
  test_that("test auto.arima() and associated methods", {
    expect_warning(auto.arima(rep(1, 100), stepwise = TRUE, parallel = TRUE))
    set.seed(345)
    testseries1 <- ts(rnorm(100) + 1:100, frequency = 0.1)
    xregmat <- matrix(runif(300), ncol = 3)
    expect_true(frequency(forecast(auto.arima(testseries1))) == 1)
    fit1 <- auto.arima(testseries1, xreg = xregmat, allowdrift = FALSE)
    expect_true(all(xregmat == fit1$xreg))
    testseries2 <- ts(rep(100, 120), frequency = 12)
    xregmat <- matrix(runif(240), ncol = 2)
    expect_output(print(auto.arima(testseries2, xreg = xregmat)), regexp = "Series: testseries2")
    expect_output(print(summary(auto.arima(testseries2, xreg = xregmat, approximation = TRUE, stepwise = FALSE))), regexp = "Series: testseries2")
    expect_output(print(auto.arima(ts(testseries2, frequency = 4), approximation = TRUE, trace = TRUE)), regexp = "ARIMA")
    fit1 <- auto.arima(testseries1, stepwise = FALSE, lambda = 2, biasadj = FALSE)
    fit2 <- auto.arima(testseries1, stepwise = FALSE, lambda = 2, biasadj = TRUE)
    expect_false(identical(fit1$fitted, fit2$fitted))
  })
  test_that("test parallel = FALSE and stepwise = FALSE for auto.arima()", {
    skip_if(identical(Sys.getenv("GITHUB_ACTIONS"), "true"))
    expect_equal(auto.arima(WWWusage, parallel = FALSE, stepwise = FALSE)$arma, c(3L, 0L, 0L, 0L, 1L, 1L, 0L))
  })
  test_that("tests for ndiffs()", {
    expect_true(ndiffs(AirPassengers, test = "kpss") == 1)
    expect_true(ndiffs(AirPassengers, test = "adf") == 1)
    expect_true(ndiffs(AirPassengers, test = "pp") == 1)
  })
  test_that("tests for nsdiffs()", {
    expect_true(nsdiffs(AirPassengers, test = "seas") == 1)
    expect_true(nsdiffs(AirPassengers, test = "ocsb") == 1)
    expect_error(nsdiffs(airmiles))
    expect_true(nsdiffs(rep(1, 100)) == 0)
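    # Added note (assumption, not from the original file): recent forecast
    # versions default to test = "seas", so nsdiffs(rep(1, 100)) above is
    # equivalent to nsdiffs(rep(1, 100), test = "seas").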
    expect_warning(nsdiffs(ts(rnorm(10), frequency = 0.1)))
    skip_if_not_installed("uroot")
    expect_true(nsdiffs(AirPassengers, test = "hegy") == 1)
    expect_true(nsdiffs(AirPassengers, test = "ch") == 0)
  })
}

# ==== File: forecast/tests/testthat/test-ets.R ====
# A unit test for ets function
if (require(testthat)) {
  test_that("tests for some arguments in ets", {
    fit <- ets(wineind, model = "ZZM")
    comp <- paste0(fit$components[1:3], collapse = "")
    expect_identical(comp, "MAM")
  })
  test_that("tests for some arguments in ets", {
    fit <- ets(wineind, model = "MAM", alpha = 0.1611)
    expect_identical(as.numeric(fit$par["alpha"]), 0.1611)
  })
  test_that("refit ets model to new data", {
    fit <- ets(wineind, model = "MAM", alpha = 0.1611)
    parnames <- c("alpha", "beta", "gamma")
    par <- fit$par[parnames]
    expect_identical(ets(wineind, model = fit, alpha = 0.1611, use.initial.values = FALSE)$par[parnames], par)
    expect_identical(ets(wineind, model = fit, alpha = 0.1611, beta = NA, use.initial.values = FALSE)$par[parnames], par)
    expect_identical(ets(wineind, model = fit, alpha = 0.1611, gamma = NA, use.initial.values = FALSE)$par[parnames], par)
    expect_identical(ets(wineind, model = fit, alpha = 0.1611, phi = NA, use.initial.values = FALSE)$par[parnames], par)
    expect_identical(ets(wineind, model = fit, alpha = 0.1611, use.initial.values = TRUE)$par, fit$par)
  })
  test_that("class methods for ets work", {
    fit <- ets(wineind, model = "MAM", alpha = 0.1611)
    expect_output(print(summary(fit)), "Smoothing parameters")
    expect_equal(length(coef(fit)), 16L)
    expect_lt(abs(logLik(fit) + 1802.9586023), 1e-5)
    plot(fit)
  })
  test_that("test ets() for errors", {
    expect_warning(ets(taylor))
    fit1 <- ets(airmiles, lambda = 0.15, biasadj = FALSE)
    expect_gt(fit1$par["alpha"], 0.95)
    fit2 <- ets(airmiles, lambda = 0.15, biasadj = TRUE)
    expect_lt(fit2$par["beta"], 1e-3)
    expect_false(identical(fit1$fitted, fit2$fitted))
    expect_error(ets(taylor, model = "ZZA"))
  })
  test_that("forecast.ets()", {
    fit <- ets(airmiles, lambda = 0.15, biasadj = TRUE)
    fcast1 <- forecast(fit, PI = FALSE)
    expect_true(is.null(fcast1$upper) & is.null(fcast1$lower))
    fcast1 <- forecast(fit, biasadj = FALSE)
    fcast2 <- forecast(fit, biasadj = TRUE)
    expect_false(identical(fcast1$mean, fcast2$mean))
    fcast <- forecast(fit, simulate = TRUE)
    expect_true(!is.null(fcast$upper) & !is.null(fcast$lower))
    expect_true(all(fcast$upper > fcast$lower))
  })
}

# ==== File: forecast/tests/testthat/test-mforecast.R ====
# A unit test for mforecast.R
if (require(testthat)) {
  mv_y <- ts(cbind(rnorm(120, 0, 3) + 1:120 + 20 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + 1:120 + 16 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12)
  mv_x <- ts(cbind(rnorm(120, 0, 8) + (1:120) / 2 + 42 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + (1:120) * -1 + 20 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12)
  v_y <- ts(rnorm(120, 0, 8) + (1:120) / 2 + 12 * sin(2 * pi * (1:120) / 12), frequency = 12)
  v_x <- ts(rnorm(120, 0, 1) + (1:120) * (-1) + 28 * sin(2 * pi * (1:120) / 12), frequency = 12)
  test_that("tests for is.mforecast()", {
    fit <- lm(mv_y ~ v_x)
    fcast <- forecast(fit, newdata = data.frame(v_x = 30))
    expect_true(is.mforecast(fcast))
    fit <- lm(v_y ~ v_x)
    fcast <- forecast(fit, newdata = data.frame(v_x = 30))
    expect_false(is.mforecast(fcast))
  })
  test_that("tests for mlmsplit()", {
    fit <- lm(mv_y ~ v_x)
    fit1 <- mlmsplit(fit, index = 1)
    fit2 <- mlmsplit(fit, index = 2)
    fit3 <- lm(mv_y[, 1] ~ v_x)
    fit4 <- lm(mv_y[, 2] ~ v_x)
    expect_identical(fit1$coefficients, fit3$coefficients)
    expect_identical(fit2$coefficients, fit4$coefficients)
    expect_identical(fit1$rank, fit3$rank)
    expect_identical(fit2$rank, fit4$rank)
    expect_equal(fit1$fitted.values, fit3$fitted.values)
    expect_equal(fit2$fitted.values, fit4$fitted.values)
    expect_error(mlmsplit(fit), "Must select lm")
  })
  test_that("tests for forecast.mlm()", {
    fit <- lm(mv_y ~ v_x)
    fcast <- forecast(fit, newdata = data.frame(v_x = 30))
    fit2 <- lm(mv_y[, 1] ~ v_x)
    fcast2 <- forecast(fit2, newdata = data.frame(v_x = 30))
    expect_equal(fcast$forecast[[1]]$residuals, fcast2$residuals)
  })
  test_that("tests for forecast.mts()", {
    lungDeaths <- cbind(mdeaths, fdeaths)
    fcast_b <- forecast(lungDeaths)
    fcast_m <- forecast(mdeaths)
    fcast_f <- forecast(fdeaths)
    expect_true(all.equal(fcast_b$forecast[[1]]$mean, fcast_m$mean))
    expect_true(all.equal(fcast_b$forecast[[2]]$mean, fcast_f$mean))
  })
  test_that("tests for print.mforecast()", {
    fit <- lm(mv_y ~ v_x)
    fcast <- forecast(fit, newdata = data.frame(v_x = 30))
    expect_output(print(fcast), "Series 1")
    expect_output(print(fcast), "Series 2")
  })
  test_that("tests for plot.mforecast()", {
    fit <- lm(mv_y ~ v_x)
    fcast <- forecast(fit, newdata = data.frame(v_x = 30))
    expect_silent(plot(fcast))
  })
}

# ==== File: forecast/tests/testthat/test-season.R ====
# A unit test for season.R
if (require(testthat)) {
  test_that("tests for monthdays", {
    expect_error(monthdays(rnorm(10)))
    expect_true(all(monthdays(ts(rep(100, 12), frequency = 12)) == c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)))
    expect_true(all(monthdays(ts(rep(1, 4), frequency = 4)) == c(90, 91, 92, 92)))
    # Test leapyears
    expect_true(monthdays(ts(rep(1, 48), frequency = 12))[38] == 29)
    expect_true(monthdays(ts(rep(1, 16), frequency = 4))[13] == 91)
  })
  test_that("tests for seasonaldummy", {
    expect_error(seasonaldummy(1))
    testseries <- ts(rep(1:7, 5), frequency = 7)
    dummymat <- seasonaldummy(testseries)
    expect_true(length(testseries) == nrow(dummymat))
    expect_true(ncol(dummymat) == 6)
    expect_true(all(seasonaldummy(wineind)[1:11, ] == diag(11)))
  })
  test_that("tests for seasonaldummyf", {
    expect_error(seasonaldummy(1))
    expect_warning(dummymat <- seasonaldummyf(wineind, 4), "deprecated")
    expect_true(nrow(dummymat) == 4)
    expect_true(ncol(dummymat) == 11)
  })
  test_that("tests for fourier", {
    expect_error(fourier(1))
    testseries <- ts(rep(1:7, 5), frequency = 7)
    fouriermat <- fourier(testseries, 3)
    expect_true(length(testseries) == nrow(fouriermat))
    expect_true(ncol(fouriermat) == 6)
    expect_true(all(grep("-7", colnames(fouriermat))))
  })
  test_that("tests for fourierf", {
    expect_warning(fouriermat <- fourierf(wineind, 4, 10), "deprecated")
    expect_true(nrow(fouriermat) == 10)
    expect_true(ncol(fouriermat) == 8)
  })
  test_that("tests for stlm", {
    expect_warning(stlm(ts(rep(5, 24), frequency = 4), etsmodel = "ZZZ"))
  })
  test_that("tests for forecast.stlm", {
    expect_error(forecast.stlm(stlm(wineind), newxreg = matrix(rep(1, 24), ncol = 2)))
    stlmfit1 <- stlm(woolyrnq, method = "ets")
    stlmfit2 <- stlm(woolyrnq, method = "arima", approximation = FALSE)
    fcfit1 <- forecast(stlmfit1)
    fcfit2 <- forecast(stlmfit1, fan = TRUE)
    expect_true(all(fcfit2$level == seq(from = 51, to = 99, by = 3)))
    fcstlmfit3 <- forecast(stlmfit2)
    expect_true(all(round(forecast(stlm(ts(rep(100, 120), frequency = 12)))$mean, 10) == 100))
    expect_true(all(round(forecast(stlm(ts(rep(100, 120), frequency = 12), lambda = 1))$mean, 10) == 100))
  })
  test_that("tests for stlf", {
    expect_true(all(forecast(stlm(wineind))$mean == stlf(wineind)$mean))
    expect_true(all(forecast(stlm(wineind, lambda = .5))$mean == stlf(wineind, lambda = .5)$mean))
    fit1 <- stlf(wineind, lambda = .2, biasadj = FALSE)
    fit2 <- stlf(wineind, lambda = .2, biasadj = TRUE)
    expect_false(identical(fit1$mean, fit2$mean))
    # Constant series should not error
    series <- ts(rep(950, 20), frequency = 4)
    constantForecast <- expect_error(stlf(series), NA)
    # Small eps
    expect_true(all(abs(constantForecast$mean - mean(series)) < 10^-8))
    y <- ts(rep(1:7, 3), frequency = 7)
    expect_equal(c(stlf(y)$mean), rep(1:7, 2))
  })
  test_that("tests for ma", {
    testseries <- ts(1:20, frequency = 4)
    expect_true(frequency(ma(testseries, order = 4)) == frequency(testseries))
    maseries <- ma(testseries, order = 3)
    expect_true(identical(which(is.na(maseries)), c(1L, 20L)))
    expect_true(all(abs(maseries[2:19] - 2:19) < 1e-14))
    maseries <- ma(testseries, order = 2, centre = FALSE)
    expect_true(identical(which(is.na(maseries)), 20L))
    expect_true(all(abs(maseries[1:19] - 1:19 - 0.5) < 1e-14))
    maseries <- ma(testseries, order = 2, centre = TRUE)
    expect_true(identical(which(is.na(maseries)), c(1L, 20L)))
    expect_true(all(abs(maseries[2:19] - 2:19) < 1e-14))
  })
}

# ==== File: forecast/tests/testthat/test-tslm.R ====
# A unit test for tslm function
if (require(testthat)) {
  mv_y <- ts(cbind(rnorm(120, 0, 3) + 1:120 + 20 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + 1:120 + 16 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12)
  mv_x <- ts(cbind(rnorm(120, 0, 8) + (1:120) / 2 + 42 * sin(2 * pi * (1:120) / 12), rnorm(120, 3, 7) + (1:120) * -1 + 20 * sin(2 * pi * (1:120 + 6) / 12)), frequency = 12)
  v_y <- ts(rnorm(120, 0, 8) + (1:120) / 2 + 12 * sin(2 * pi * (1:120) / 12), frequency = 12)
  v_x <- ts(rnorm(120, 0, 1) + (1:120) * (-1) + 28 * sin(2 * pi * (1:120) / 12), frequency = 12)
  data <- datamat(mv_y, mv_x, v_y, v_x, fourier(v_y, 3))
  test_that("tests on model building with univariate time series", {
    fit1 <- tslm(v_y ~ trend + season, data = data)
    fit2 <- tslm(v_y ~ trend + season, data = data, lambda = 2, biasadj = FALSE)
    fit3 <- tslm(v_y ~ trend + season, data = data, lambda = 2, biasadj = TRUE)
    expect_false(identical(fit2$fitted.values, fit3$fitted.values))
    fit2 <- tslm(v_y ~ trend + season, data = data.frame(trend = rnorm(120)))
    expect_false(identical(fit1$model, fit2$model))
    fit2 <- tslm(v_y ~ trend + season)
    expect_named(fit1, names(fit2))
    expect_identical(fit1$model, fit2$model, ignore_attr = "terms")
    expect_identical(fit1$coefficients, fit2$coefficients)
    fit1 <- tslm(USAccDeaths ~ trend + season, data = USAccDeaths)
    fit2 <- tslm(USAccDeaths ~ trend + season)
    expect_named(fit1, names(fit2))
    expect_identical(fit1$model, fit2$model, ignore_attr = "terms")
    expect_identical(fit1$coefficients, fit2$coefficients)
    expect_warning(fit3 <- tslm(USAccDeaths ~ trend + season, data = USAccDeaths, subset = time(USAccDeaths) %% 1 < 0.1))
    fit <- tslm(USAccDeaths ~ trend + season + trend * season, data = USAccDeaths)
    expect_true("trend:season" %in% attr(fit$terms, "term.labels"))
  })
  test_that("tslm parity with lm", {
    fit1 <- tslm(v_y ~ v_x + fourier(v_y, 3), data = data.frame(v_y = v_y))
    fit2 <- lm(v_y ~ v_x + fourier(v_y, 3), data = data.frame(v_y = v_y))
    expect_equal(fit1$coefficients, fit2$coefficients)
    expect_equal(fit1$model, fit2$model, ignore_attr = "terms")
  })
  test_that("tests on subsetting data", {
    a <- mv_y[, 1]
    expect_warning(fit1 <- tslm(mv_y ~ trend, subset = a < 20), "Subset has been assumed contiguous")
    expect_error(fit2 <- tslm(mv_y ~ trend, subset = subset(mv_y, mv_y[, 1] < 20)))
    expect_warning(tslm(v_y ~ trend + season + trend * season, subset = v_y < 100), "Subset has been assumed contiguous")
  })
  test_that("tests on model building with multivariate time series", {
    fit1 <- tslm(mv_y ~ trend + season)
    fit2 <- tslm(mv_y ~ trend + season, lambda = 0.5)
    expect_false(identical(fit1$coefficients, fit2$coefficients))
    fit3 <- tslm(mv_y ~ trend + season, lambda = 0.5, biasadj = TRUE)
    expect_false(identical(fit2$fitted.values, fit3$fitted.values))
    fit2 <- tslm(mv_y ~ trend + season, data = data)
    expect_named(fit1, names(fit2))
    expect_identical(fit1$model, fit2$model, ignore_attr = "terms")
    expect_identical(fit1$coefficients, fit2$coefficients)
    expect_warning(fit3 <- tslm(mv_y ~ trend + season, subset = mv_y[, 1] < 1), "Subset has been assumed contiguous")
    expect_warning(fit4 <- tslm(mv_y ~ trend + season, data = data, subset = mv_y[, 1] < 1), "Subset has been assumed contiguous")
    expect_named(fit3, names(fit4))
    expect_identical(fit3$model, fit4$model, ignore_attr = "terms")
    expect_identical(fit3$coefficients, fit4$coefficients)
  })
  test_that("tests with bad input", {
    expect_error(tslm(mpg ~ cyl, data = mtcars), "Not time series data")
    expect_error(tslm(tmp2 ~ trend + season + trend * season, subset = subset(tmp2, month = "January")), "Non-seasonal data cannot be modelled using a seasonal factor")
  })
  test_that("forecast.lm", {
    fit1 <- tslm(v_y ~ trend + season, lambda = 2, biasadj = FALSE)
    fit2 <- tslm(v_y ~ trend + season, lambda = 2, biasadj = TRUE)
    fcast1 <- forecast(fit1, h = 60, biasadj = FALSE)
    fcast2 <- forecast(fit2, h = 60, biasadj = TRUE)
    expect_false(identical(fcast1$mean, fcast2$mean))
    fred <- tslm(ldeaths ~ trend + season, lambda = 0)
    fc <- forecast(fred)
  })
  test_that("Unusual usage", {
    expect_silent(fit1 <- tslm(v_y ~ trend + v_x + I(v_x^2) + fourier(v_x, 3)))
    # forecast(fit1, newdata=data.frame(v_x=ts(1:2,freq=12)))
    # tslm(v_y ~ trend + I(v_x) + I(v_x^2) + fourier(v_x, 3), data=data)
    # tslm(v_y ~ trend + season + I(v_x) + I(v_x^2) + fourier(ts(season, freq=12), 3))
    # fit2 <- tslm(v_y ~ trend + season + I(v_x)*fourier(v_x,3))
    # forecast(fit2, newdata=data.frame(v_x=ts(1:2,freq=12)))
    # tslm(v_y ~ trend + season + I(v_x)*fourier(v_x,3),data=data)
  })
  test_that("Missing values", {
    USMissingDeaths <- USAccDeaths
    USMissingDeaths[c(1, 44, 72)] <- NA
    timetrend <- 1:72
    fit <- tslm(USMissingDeaths ~ season + timetrend)
    expect_equal(sum(is.na(residuals(fit))), 3)
    fc <- forecast(fit, newdata = data.frame(timetrend = 73))
    expect_length(fc$mean, 1)
  })
}

# ==== File: forecast/tests/testthat/test-ggplot.R ====
# A unit test for ggplot support
if (require(testthat)) {
  test_that("tests for autoplot/gg functions", {
    library(ggplot2)
    lungDeaths <- cbind(mdeaths, fdeaths)
    ggAcf(wineind)
    autoplot(Acf(wineind))
    expect_identical(ggAcf(wineind, plot = FALSE)$acf, acf(wineind, plot = FALSE, lag.max = 24)$acf)
    ggPacf(wineind)
    autoplot(Pacf(wineind))
    expect_identical(ggPacf(wineind, plot = FALSE)$acf, acf(wineind, plot = FALSE, type = "partial", lag.max = 24)$acf)
    ggCcf(mdeaths, fdeaths)
    autoplot(Ccf(mdeaths, fdeaths))
    expect_identical(ggCcf(mdeaths, fdeaths, plot = FALSE)$acf, ccf(mdeaths, fdeaths, plot = FALSE, type = "correlation", lag.max = 24)$acf)
    arimafit <- Arima(USAccDeaths, order = c(1, 1, 1), seasonal = c(1, 1, 1))
    autoplot(arimafit)
    autoplot(arimafit, type = "ma")
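    # (The type argument selects which inverse roots are drawn: "ma" for the
    # moving-average polynomial, "ar" for the autoregressive polynomial.)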
autoplot(arimafit, type = "ar") arfit <- ar(USAccDeaths) autoplot(arfit) decomposefit <- decompose(USAccDeaths) autoplot(decomposefit) etsfit <- ets(USAccDeaths, model = "ANA") autoplot(etsfit) structfit <- StructTS(USAccDeaths) autoplot(structfit) stlfit <- stl(USAccDeaths, s.window = "periodic") autoplot(stlfit) # seasfit <- seasonal::seas(USAccDeaths) # autoplot(seasfit) etsfcast <- forecast(etsfit) autoplot(etsfcast) autoplot(etsfcast, PI = FALSE) lmfit <- lm(mpg ~ disp, data = mtcars) lmfcast <- forecast(lmfit, newdata = data.frame(disp = 214)) autoplot(lmfcast) mfcast <- forecast(lungDeaths) autoplot(mfcast) ggtsdisplay(USAccDeaths, plot.type = "spectrum") ggtsdisplay(USAccDeaths, plot.type = "partial") ggtsdisplay(USAccDeaths, plot.type = "histogram") ggtsdisplay(USAccDeaths, plot.type = "scatter", theme = ggplot2::theme_bw()) gglagplot(woolyrnq, lags = 2) gglagplot(lungDeaths, lags = 2) gglagplot(WWWusage, do.lines = FALSE, colour = FALSE, labels = TRUE) gglagchull(woolyrnq, lags = 4) ggmonthplot(woolyrnq) ggseasonplot(woolyrnq, year.labels = TRUE, year.labels.left = TRUE) ggseasonplot(USAccDeaths, polar = TRUE, col = 1:5, continuous = TRUE) splinefit <- splinef(airmiles, h = 5) autoplot(splinefit) autoplot(USAccDeaths) autoplot(lungDeaths) autoplot(lungDeaths, facets = TRUE) autoplot(USAccDeaths) + geom_forecast() autoplot(USAccDeaths) + autolayer(etsfcast, series = "ETS") autoplot(lungDeaths) + geom_forecast() autoplot(lungDeaths) + autolayer(mfcast, series = c("mdeaths", "fdeaths")) autoplot(lungDeaths) + autolayer(mfcast) autoplot(lungDeaths) + autolayer(mfcast, series = TRUE) autoplot(lungDeaths, facets = TRUE) + geom_forecast() gghistogram(USAccDeaths, add.kde = TRUE) }) } forecast/tests/testthat/test-graph.R0000644000176200001440000000113414353422625017270 0ustar liggesusers# A unit test for graph.R if (require(testthat)) { test_that("Tests for seasonplot()", { expect_error(seasonplot(airmiles)) seasonplot(ts(gold, frequency = 7)) seasonplot(woolyrnq) seasonplot(wineind) seasonplot(wineind, year.labels = TRUE) seasonplot(wineind, year.labels.left = TRUE) # seasonplot(taylor) }) test_that("Tests for tsdisplay()", { expect_silent(tsdisplay(airmiles, ci.type = "ma")) expect_silent(tsdisplay(1:20)) expect_silent(tsdisplay(airmiles, plot.type = "scatter")) expect_silent(tsdisplay(airmiles, plot.type = "spectrum")) }) } forecast/tests/testthat/test-hfitted.R0000644000176200001440000000244614353422625017625 0ustar liggesusers# A unit test for h-step fits if (require(testthat)) { test_that("variance test on h-step fits", { mod1 <- ets(WWWusage, model = "AAN", damped = TRUE) h1 <- fitted(mod1, h = 1) h2 <- fitted(mod1, h = 2) j <- !is.na(h1) & !is.na(h2) expect_lt(var(diff(h1[j])), var(diff(h2[j]))) # hfitted automatic function selection h2_1 <- hfitted(mod1, h = 2) expect_true(identical(h2, h2_1)) mod2 <- Arima(WWWusage, order = c(1, 1, 1)) h1 <- fitted(mod2, h = 1) h2 <- fitted(mod2, h = 2) j <- !is.na(h1) & !is.na(h2) expect_lt(var(diff(h1[j])), var(diff(h2[j]))) mod3 <- arfima(WWWusage) h1 <- fitted(mod3, h = 1) h2 <- fitted(mod3, h = 2) j <- !is.na(h1) & !is.na(h2) expect_lt(var(diff(h1[j])), var(diff(h2[j]))) # mod3 <- tbats(WWWusage) # h1 <- fitted(mod3, h=1) # h2 <- fitted(mod3, h=2) # j <- !is.na(h1) & !is.na(h2) # expect_lt(var(diff(h1[j])), var(diff(h2[j]))) # # mod4 <- bats(WWWusage) # h1 <- fitted(mod4, h=1) # h2 <- fitted(mod4, h=2) # j <- !is.na(h1) & !is.na(h2) # expect_lt(var(diff(h1[j])), var(diff(h2[j]))) mod5 <- nnetar(WWWusage) h1 <- fitted(mod5, h = 1) h2 <- 
fitted(mod5, h = 2) j <- !is.na(h1) & !is.na(h2) expect_lt(var(diff(h1[j])), var(diff(h2[j]))) }) } forecast/tests/testthat/test-nnetar.R0000644000176200001440000001463414456202551017465 0ustar liggesusers# A unit test for nnetar.R if (require(testthat)) { test_that("Tests for nnetar", { oilnnet <- nnetar(airmiles, lambda = 0.15) woolyrnqnnet <- nnetar(woolyrnq, repeats = 10) expect_output(print(woolyrnqnnet), regexp = "Series: woolyrnq") expect_true(length(forecast(oilnnet)$mean) == 10) expect_true(length(forecast(woolyrnqnnet)$mean) == 2 * frequency(woolyrnq)) # # Test with single-column xreg (which might be a vector) uscnnet <- nnetar(woolyrnq, xreg = 1:length(woolyrnq)) expect_true(all(dim(uscnnet$xreg) == c(119, 1))) expect_true(length(forecast(uscnnet, xreg = 120:130)$mean) == 11) # Test default size with and without xreg uscnnet <- nnetar(woolyrnq, p = 2, P = 2) expect_output( print(uscnnet), regexp = "NNAR(2,2,2)", fixed = TRUE ) expect_output( print(uscnnet), regexp = "4-2-1 network", fixed = TRUE ) expect_true(uscnnet$size == 2) uscnnet <- nnetar(woolyrnq, p = 2, P = 2, xreg = 1:119, repeats = 10) expect_output( print(uscnnet), regexp = "NNAR(2,2,3)", fixed = TRUE ) expect_output( print(uscnnet), regexp = "5-3-1 network", fixed = TRUE ) expect_true(uscnnet$size == 3) # Test default size for models with only seasonal lags, with and without xreg seasonal_only_lags_nnet <- nnetar(woolyrnq,p = 0,P = 3) expect_output( print(seasonal_only_lags_nnet),regexp = "NNAR(0,3,2)", fixed = TRUE ) expect_output( print(seasonal_only_lags_nnet),regexp = "3-2-1 network", fixed = TRUE ) seasonal_only_lags_xreg_nnet <- nnetar(woolyrnq,p = 0,P = 3,xreg = cbind(1:119,119:1)) expect_output( print(seasonal_only_lags_xreg_nnet),regexp = "NNAR(0,3,3)", fixed = TRUE ) expect_output( print(seasonal_only_lags_xreg_nnet),regexp = "5-3-1 network", fixed = TRUE ) # Test P=0 when m>1 uscnnet <- nnetar(woolyrnq, p = 4, P = 0) expect_true(uscnnet$size == 2) expect_output(print(uscnnet), regexp = "NNAR(4,2)", fixed = TRUE) # Test overlapping p & P uscnnet <- nnetar(woolyrnq, p = 4, P = 2) expect_true(uscnnet$size == 3) expect_output( print(uscnnet), regexp = "NNAR(4,2,3)", fixed = TRUE ) expect_output( print(uscnnet), regexp = "5-3-1 network", fixed = TRUE ) # Test that p = 0 & P = 0 is not permitted expect_error( nnetar(woolyrnq,p = 0,P = 0) ) # Test with multiple-column xreg creditnnet <- nnetar( wineind, xreg = cbind(bizdays(wineind), fourier(wineind, 1)) ) expect_warning( expect_length(forecast(creditnnet, h = 2, xreg = matrix(2, 2, 3))$mean, 2L), "different column names", fixed = TRUE ) # Test if h doesn't match xreg expect_warning( expect_length(forecast(creditnnet, h = 5, xreg = matrix(2, 2, 3))$mean, 2L), "different column names", fixed = TRUE ) # Test that P is ignored if m=1 expect_warning(creditnnet <- nnetar(WWWusage, p = 2, P = 4, xreg = 1:length(WWWusage))) expect_output( print(creditnnet), regexp = "NNAR(2,2)", fixed = TRUE ) # Test fixed size creditnnet <- nnetar(WWWusage, p = 1, P = 1, xreg = 1:length(WWWusage), size = 12) expect_true(uscnnet$size == 3) expect_output(print(creditnnet), regexp = "NNAR(1,12)", fixed = TRUE) # Test passing arguments to nnet expect_warning(creditnnet <- nnetar( WWWusage, p = 2, P = 4, xreg = 1:length(WWWusage), decay = 0.1 )) expect_output( print(creditnnet), regexp = "decay=0.1", fixed = TRUE ) ## Test output format correct oilnnet <- nnetar(airmiles, p = 1, size = 0, skip = TRUE, Wts = c(0, 1), maxit = 0, repeats = 10) expect_true(all.equal(oilnnet$fitted[-1], 
airmiles[-length(airmiles)])) ## Test output format correct when NAs present oilna <- airmiles oilna[12] <- NA suppressWarnings(oilnnet <- nnetar(oilna, p = 1, size = 0, skip = TRUE, Wts = c(0, 1), maxit = 0)) expect_true(all.equal(oilnnet$fitted[-c(1, 12, 13)], oilna[-c(11, 12, length(oilna))])) ## Test model argument fit1 <- nnetar( WWWusage, xreg = 1:length(WWWusage), lambda = 2, decay = 0.5, maxit = 25, repeats = 7 ) fit2 <- nnetar(WWWusage, xreg = 1:length(WWWusage), model = fit1) # Check some model parameters expect_true(identical(fit1$p, fit2$p)) expect_true(identical(fit1$lambda, fit2$lambda)) expect_true(identical(fit1$nnetargs, fit2$nnetargs)) # Check fitted values are all the same expect_true(identical(fitted(fit1), fitted(fit2))) # Check residuals all the same expect_true(identical(residuals(fit1), residuals(fit2))) # Check number of neural nets expect_true(identical(length(fit1$model), length(fit2$model))) # Check neural network weights all the same expect_true(identical(fit1$model[[1]]$wts, fit2$model[[1]]$wts)) expect_true(identical(fit1$model[[7]]$wts, fit2$model[[7]]$wts)) # Check subset argument oilnnet <- nnetar(airmiles, subset = 11:20) expect_true(identical(which(!is.na(fitted(oilnnet))), 11:20)) oilnnet <- nnetar(airmiles, subset = c(rep(F, 10), rep(T, 10), rep(F, length(airmiles) - 20))) expect_true(identical(which(!is.na(fitted(oilnnet))), 11:20)) ## Check short and constant data expect_warning(nnetfit <- nnetar(rep(1, 10), p=2, P=0, size=1, repeats=1, lambda = 0.1), "Constant data") expect_true(nnetfit$p == 1) expect_true(is.null(nnetfit$lambda)) expect_true(is.null(nnetfit$scalex)) expect_error(nnetfit <- nnetar(rnorm(2), p=1, P=0, size=1, repeats=1), "Not enough data") expect_silent(nnetfit <- nnetar(rnorm(3), p=1, P=0, size=1, repeats=1)) expect_true(nnetfit$p == 1) expect_silent(nnetfit <- nnetar(rnorm(3), p=2, P=0, size=1, repeats=1)) expect_true(nnetfit$p == 2) expect_warning(nnetfit <- nnetar(rnorm(3), p=3, P=0, size=1, repeats=1), "short series") expect_true(nnetfit$p == 2) expect_warning(nnetfit <- nnetar(rnorm(3), p=4, P=0, size=1, repeats=1), "short series") expect_true(nnetfit$p == 2) expect_warning(nnetfit <- nnetar(rnorm(10), xreg=rep(1, 10), p=2, P=0, size=1, repeats=1, lambda = 0.1), "Constant xreg") expect_true(is.null(nnetfit$scalexreg)) expect_warning(nnetfit <- nnetar(rnorm(3), xreg=matrix(c(1, 2, 3, 1, 1, 1), ncol=2), p=1, P=0, size=1, repeats=1, lambda = 0.1), "Constant xreg") expect_true(is.null(nnetfit$scalexreg)) }) } forecast/tests/testthat/test-thetaf.R0000644000176200001440000000075714150370574017454 0ustar liggesusers# A unit test for thetaf.R if (require(testthat)) { test_that("test thetaf()", { thetafc <- thetaf(WWWusage)$mean expect_true(all(thetafc == thetaf(WWWusage, fan = TRUE)$mean)) expect_error(thetaf(WWWusage, level = -10)) expect_error(thetaf(WWWusage, level = 110)) # Constant series should not error series <- ts(rep(950, 20), frequency = 4) constantForecast <- expect_error(thetaf(series), NA) expect_true(is.constant(round(constantForecast$mean, 12))) }) } forecast/tests/testthat/test-forecast2.R0000644000176200001440000000736214353422625020070 0ustar liggesusers# A unit test for forecast2.R if (require(testthat)) { test_that("test meanf()", { meanfc <- mean(wineind) expect_true(all(meanf(wineind)$mean == meanfc)) bcforecast <- meanf(wineind, lambda = -0.5)$mean expect_true(max(bcforecast) == min(bcforecast)) expect_true(all(meanf(wineind, fan = TRUE)$mean == meanfc)) expect_error(meanf(wineind, level = -10)) 
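    # (Prediction-interval levels must lie between 0 and 100, so both of
    # these out-of-range calls are expected to fail.)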
    expect_error(meanf(wineind, level = 110))
    # Constant series should not error
    series <- ts(rep(950, 20), frequency = 4)
    constantForecast <- expect_error(meanf(series), NA)
    expect_true(is.constant(constantForecast$mean))
  })
  test_that("test rwf()", {
    rwfc <- rwf(airmiles)$mean
    expect_true(all(rwfc == naive(airmiles)$mean))
    expect_true(all(rwfc < rwf(airmiles, drift = TRUE)$mean))
    expect_true(all(rwf(airmiles, fan = TRUE)$mean == rwfc))
    expect_true(length(rwf(airmiles, lambda = 0.15)$mean) == 10)
    expect_false(identical(rwf(airmiles, lambda = 0.15, biasadj = FALSE)$mean, rwf(airmiles, lambda = 0.15, biasadj = TRUE)$mean))
    # Constant series should not error
    series <- ts(rep(950, 20), frequency = 4)
    constantForecast <- expect_error(rwf(series), NA)
    expect_true(is.constant(constantForecast$mean))
  })
  test_that("test forecast.HoltWinters()", {
    hwmod <- stats::HoltWinters(UKgas)
    expect_true(all(forecast(hwmod, fan = TRUE)$mean == forecast(hwmod)$mean))
    expect_error(forecast(hwmod, level = -10))
    expect_error(forecast(hwmod, level = 110))
    # Forecasts transformed manually with Box-Cox should match
    # forecasts when lambda is passed as an argument
    hwmodbc <- stats::HoltWinters(BoxCox(UKgas, lambda = 0.25))
    hwfc <- forecast(hwmodbc, lambda = 0.25, biasadj = FALSE)$mean
    hwfc2 <- forecast(hwmodbc, lambda = 0.25, biasadj = TRUE)$mean
    hwbcfc <- InvBoxCox(forecast(hwmodbc)$mean, lambda = 0.25)
    expect_true(all(hwfc == hwbcfc))
    expect_false(identical(hwfc, hwfc2))
  })
  test_that("test for forecast.StructTS()", {
    structtsmod <- stats::StructTS(wineind)
    fc1 <- forecast(structtsmod)$mean
    expect_true(all(fc1 == forecast(structtsmod, fan = TRUE)$mean))
    expect_error(forecast(structtsmod, level = -10))
    expect_error(forecast(structtsmod, level = 110))
    # Forecasts transformed manually with Box-Cox should match
    # forecasts when lambda is passed as an argument
    bcseries <- BoxCox(woolyrnq, lambda = 0.19)
    fc2 <- InvBoxCox(forecast(stats::StructTS(bcseries))$mean, lambda = 0.19)
    fc3 <- forecast(stats::StructTS(bcseries), lambda = 0.19, biasadj = FALSE)$mean
    fc4 <- forecast(stats::StructTS(bcseries), lambda = 0.19, biasadj = TRUE)$mean
    expect_true(all(fc2 == fc3))
    expect_false(identical(fc3, fc4))
  })
  test_that("test croston()", {
    set.seed(1234)
    expect_error(croston(rnorm(100)))
    expect_true(all(croston(rep(0, 100))$mean == 0))
  })
  test_that("test holt()", {
    expect_output(print(summary(holt(wineind))), regexp = "Forecast method: Holt's method")
    expect_output(print(summary(holt(wineind, damped = TRUE))), regexp = "Forecast method: Damped Holt's method")
  })
  test_that("test hw()", {
    expect_output(print(summary(hw(wineind))), regexp = "Forecast method: Holt-Winters' additive method")
  })
  test_that("test naive() and snaive()", {
    # WWWusage has frequency = 1, so naive and snaive should match
    expect_true(all(snaive(WWWusage, h = 10)$mean == naive(WWWusage)$mean))
    expect_true(all(snaive(WWWusage, h = 10)$upper == naive(WWWusage)$upper))
    expect_true(all(snaive(WWWusage, h = 10)$lower == naive(WWWusage)$lower))
    # Constant series should not error
    series <- ts(rep(950, 20), frequency = 4)
    constantForecast <- expect_error(snaive(series), NA)
    expect_true(is.constant(constantForecast$mean))
  })
}

# ==== File: forecast/tests/testthat/test-dshw.R ====
# A unit test for dshw function
if (require(testthat)) {
  test_that("Test dshw()", {
    # Test negative values and period1 and period2 not specified
    set.seed(345)
    expect_error(dshw(-10:10))
    expect_error(dshw(abs(rnorm(100))))
    # Test fits with period1 and period2 swapped
    set.seed(5555)
    t <- seq(0, 1, by = 0.1)
    x <- exp(sin(2 * pi * t) + cos(2 * pi * t * 4) + rnorm(length(t), 0, 0.1))
    fit1 <- dshw(x, period1 = 4, period2 = 2)$mean
    fit2 <- dshw(x, period1 = 2, period2 = 4)$mean
    expect_true(all(fit1 == fit2))
    # Test fits with lambda specified and armethod = FALSE
    y <- x + 1
    fit3 <- dshw(y, period1 = 2, period2 = 4, lambda = 2, biasadj = FALSE)
    fit4 <- dshw(y, period1 = 2, period2 = 4, lambda = 2, biasadj = TRUE)
    expect_false(identical(fit3$mean, fit4$mean))
    fit5 <- dshw(x, period1 = 2, period2 = 4, armethod = FALSE)
    # Test fits with inappropriate periods specified
    expect_error(dshw(x, period1 = 2, period2 = 2))
    expect_error(dshw(x, period1 = 2, period2 = 4.1))
  })
}

# ==== File: forecast/tests/testthat/test-modelAR.R ====
# A unit test for modelAR.R
if (require(testthat)) {
  test_that("Tests for modelAR", {
    ## Set up functions to match 'nnetar' behavior
    avnnet2 <- function(x, y, repeats = repeats, linout = TRUE, trace = FALSE, ...) {
      mods <- list()
      for (i in 1:repeats) {
        mods[[i]] <- nnet::nnet(x, y, linout = linout, trace = trace, ...)
      }
      return(structure(mods, class = "nnetarmodels"))
    }
    ##
    predict.avnnet2 <- function(model, newdata = NULL) {
      if (is.null(newdata)) {
        if (length(predict(model[[1]])) > 1) {
          rowMeans(sapply(model, predict))
        } else {
          mean(sapply(model, predict))
        }
      } else {
        if (NCOL(newdata) >= 2 & NROW(newdata) >= 2) {
          rowMeans(sapply(model, predict, newdata = newdata))
        } else {
          mean(sapply(model, predict, newdata = newdata))
        }
      }
    }
    ## compare residuals to 'nnetar'
    expect_silent({
      set.seed(123)
      nnetar_model <- nnetar(lynx[1:100], p = 2, P = 1, size = 3, repeats = 20)
      set.seed(123)
      modelAR_model <- modelAR(lynx[1:100], FUN = avnnet2, predict.FUN = predict.avnnet2, p = 2, P = 1, scale.inputs = TRUE, size = 3, repeats = 20)
      res1 <- residuals(nnetar_model)
      res2 <- residuals(modelAR_model)
    })
    expect_true(identical(res1, res2))
    ## check re-fitting old model and compare to 'nnetar'
    expect_silent({
      nnetar_model2 <- nnetar(lynx[101:114], model = nnetar_model)
      modelAR_model2 <- modelAR(lynx[101:114], FUN = avnnet2, predict.FUN = predict.avnnet2, model = modelAR_model)
      res1 <- residuals(nnetar_model2)
      res2 <- residuals(modelAR_model2)
    })
    expect_true(identical(res1, res2))
    ## compare forecasts with 'nnetar'
    expect_silent({
      f1 <- forecast(nnetar_model)$mean
      f2 <- forecast(modelAR_model)$mean
    })
    expect_true(identical(f1, f2))
    ## test lambda and compare to 'nnetar'
    expect_silent({
      set.seed(123)
      oilnnet_nnetar <- nnetar(airmiles, lambda = 0.15, size = 1, repeats = 20)
      set.seed(123)
      oilnnet_modelAR <- modelAR(airmiles, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, lambda = 0.15, size = 1, repeats = 20)
    })
    expect_true(identical(residuals(oilnnet_nnetar, type = "response"), residuals(oilnnet_modelAR, type = "response")))
    expect_true(length(forecast(oilnnet_modelAR)$mean) == 10)
    ## check print input name
    expect_silent(woolyrnqnnet <- modelAR(woolyrnq, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, p = 1, P = 0, size = 8, repeats = 10))
    expect_output(print(woolyrnqnnet), regexp = "Series: woolyrnq")
    ## check default forecast length
    expect_true(length(forecast(woolyrnqnnet)$mean) == 2 * frequency(woolyrnq))
    ## Test with single-column xreg (which might be a vector)
    expect_silent({
      set.seed(123)
      woolyrnqnnet <- modelAR(woolyrnq, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = 1:length(woolyrnq), p = 2, P = 2, size = 4, repeats = 10)
      set.seed(123)
      woolyrnqnnet2 <- nnetar(woolyrnq, xreg = 1:length(woolyrnq), p = 2, P = 2, size = 4, repeats = 10)
    })
    expect_true(all(dim(woolyrnqnnet$xreg) == c(119, 1)))
    expect_true(length(forecast(woolyrnqnnet, xreg = 120:130)$mean) == 11)
    expect_true(identical(forecast(woolyrnqnnet, xreg = 120:130)$mean, forecast(woolyrnqnnet2, xreg = 120:130)$mean))
    ## Test with multiple-column xreg
    set.seed(123)
    winennet <- modelAR(wineind, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = cbind(bizdays(wineind), fourier(wineind, 1)), p = 2, P = 1, size = 4, repeats = 10)
    set.seed(123)
    winennet2 <- nnetar(wineind, xreg = cbind(bizdays(wineind), fourier(wineind, 1)), p = 2, P = 1, size = 4, repeats = 10)
    expect_true(length(forecast(winennet, h = 2, xreg = matrix(2, 2, 3))$mean) == 2L)
    ## Test if h matches xreg
    expect_true(length(forecast(winennet, h = 5, xreg = matrix(2, 2, 3))$mean) == 2L)
    expect_warning(expect_equal(forecast(winennet2, xreg = matrix(2, 2, 3))$mean, forecast(winennet, xreg = matrix(2, 2, 3))$mean), "different column names", fixed = TRUE)
    ## Test that P is ignored if m=1
    expect_warning(wwwnnet <- modelAR(WWWusage, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = 1:length(WWWusage), p = 2, P = 4, size = 3, repeats = 10))
    ## Test passing arguments to nnet
    expect_silent({
      set.seed(123)
      wwwnnet <- modelAR(WWWusage, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = 1:length(WWWusage), p = 2, P = 0, size = 3, decay = 0.1, repeats = 10)
      set.seed(123)
      wwwnnet2 <- nnetar(WWWusage, size = 3, p = 2, P = 0, xreg = 1:length(WWWusage), decay = 0.1, repeats = 10)
    })
    expect_true(identical(forecast(wwwnnet, h = 2, xreg = (length(WWWusage) + 1):(length(WWWusage) + 5))$mean, forecast(wwwnnet2, h = 2, xreg = (length(WWWusage) + 1):(length(WWWusage) + 5))$mean))
    ## Test output format correct when NAs present
    airna <- airmiles
    airna[12] <- NA
    expect_warning(airnnet <- modelAR(airna, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, p = 1, size = 0, skip = TRUE, Wts = c(0, 1), maxit = 0, repeats = 5))
    expect_equal(airnnet$fitted[-c(1, 12, 13)], airna[-c(11, 12, length(airna))])
    ## Test model argument
    expect_silent({
      set.seed(123)
      fit1 <- modelAR(WWWusage, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, xreg = 1:length(WWWusage), p = 3, size = 2, lambda = 2, decay = 0.5, maxit = 25, repeats = 7)
      fit2 <- modelAR(WWWusage, xreg = 1:length(WWWusage), model = fit1)
      set.seed(123)
      fit3 <- nnetar(WWWusage, xreg = 1:length(WWWusage), p = 3, size = 2, lambda = 2, decay = 0.5, maxit = 25, repeats = 7)
    })
    # Check some model parameters
    expect_true(identical(fit1$p, fit2$p))
    expect_true(identical(fit1$lambda, fit2$lambda))
    expect_true(identical(fit1$modelargs, fit2$modelargs))
    # Check fitted values are all the same
    expect_true(identical(fitted(fit1), fitted(fit2)))
    expect_true(identical(fitted(fit1, h = 2), fitted(fit2, h = 2)))
    # Check residuals all the same
    expect_true(identical(residuals(fit1), residuals(fit2)))
    # Check number of neural nets
    expect_true(identical(length(fit1$model), length(fit2$model)))
    # Check neural network weights all the same
    expect_true(identical(fit1$model[[1]]$wts, fit2$model[[1]]$wts))
    expect_true(identical(fit1$model[[7]]$wts, fit2$model[[7]]$wts))
    ## compare results with 'nnetar'
    expect_true(identical(fitted(fit1), fitted(fit3)))
    expect_true(identical(fitted(fit1, h = 3), fitted(fit3, h = 3)))
    expect_true(identical(residuals(fit1, type = "response"), residuals(fit3, type = "response")))
    ## Check subset argument using indices
    expect_silent({
      set.seed(123)
      airnnet <- modelAR(airmiles, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, subset = 11:20, p = 1, size = 1, repeats = 10)
      set.seed(123)
      airnnet2 <- nnetar(airmiles, subset = 11:20, p = 1, size = 1, repeats = 10)
    })
    expect_true(identical(which(!is.na(fitted(airnnet))), 11:20))
    expect_true(identical(fitted(airnnet), fitted(airnnet2)))
    expect_true(identical(forecast(airnnet, h = 5)$mean, forecast(airnnet2, h = 5)$mean))
    ## Check subset argument using logical vector
    expect_silent({
      set.seed(123)
      airnnet <- modelAR(airmiles, FUN = avnnet2, predict.FUN = predict.avnnet2, scale.inputs = TRUE, subset = c(rep(F, 10), rep(T, 10), rep(F, length(airmiles) - 20)), p = 1, size = 1, repeats = 10)
      set.seed(123)
      airnnet2 <- nnetar(airmiles, subset = c(rep(F, 10), rep(T, 10), rep(F, length(airmiles) - 20)), p = 1, size = 1, repeats = 10)
    })
    expect_true(identical(which(!is.na(fitted(airnnet))), 11:20))
    expect_true(identical(fitted(airnnet), fitted(airnnet2)))
    expect_true(identical(forecast(airnnet, h = 5)$mean, forecast(airnnet2, h = 5)$mean))
    ## compare prediction intervals with 'nnetar'
    expect_silent({
      set.seed(456)
      f1 <- forecast(airnnet, h = 5, PI = TRUE, npaths = 100)
      set.seed(456)
      f2 <- forecast(airnnet2, h = 5, PI = TRUE, npaths = 100)
    })
    # expect_true(identical(f1$upper, f2$upper))
    # expect_true(identical(f1$lower, f2$lower))
    ## Check short and constant data
    expect_warning(nnetfit <- modelAR(rep(1, 10), FUN = avnnet2, predict.FUN = predict.avnnet2, p = 2, P = 0, size = 1, repeats = 1, lambda = 0.1), "Constant data")
    expect_true(nnetfit$p == 1)
    expect_true(is.null(nnetfit$lambda))
    expect_true(is.null(nnetfit$scalex))
    expect_error(nnetfit <- modelAR(rnorm(2), FUN = avnnet2, predict.FUN = predict.avnnet2, p = 1, P = 0, size = 1, repeats = 1), "Not enough data")
    expect_silent(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, p = 1, P = 0, size = 1, repeats = 1))
    expect_true(nnetfit$p == 1)
    expect_silent(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, p = 2, P = 0, size = 1, repeats = 1))
    expect_true(nnetfit$p == 2)
    expect_warning(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, p = 3, P = 0, size = 1, repeats = 1), "short series")
    expect_true(nnetfit$p == 2)
    expect_warning(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, p = 4, P = 0, size = 1, repeats = 1), "short series")
    expect_true(nnetfit$p == 2)
    expect_warning(nnetfit <- modelAR(rnorm(10), FUN = avnnet2, predict.FUN = predict.avnnet2, xreg = rep(1, 10), p = 2, P = 0, size = 1, repeats = 1, lambda = 0.1), "Constant xreg")
    expect_true(is.null(nnetfit$scalexreg))
    expect_warning(nnetfit <- modelAR(rnorm(3), FUN = avnnet2, predict.FUN = predict.avnnet2, xreg = matrix(c(1, 2, 3, 1, 1, 1), ncol = 2), p = 1, P = 0, size = 1, repeats = 1, lambda = 0.1), "Constant xreg")
    expect_true(is.null(nnetfit$scalexreg))
  })
}

# ==== File: forecast/tests/testthat/test-accuracy.R ====
# A unit test for accuracy() function
if (require(testthat)) {
  test_that("tests for a non-forecast object (input)", {
    expect_error(accuracy(USAccDeaths))
  })
  test_that("tests for dimension (output)", {
    train <- window(USAccDeaths, start = c(1973, 1), end = c(1976, 12))
    test <- window(USAccDeaths, start = c(1977, 1))
    fcasts <- forecast(train, h = 6)
    expect_identical(dim(accuracy(fcasts)), c(1L, 7L))
    expect_identical(dim(accuracy(fcasts, test)), c(2L, 8L))
    expect_false(all(dim(accuracy(fcasts, test, test = 1:2)) == dim(accuracy(fcasts, test))))
    expect_identical(accuracy(fcasts, test = 1:length(train)), accuracy(fcasts))
  })
  test_that("tests for accuracy (output)", {
    # Test arima
    fitarima <- Arima(USAccDeaths, order = c(0, 1, 1), seasonal = c(0, 1, 1))
    accuracyarima <- accuracy(fitarima)[1, "RMSE"]
    accuracyarimasim <- accuracy(Arima(simulate(fitarima, seed = 123), order = c(0, 1, 0), seasonal = c(0, 0, 1)))[1, "RMSE"]
    expect_lt(accuracyarima, accuracyarimasim)
    # Test ets
    fitets <- ets(AirPassengers, model = "MAM", damped = TRUE)
    accuracyets <- accuracy(fitets)[1, "RMSE"]
    accuracyetssim <- accuracy(ets(simulate(fitets, seed = 123), model = "MAM", damped = TRUE))[1, "RMSE"]
    expect_lt(accuracyets, accuracyetssim)
    # Test lm
    month <- factor(rep(1:12, 14))
    fitlm <- lm(wineind[1:168] ~ month)
    accuracylm <- accuracy(fitlm)[1, "RMSE"]
    accuracylmsim <- accuracy(lm(simulate(fitlm, seed = 123)[, 1] ~ month))[1, "RMSE"]
    expect_gt(accuracylm, accuracylmsim)
  })
}

# ==== File: forecast/tests/testthat/test-forecast.R ====
# A unit test for forecast.R
if (require(testthat)) {
  test_that("tests for findfrequency()", {
    expect_true(frequency(airmiles) == findfrequency(as.numeric(airmiles)))
    expect_false(frequency(wineind) == findfrequency(as.numeric(wineind)))
    expect_true(frequency(woolyrnq) == findfrequency(as.numeric(woolyrnq)))
    expect_true(frequency(gas) == findfrequency(as.numeric(gas)))
  })
  test_that("tests forecast.ts()", {
    fc1 <- as.numeric(forecast(as.numeric(airmiles), find.frequency = TRUE)$mean)
    fc2 <- as.numeric(forecast(airmiles)$mean)
    expect_true(all(fc1 == fc2))
  })
  test_that("tests summary.forecast() and forecast.forecast()", {
    WWWusageforecast <- forecast(WWWusage)
    expect_output(print(summary(WWWusageforecast)), regexp = "Forecast method:")
    expect_true(all(predict(WWWusageforecast)$mean == forecast(WWWusageforecast)$mean))
  })
  # test_that("tests plot.forecast()", {
  #   # Fit several types of models for plotting
  #   batsmod <- bats(woolyrnq)
  #   nnetmod <- nnetar(woolyrnq)
  #   tslmmod <- tslm(woolyrnq ~ trend + season)
  #   nnetfc <- forecast(nnetmod)
  #   batsfc <- forecast(batsmod)
  #   tslmfc <- forecast(tslmmod)
  #   skip_on_travis()
  #   # Plot the forecasts
  #   expect_that(plot(nnetfc), not(throws_error()))
  #   expect_that(plot(batsfc), not(throws_error()))
  #   expect_that(plot(batsfc, shaded = FALSE), not(throws_error()))
  #   expect_that(plot(tslmfc, PI = FALSE), not(throws_error()))
  #   expect_that(plot(forecast(tslmmod, h = 0)), not(throws_error()))
  # })
}

# ==== File: forecast/tests/testthat/test-arima.R ====
# A unit test for Arima() function
if (require(testthat)) {
  test_that("tests for a non-ts object", {
    set.seed(123)
    abc <- rnorm(50, 5, 1)
    fit <- Arima(abc, order = c(2, 0, 1))
    expect_identical(fit$arma, c(2L, 1L, 0L, 0L, 1L, 0L, 0L))
  })
  test_that("tests for a ts with the seasonal component", {
    fit <- Arima(wineind, order = c(1, 1, 1), seasonal = c(0, 1, 1))
    expect_identical(fit$arma, c(1L, 1L, 0L, 1L, 12L, 1L, 1L))
  })
  test_that("tests for ARIMA errors", {
    fit <- Arima(wineind, order = c(1, 1, 1), seasonal = c(0, 1, 1))
    expect_identical(residuals(fit, type = "regression"), wineind)
  })
  test_that("tests for arimaorder", {
    for (ar in 1:5) {
      for (i in 0:1) {
        for (ma in 1:5) {
          fitarima <- Arima(lynx, order = c(ar, i, ma), method = "ML", include.constant = TRUE, lambda = 0.5)
          arextracted <- fitarima$arma[1]
          iextracted <- fitarima$arma[6]
          maextracted <- fitarima$arma[2]
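          # (For stats::arima fits, $arma stores c(p, q, P, Q, m, d, D),
          # which is why positions 1, 6 and 2 are read above.)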
          expect_true(all(arimaorder(fitarima) == c(arextracted, iextracted, maextracted)))
          expect_true(all(names(arimaorder(fitarima)) == c("p", "d", "q")))
          expect_true(arimaorder(fitarima)["p"] == ar)
          expect_true(arimaorder(fitarima)["d"] == i)
          expect_true(arimaorder(fitarima)["q"] == ma)
        }
      }
    }
    # Test ar
    arMod <- ar(lynx, order.max = 2)
    expect_true(arimaorder(arMod)["p"] == 2)
    expect_true(arimaorder(arMod)["d"] == 0)
    expect_true(arimaorder(arMod)["q"] == 0)
    expect_true(all(names(arimaorder(arMod)) == c("p", "d", "q")))
    # Test SARIMA
    sarimaMod <- Arima(wineind, order = c(1, 1, 2), seasonal = c(0, 1, 1))
    expect_true(all(names(arimaorder(sarimaMod)) == c("p", "d", "q", "P", "D", "Q", "Frequency")))
    expect_true(arimaorder(sarimaMod)["p"] == 1)
    expect_true(arimaorder(sarimaMod)["d"] == 1)
    expect_true(arimaorder(sarimaMod)["q"] == 2)
    expect_true(arimaorder(sarimaMod)["P"] == 0)
    expect_true(arimaorder(sarimaMod)["D"] == 1)
    expect_true(arimaorder(sarimaMod)["Q"] == 1)
    expect_true(arimaorder(sarimaMod)["Frequency"] == frequency(wineind))
    # Test fracdiff
    set.seed(4)
    fracdiffMod <- fracdiff::fracdiff(lynx, nar = 2)
    expect_true(all(names(arimaorder(fracdiffMod)) == c("p", "d", "q")))
    expect_true(arimaorder(fracdiffMod)["p"] == 2)
    expect_true(arimaorder(fracdiffMod)["d"] >= 0)
    expect_true(arimaorder(fracdiffMod)["d"] <= 1)
  })
  test_that("tests for forecast.Arima", {
    fit1 <- Arima(wineind, order = c(1, 1, 2), seasonal = c(0, 1, 1), method = "CSS")
    expect_warning(forecast.Arima(fit1, xreg = 1:10), "xreg not required")
    expect_warning(forecast.Arima(fit1, include.drift = TRUE))
    expect_true(all.equal(forecast.Arima(fit1, bootstrap = TRUE, npaths = 100)$mean, forecast.Arima(fit1)$mean))
    fit2 <- Arima(wineind, order = c(1, 0, 1), seasonal = c(0, 0, 0), include.drift = TRUE)
    expect_warning(Arima(wineind, order = c(1, 2, 1), include.drift = TRUE))
    expect_true("drift" %in% names(coef(fit2)))
    expect_true(length(forecast.Arima(fit2)$mean) == 2 * frequency(wineind))
    fit3 <- Arima(wineind, order = c(1, 1, 2), seasonal = c(0, 1, 1), include.mean = FALSE)
    expect_false("intercept" %in% names(coef(fit3)))
    expect_true(frequency(forecast.Arima(fit3)$mean) == frequency(wineind))
    fit4 <- Arima(wineind, order = c(1, 1, 2), seasonal = c(0, 1, 1), xreg = rnorm(length(wineind)))
    expect_error(forecast.Arima(fit4))
    expect_error(forecast.Arima(fit4, xreg = matrix(rnorm(40), ncol = 2)))
    forecast.Arima(fit4, xreg = rnorm(20))$mean %>% expect_length(20)
    fit5 <- Arima(wineind[1:150], order = c(1, 1, 2), seasonal = c(0, 1, 1), method = "ML")
    expect_true(accuracy(fit5)[1, "MAPE"] < accuracy(Arima(wineind, model = fit5))[1, "MAPE"])
    fit6 <- Arima(wineind, order = c(1, 1, 2), seasonal = c(0, 1, 1), method = "CSS", lambda = 5)
    expect_false(identical(fit1$coef, fit6$coef))
  })
  test_that("tests for search.arima", {
    set.seed(444)
    arimasim <- arima.sim(n = 300, model = list(ar = runif(8, -.1, 0.1), ma = runif(8, -0.1, 0.1), sd = 0.1))
    expect_true(AIC(auto.arima(arimasim)) >= AIC(auto.arima(arimasim, stepwise = FALSE)))
  })
  test_that("tests for forecast.ar()", {
    fitar <- ar(taylor)
    arfc <- forecast.ar(fitar)$mean
    expect_true(all(arfc == forecast.ar(fitar, bootstrap = TRUE, npaths = 100)$mean))
    expect_true(all(arfc == forecast.ar(fitar, fan = TRUE)$mean))
    expect_error(forecast.ar(fitar, level = -10))
    expect_error(forecast.ar(fitar, level = 110))
    expect_true(all(arfc + 1 == forecast.ar(fitar, lambda = 1)$mean))
    arfcbc <- forecast.ar(fitar, lambda = 2)
    arfcabc <- forecast.ar(fitar, lambda = 2, biasadj = TRUE)
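    # (With biasadj = TRUE the back-transformed point forecast is adjusted
    # for Box-Cox transformation bias, so it should differ from the
    # unadjusted forecast, as checked below.)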
expect_false(identical(arfcbc$mean, arfcabc$mean)) }) test_that("tests for as.character.Arima()", { expect_match(as.character(auto.arima(woolyrnq)), regexp = "ARIMA") }) } forecast/tests/testthat/test-bats.R0000644000176200001440000000134714353422625017126 0ustar liggesusers# A unit test for bats function if (require(testthat)) { test_that("tests for a non-ts object", { set.seed(123) abc <- rnorm(50, 5, 1) fit <- bats(abc, use.box.cox = TRUE, use.parallel = FALSE) expect_false(fit$lambda == 0) expect_output(print(fit), "Seed States") expect_equal(length(residuals(fit)), 50L) plot(fit) expect_equal(bats(1, use.box.cox = TRUE, use.parallel = FALSE)$AIC, -Inf) expect_equal(bats(-1, use.box.cox = TRUE, use.parallel = FALSE)$AIC, -Inf) }) test_that("Test parallel of bats", { abc <- rnorm(50, 5, 1) skip_on_cran() skip_if(identical(Sys.getenv("GITHUB_ACTIONS"), "true")) expect_gt(bats(abc, use.box.cox = TRUE, use.parallel = TRUE)$lambda, 0.999) }) } forecast/tests/testthat/test-clean.R0000644000176200001440000000256614353422625017263 0ustar liggesusers# A unit test for na.interp() and tsclean() if (require(testthat)) { test_that("tests for na.interp", { # Test nonseasonal interpolation expect_true(all(na.interp(c(1, 2, 3, NA, 5, 6, 7)) == 1:7)) # Test for identical on series without NAs expect_true(all(na.interp(wineind) == wineind)) # Test seasonal interpolation testseries <- ts(rep(1:7, 5), frequency = 7) testseries[c(1, 3, 11, 17)] <- NA expect_true(sum(abs(na.interp(testseries) - rep(1:7, 5))) < 1e-12) # Test length of output expect_true(length(testseries) == length(na.interp(testseries))) }) test_that("tests for tsclean", { # Test for no NAs expect_false(any(is.na(tsclean(gold)))) # Test for removing outliers in seasonal series testseries <- ts(rep(1:7, 5), frequency = 7) testseries[c(2, 4, 14)] <- 0 expect_true(sum(abs(tsclean(testseries) - rep(1:7, 5))) < 1e-12) # Test for NAs left with replace.missing = FALSE argument testseries[c(2, 4, 14)] <- NA expect_true(any(is.na(tsclean(testseries, replace.missing = FALSE)))) # Test for outliers in a series expect_equal(sum(abs(wineind - tsclean(wineind)) > 1e-6), 1) # Test for identical on series without NAs or outliers expect_true(identical(USAccDeaths, tsclean(USAccDeaths))) # Test length of output expect_true(length(tsclean(testseries)) == length(testseries)) }) } forecast/tests/testthat/test-subset.R0000644000176200001440000000374614353422625017507 0ustar liggesusers# A unit test for subset function if (require(testthat)) { mtsobj <- ts(matrix(rnorm(200), ncol = 2), frequency = 4) test_that("tests specifying correct argument", { sub <- subset(wineind, month = "September") expect_length(sub, tsp(sub)[2] - tsp(sub)[1] + 1) expect_identical(round(sum(sub)), 338985) sub2 <- subset(wineind, month = "SEPT") expect_identical(sub, sub2) sub2 <- subset(wineind, month = 9) expect_identical(sub, sub2) sub2 <- subset(wineind, season = 9) expect_identical(sub, sub2) sub <- subset(woolyrnq, quarter = 1) expect_length(sub,tsp(sub)[2] - tsp(sub)[1] + 1) expect_identical(sum(sub), 153142) sub2 <- subset(woolyrnq, season = 1) expect_identical(sub, sub2) sub <- subset(wineind, subset = wineind < 25000) expect_identical(round(sum(sub)), 1948985) expect_length(sub,91) sub <- subset(mtsobj, c(1, 1, rep(0, 98)) == 1) expect_identical(ncol(sub), 2L) expect_identical(nrow(sub), 2L) sub <- subset(mtsobj, quarter = 1) expect_identical(ncol(sub), 2L) expect_identical(nrow(sub), 25L) }) test_that("tests specifying wrong argument", { expect_error(subset(wineind, quarter = 
1), "Data is not quarterly") expect_error(subset(woolyrnq, month = "January"), "Data is not monthly") }) test_that("test for bad input", { expect_error(subset.ts(mtcars, quarter = 1), "Data must be seasonal") expect_error(subset(wineind, subset = c(1, 2)), "subset must be the same length as x") expect_error(subset(mtsobj, mtsobj < .5), "subset must be a vector of rows to keep") expect_error(subset(wineind, month = "Jaan"), "No recognizable months") expect_error(subset(wineind, season = 1:14), "Seasons must be between 1 and 12") expect_error(subset(wineind, month = 1:14), "Months must be between 1 and 12") expect_error(subset(woolyrnq, quarter = "qq1"), "No recognizable quarters") expect_error(subset(woolyrnq, quarter = 1:6), "Quarters must be between 1 and 4") }) } forecast/tests/testthat/test-boxcox.R0000644000176200001440000000416714353422625017502 0ustar liggesusers# A unit test for boxcox transformations if (require(testthat)) { test_that("tests for biasadj automatically set based on model fit", { # lm fit <- tslm(USAccDeaths ~ trend, lambda = 0.5, biasadj = TRUE) expect_true(all.equal(forecast(fit), forecast(fit, biasadj = TRUE))) # HoltWintersZZ fit <- ses(USAccDeaths, initial = "simple", lambda = 0.5, biasadj = TRUE) expect_true(all.equal(forecast(fit), forecast(fit, biasadj = TRUE))) # arfima x <- fracdiff::fracdiff.sim(100, ma = -.4, d = .3)$series fit <- arfima(x) expect_true(all.equal(forecast(fit), forecast(fit, biasadj=TRUE))) #arima fit1 <- Arima(USAccDeaths, order = c(0,1,1), seasonal = c(0,1,1), lambda = 0.5, biasadj = TRUE) fit2 <- auto.arima(USAccDeaths, max.p=0, max.d=1, max.q=1, max.P=0, max.D=1, max.Q=1, lambda = 0.5, biasadj = TRUE) expect_true(all.equal(forecast(fit1), forecast(fit1, biasadj=TRUE))) expect_true(all.equal(forecast(fit2), forecast(fit2, biasadj=TRUE))) expect_true(all.equal(forecast(fit1)$mean, forecast(fit2)$mean)) # ets fit <- ets(USAccDeaths, model = "ANA", lambda = 0.5, biasadj = TRUE) expect_true(all.equal(forecast(fit), forecast(fit, biasadj = TRUE))) # bats # fit <- bats(USAccDeaths, use.box.cox = TRUE, biasadj = TRUE) # expect_true(all.equal(forecast(fit), forecast(fit, biasadj=TRUE))) # tbats # fit <- tbats(USAccDeaths, use.box.cox = TRUE, biasadj = TRUE) # expect_true(all.equal(forecast(fit), forecast(fit, biasadj=TRUE))) }) test_that("tests for automatic lambda selection in BoxCox transformation", { lambda_auto <- BoxCox.lambda(USAccDeaths) # lm fit <- tslm(USAccDeaths ~ trend, lambda = "auto", biasadj = TRUE) expect_equal(as.numeric(fit$lambda), lambda_auto, tolerance=1e-3) # ets fit <- ets(USAccDeaths, model = "ANA", lambda = "auto", biasadj = TRUE) expect_equal(as.numeric(fit$lambda), lambda_auto, tolerance=1e-3) # arima fit <- Arima(USAccDeaths, order = c(0,1,1), seasonal = c(0,1,1), lambda = "auto", biasadj = TRUE) expect_equal(as.numeric(fit$lambda), lambda_auto, tolerance=1e-3) }) } forecast/tests/testthat/test-spline.R0000644000176200001440000000074414353422625017467 0ustar liggesusers# A unit test for spline.R if (require(testthat)) { test_that("Tests for splinef()", { plot.splineforecast(splinef(airmiles)) fit1 <- splinef(woolyrnq, lambda = 0.2, biasadj = FALSE) fit2 <- splinef(woolyrnq, lambda = 0.2, biasadj = TRUE) expect_false(identical(fit1$mean, fit2$mean)) splinef(woolyrnq, method = "mle") splinef(WWWusage, method = "mle") expect_error(splinef(woolyrnq, level = 110)) expect_error(splinef(woolyrnq, level = -10)) }) } forecast/tests/testthat.R0000644000176200001440000000011714150370574015212 0ustar 
liggesusersSys.setenv("R_TESTS" = "") if (require(testthat)) { test_check("forecast") } forecast/MD50000644000176200001440000002553314634716616012416 0ustar liggesusers103911f1ca79231d39b14cff8d820339 *DESCRIPTION 4f62633bc8574b54ff4ae0085d81c27e *NAMESPACE 0b0771c6cfc6a053365cef62a6dc5a51 *NEWS.md 8635679a18d096d9e79886efa4d42274 *R/DM2.R d1e5423b18da656d6809572192149ee8 *R/HoltWintersNew.R 1012ef80bcd84169d6dc2deb48a963f8 *R/acf.R 2cb2e34bcc6071acc40b22e6c3ddcc24 *R/adjustSeasonalSeeds.R fb78073e67fe711b3941914d3282771d *R/arfima.R 07c0667dc8fef4820619ebac54fb670a *R/arima.R c796cd0fb735c672daea94fb8b7145ca *R/armaroots.R a691673cff3fc30436fe246f83f833c3 *R/attach.R e8dfa73402b7eae5fa670c4156cabcbe *R/baggedModel.R 90bd8fca0bfdc3fa6f31e5acc4d8e1b5 *R/bats.R c69f52ca63cefc8ec286095b01c490c8 *R/bootstrap.R 7f5e80ed72cf26c3a2386b7d404b6c82 *R/calendar.R 2c2afd2e69e126efdc41a43a633d18b0 *R/checkAdmissibility.R 9f6af411ebef922acf25e14181579f17 *R/checkresiduals.R 53200cd8b991f2f2e544377700904741 *R/clean.R 53bd366467f8721c4b2082c60a5c95ea *R/components.R 55fe30a71cd7f5ae40f161978e6fdd97 *R/data.R bca8b27285a9249d7623917d8e2f3fca *R/dshw.r 4a801ec36eb9a67d3db94e61d3756414 *R/errors.R 4d608066a4002dbd58837c07e8bcd520 *R/ets.R 3b3832d7c86c1f67879af315d49c5265 *R/etsforecast.R a49e4ecccdb8c0f70f70da4b1f8b6086 *R/findfrequency.R 3dd911673657a1e049a988b0d629e30f *R/fitBATS.R 4489632ce1bb96eaaafe51247e31eb4f *R/fitTBATS.R ef1376b4cc03cf23e66b9569117be3a8 *R/forecast-package.R 7dc2254663ea9ad353cd63910277d388 *R/forecast.R 87d693356c97be5cc411f98cb1adfe93 *R/forecast.varest.R 7f6028f3368a9d511f3a2ac8248dd1c7 *R/forecast2.R cb6d7bf60c5cc71b81270d2b0205bc19 *R/forecastBATS.R 4677125c3355f315250c5067e99557b3 *R/forecastTBATS.R f647e0b6d5a501facddd395bdb302346 *R/getResponse.R 29869b613fb21e6a436f32c300265475 *R/ggplot.R e7f4efc9862f994b97d627d7b76e6ec3 *R/graph.R f20ad130d6d6233d41d4e5acf82703ea *R/guerrero.R 90ad1f95e8d0f73c785736698d1b127e *R/lm.R 4dbc5927102faf07edc172b15ee24c4e *R/makeMatrices.R d2a82600e97e923595e3adc760b81288 *R/makeParamVector.R d4e1c72e929d802c63261350dad4bab6 *R/mforecast.R 474a536845da7b177a4d426d223e4908 *R/modelAR.R 333e564e346e4bc310a219ac08d7d781 *R/mstl.R 3a91d0c24a2601ca3a18fb60acccd08f *R/msts.R 2afd90efc04fd8d2bffece766fd4ca38 *R/naive.R c876f7b654137e8841dd2f1629230226 *R/newarima2.R d0671eae6082fc26ced141856ab8c043 *R/nnetar.R 4ed698eabfe343e66ba3dc075ddc486e *R/residuals.R 48e2aee79cf37130ec860583a6d0af6d *R/seasadj.R 6e59c019d0589002ae9f1d224a488f42 *R/season.R 2fbf0f39f2012e5c4d5ead042215e66e *R/simulate.R 7cd92409b235cd14d40ee0701e0204a9 *R/simulate_tbats.R 3a8559406c735ff0f799388e55d77d5a *R/spline.R 3d73197fd6c1ce91302d121c0e155fb9 *R/subset.R ba9d6d3fbdb1c085de82e30b90b6c48f *R/tbats.R 1db4998c13a6a1da4e228ca7a612111a *R/theta.R df1a2b39917256fd4786e1992e4d2c04 *R/tscv.R 25430683535a3072c340c44c5d996c6a *R/unitRoot.R 10f32c6de9168ac9d066307d318bae91 *R/whichmodels.R 7238604557eca232bdfce0502ee4a2eb *R/wrangle.R 57d15099f8ff73632535706efd46709b *README.md c19e7665a0ff630ef08d5772020fdd3e *build/vignette.rds d83263b393c17189250711ff49f730b6 *data/gas.rda de9a9e1c277aa90de4c8ee718d4ef93f *data/gold.rda f0c82cb5de038d46b4489ef90769d58b *data/taylor.rda 38679e434ddf3856988e34aabbe662fc *data/wineind.rda 20dae67335336c52c4228859c36a22c3 *data/woolyrnq.rda 03603761cd0e19d3b66c6e1346eb76a2 *inst/CITATION 74af0a2135e16058d635d5f3ef844ade *inst/doc/JSS2008.R c654c548be2e55c4284dab38657eeebe *inst/doc/JSS2008.Rmd 58379aad70b60f82e7ced8cad03ae665 *inst/doc/JSS2008.pdf 
83f91e71324a77f2e65abbb0a39dac82 *man/Acf.Rd 1233246bc7f32a0e669fe3670261dc78 *man/Arima.Rd b91d83371a8f3574234b6ba4beca570a *man/BoxCox.Rd 0390825433c5207e11a3aa2c9689bbc1 *man/BoxCox.lambda.Rd 4a846add965855d0144767d413211791 *man/CV.Rd 6ecc8c117a35ae3e4a8e20eb546f1856 *man/CVar.Rd 6aaaeb47ccdcde81fcb14a9d65329f44 *man/accuracy.default.Rd 5442830e86a1a0c1013e8d41157871db *man/arfima.Rd 9c2e1f1ef44dcd07a8db27af46c6273f *man/arima.errors.Rd 99d19c63d529f30309f5fa5f22b20d59 *man/arimaorder.Rd b77919994f730e51935edf1c014caa7d *man/auto.arima.Rd 059fe7a40892ae77b6cc1d16a3aeb4af *man/autolayer.Rd f33ea9daffe72a6de6dc4e444c8bf564 *man/autoplot.acf.Rd 292043ae77bb3767640d3493a56424b8 *man/autoplot.seas.Rd 5135c65efe9400b5e3b7884129b5dc99 *man/autoplot.ts.Rd 790787a95a2779ed85b870d9d774cc8b *man/baggedModel.Rd 0e974f0da71ec18800366d77440e1356 *man/bats.Rd e0142a3b240fcc9f6229defc4727c238 *man/bizdays.Rd 209c496a43538dfa3eb929a9a23933c3 *man/bld.mbb.bootstrap.Rd 9848999fddd6caa019b2a97e981b96ba *man/checkresiduals.Rd 3f93237aef8ee4696ddb0bca3f6e9a02 *man/croston.Rd 7558acd6622ed6f745cb4c4de74957f5 *man/dm.test.Rd fad85528e3c36a57426495d6c0be5ba8 *man/dshw.Rd 4c3a40f6807c40d497529da77186946e *man/easter.Rd d241ce7b381ed46cd972f6dc0565f830 *man/ets.Rd d2ccaa153869329ea005b8816b2e029f *man/figures/logo.png 30b384c8dd90a0f902218e76bba5f472 *man/findfrequency.Rd 871cc6cc555d50e6d4c82e3eef954207 *man/fitted.Arima.Rd 33a48ed1bdb748891198e1ea349b3f18 *man/forecast-package.Rd 0f4856ac677c1f398d034a012f6a5b6a *man/forecast.Arima.Rd f77aeca83a063a307f911797e020d6df *man/forecast.HoltWinters.Rd dda85c94530c8b7978d0b9a49968d4c5 *man/forecast.StructTS.Rd a5ede17e227dab77b77e801aff71815f *man/forecast.baggedModel.Rd 3bd2f567500d1898d3d49dae8b120700 *man/forecast.bats.Rd 88a24a1870163ac29156ce1cc8056119 *man/forecast.ets.Rd bb17f9d40014a8987f4ed9f50a4c6f22 *man/forecast.lm.Rd 76069224b3c79f9d51454611ab8ffe2e *man/forecast.mlm.Rd fb7fdcc24bb63ece5fb4e5484f78ae23 *man/forecast.modelAR.Rd 6bbb48242a147f43078a9c543c927e61 *man/forecast.mts.Rd 7b811361f9d9e69f6ef03b68a98f377a *man/forecast.nnetar.Rd e3f2afcecffefa3e11da29c3c7dfc082 *man/forecast.stl.Rd ca6e4c0080b53338eaa63e5388737c59 *man/forecast.ts.Rd 8536b20ad167f572a8f41ff7157276a4 *man/fourier.Rd 4c310ce65a57cac565c003ba8d1c706c *man/gas.Rd d4e84da63601e9c09e84929d6c9b9b01 *man/geom_forecast.Rd 04278fb50a27f247325113f626cd9214 *man/getResponse.Rd c6dbd99bebbefa49cb1cb4a012e66963 *man/gghistogram.Rd fe56843c030a399f300275aae6eee230 *man/gglagplot.Rd a655c9f394843bc1aec3af6deb67f9f6 *man/ggmonthplot.Rd 2f30c1139541d530824d307fd86a93e7 *man/gold.Rd 69c39a9661c43d0631c6a1ef4c3ffae3 *man/is.constant.Rd a1f193faca93a50f329bd52eafbe6d6e *man/is.ets.Rd 0fdb9a4ef0c3274a7a6a1135b5a2f591 *man/is.forecast.Rd c4419f85bca986614f9950fe954b6c86 *man/ma.Rd 4f946a31c5da15d9c1a2697a8144d2d4 *man/meanf.Rd fbb350863c6aecb2c95e3512a54801bb *man/modelAR.Rd 4dbd96acb654451b659c8f8ba00d8c9f *man/modeldf.Rd ee39cdf50cad2d1aae445da601dc4ac9 *man/monthdays.Rd 35e54c362d1568b509e7606544d823f9 *man/mstl.Rd c1e2f69e0e056997a5dcf747fba66c1b *man/msts.Rd db892ad70cd382574ab7ab004c800e4d *man/na.interp.Rd b84cf459aa295fc8b8203b6ecc85c507 *man/naive.Rd f457cb0539e4dc98f37f10786af164ba *man/ndiffs.Rd 378ede105a593b204006701d6362fd93 *man/nnetar.Rd d8e464947377cc21729cc855499ab2c3 *man/nsdiffs.Rd e5efcb1e39e1e8816050e0a04f2de0dd *man/ocsb.test.Rd ca040caf6e8cb69a709f093640e05b19 *man/plot.Arima.Rd 05d3c713844dabe0e0ab1e7e877acc94 *man/plot.bats.Rd dda7462647917f639d1ed48e010c0874 *man/plot.ets.Rd 
523b0e23da1fef3337e44c0095869c8c *man/plot.forecast.Rd bd4278fe0b6984e2f88c3e3e97f1fa48 *man/plot.mforecast.Rd 6c7f2c4386245076645be6f16737cdd7 *man/reexports.Rd 4348571330fa10db423a393eac8e8a72 *man/residuals.forecast.Rd 43698c8686becf342e68c02aa797cbc0 *man/seasadj.Rd c8a8a9bf21ea57bf9e1304698905cfd3 *man/seasonal.Rd 86a05976843a74991be96ac536fcdfee *man/seasonaldummy.Rd 38bc4dcc5611b22fe90b80617693aafd *man/seasonplot.Rd e31cfb0650c537e97c8e8c32e170461a *man/ses.Rd 21fe5bde767254014a7d7c4f00e258c6 *man/simulate.ets.Rd 59b2af1fb81a9088a5aa0e8e66507ae3 *man/sindexf.Rd 54be116966779434d622dddcd9eabb1d *man/splinef.Rd 2901ce6660f7b06d7ecaa226276052ae *man/subset.ts.Rd 580471f7024edd6c15f796caefa804a7 *man/taylor.Rd e2723ca1bd6e6df55bc698fd572de580 *man/tbats.Rd 0ae2d2dd61045aefec1202213a05e2f7 *man/tbats.components.Rd 04a2aa0f9f3f2e9314562247f444c302 *man/thetaf.Rd 137757f5d574ca7845b9a651e348d316 *man/tsCV.Rd a068b467a8b0b54c43771c998de15d0f *man/tsclean.Rd fe3f298d209fbf7769faac02f39733f6 *man/tsdisplay.Rd 3822a7637be4232e2d420c72a45871be *man/tslm.Rd 888eba3e26715d9861473ac27e9cbd1f *man/tsoutliers.Rd df48ac7208918eeecd7681e8578ba872 *man/wineind.Rd d456755f11d47e081631c0753c67ddde *man/woolyrnq.Rd f6eb5fa21b3765950d039a3b000a83d6 *src/Makevars f6eb5fa21b3765950d039a3b000a83d6 *src/Makevars.win f9ecc1f5c60cd3dc0308fb1a3d7330f9 *src/calcBATS.cpp 8a950e3ff166da855be2bac67fa699ef *src/calcBATS.h d0f166950dc9401c846838e18d237c52 *src/calcTBATS.cpp 3c53efc7fbf62345277457fce024695a *src/etsTargetFunction.cpp 1862021b4c660004e29148f2ab00c46a *src/etsTargetFunction.h 1c35039e6ec6da9e4bbb4c04b6235905 *src/etsTargetFunctionWrapper.cpp 1993b54b4241d9339a89e94deee49959 *src/etscalc.c 10a04e99372e13b619cb078598d72b19 *src/etspolyroot.c 7737b4b0565a59df9fdacdeab4234f8d *src/makeBATSMatrices.cpp f6e3a6eda213b1c0154adb7d2a638852 *src/makeTBATSMatrices.cpp 7c22b1b500cab25872924e218f1645f5 *src/registerDynamicSymbol.c cdb9079de1d58f886f769aa68f0f480e *src/updateMatrices.cpp 06adefbef775d3ddd511764f84d16f13 *src/updateTBATSMatrices.cpp 22708a41a2f33a810a8353ff0a62a1eb *tests/testthat.R 5ce26e5e558d08a2e0c411a36ad60f7b *tests/testthat/test-accuracy.R 3b580e8bd4b8da3bac0d6489a2ad2e64 *tests/testthat/test-acf.R 05607b4a3ee0b4bd6354a7c342bf748c *tests/testthat/test-arfima.R 9384d55845d8325a918ea6b3192f4c93 *tests/testthat/test-arima.R aafe5438a347abe8740fca4add616a2a *tests/testthat/test-armaroots.R bad4f6f86a7b3d74333cd6e42fd82912 *tests/testthat/test-bats.R cc07f49ef73d238837d4a0c6cee150b9 *tests/testthat/test-boxcox.R 1e11fa115e1e56b18afb3fd79e5f53c4 *tests/testthat/test-calendar.R d06215d364164219a5087baa8fc71968 *tests/testthat/test-clean.R e29ef19acf0b97b246205cae7a2a242b *tests/testthat/test-dshw.R 569467a4f0e400a68f047a0bb5c029cb *tests/testthat/test-ets.R 348765823673999adcf1e9d3321069c8 *tests/testthat/test-forecast.R a9bdc48ed3b761ff84658aa01d961795 *tests/testthat/test-forecast2.R aa707bfcff6a8d83a6658020105a78ef *tests/testthat/test-ggplot.R 60a12b281f2c89cb93382a3757087cd9 *tests/testthat/test-graph.R 7248fd1953fc5bdca50933fa5922fdec *tests/testthat/test-hfitted.R 2f8dac2d15acdae41651ccae610cc8b3 *tests/testthat/test-mforecast.R 89b6ecb622d8db706a41ad8efaa7e547 *tests/testthat/test-modelAR.R 57a3715236999a8c51c3d49ec103ca9a *tests/testthat/test-msts.R aa1f54d7fb4b0e7651081a622fcf9c5b *tests/testthat/test-newarima2.R 6ffa8156de363742c886dd577e45c419 *tests/testthat/test-nnetar.R 802309f8f3824c53438b1a3d22ed19be *tests/testthat/test-refit.R 165c2eefc684be35818d4bf75b446312 
*tests/testthat/test-season.R
f0e5201af3cd697d85c3e1bd82720ba0 *tests/testthat/test-spline.R
cdeaa87e7bc3e478497824d70d1d3390 *tests/testthat/test-subset.R
697883c2fa3b300efcef42a4911cdfb5 *tests/testthat/test-tbats.R
8741ad0287cb8bbbb0d73fbd7bb630e3 *tests/testthat/test-thetaf.R
45254e5bb52856455e58d771ce5fca4c *tests/testthat/test-tslm.R
20c99ad040532709bc751d5f0cf90115 *tests/testthat/test-wrangle.R
2fa3082147c937669b5c1a44722eb85b *vignettes/JSS-paper.bib
c654c548be2e55c4284dab38657eeebe *vignettes/JSS2008.Rmd
16e6ff1f952a8b8b4f77aa0adf736559 *vignettes/jsslogo.jpg
5476cea82c73c77a83ddb7e2619d3727 *vignettes/orcidlink.sty
forecast/R/0000755000176200001440000000000014633662406012273 5ustar liggesusersforecast/R/DM2.R0000644000176200001440000001220414456202551012771 0ustar liggesusers# Diebold-Mariano test. Modified from code by Adrian Trapletti.
# Then adapted by M. Yousaf Khan for better performance on small samples

#' Diebold-Mariano test for predictive accuracy
#'
#' The Diebold-Mariano test compares the forecast accuracy of two forecast
#' methods.
#'
#' This function implements the modified test proposed by Harvey, Leybourne and
#' Newbold (1997). The null hypothesis is that the two methods have the same
#' forecast accuracy. For \code{alternative="less"}, the alternative hypothesis
#' is that method 2 is less accurate than method 1. For
#' \code{alternative="greater"}, the alternative hypothesis is that method 2 is
#' more accurate than method 1. For \code{alternative="two.sided"}, the
#' alternative hypothesis is that method 1 and method 2 have different levels
#' of accuracy. The long-run variance estimator can either be the
#' auto-correlation estimator \code{varestimator = "acf"}, or the estimator based
#' on Bartlett weights \code{varestimator = "bartlett"}, which ensures a positive estimate.
#' Both long-run variance estimators are proposed in Diebold and Mariano (1995).
#'
#' @param e1 Forecast errors from method 1.
#' @param e2 Forecast errors from method 2.
#' @param alternative a character string specifying the alternative hypothesis,
#' must be one of \code{"two.sided"} (default), \code{"greater"} or
#' \code{"less"}. You can specify just the initial letter.
#' @param h The forecast horizon used in calculating \code{e1} and \code{e2}.
#' @param power The power used in the loss function. Usually 1 or 2.
#' @param varestimator a character string specifying the long-run variance estimator.
#' Options are \code{"acf"} (default) or \code{"bartlett"}.
#' @return A list with class \code{"htest"} containing the following
#' components:
#' \item{statistic}{the value of the DM-statistic.}
#' \item{parameter}{the forecast horizon and loss function power used in the test.}
#' \item{alternative}{a character string describing the alternative hypothesis.}
#' \item{varestimator}{a character string describing the long-run variance estimator.}
#' \item{p.value}{the p-value for the test.}
#' \item{method}{a character string with the value "Diebold-Mariano Test".}
#' \item{data.name}{a character vector giving the names of the two error series.}
#' @author George Athanasopoulos and Kirill Kuroptev
#' @references Diebold, F.X. and Mariano, R.S. (1995) Comparing predictive
#' accuracy. \emph{Journal of Business and Economic Statistics}, \bold{13},
#' 253-263.
#'
#' Harvey, D., Leybourne, S., & Newbold, P. (1997). Testing the equality of
#' prediction mean squared errors. \emph{International Journal of Forecasting},
#' \bold{13}(2), 281-291.
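#'
#' @details
#' Concretely, the statistic implemented here is built from the loss
#' differential \eqn{d_t = |e_{1,t}|^p - |e_{2,t}|^p}{d[t] = |e1[t]|^p - |e2[t]|^p}:
#' \eqn{DM = \bar{d}/\sqrt{\hat{V}(\bar{d})}}{DM = mean(d)/sqrt(V)}, where
#' \eqn{\hat{V}(\bar{d})}{V} is the selected long-run variance estimate of the
#' mean loss differential. The statistic is then scaled by the Harvey, Leybourne
#' and Newbold (1997) small-sample correction
#' \eqn{\sqrt{(n + 1 - 2h + h(h-1)/n)/n}}{sqrt((n + 1 - 2h + h(h-1)/n)/n)}
#' and compared with a t-distribution with \eqn{n-1} degrees of freedom, where
#' \eqn{n} is the number of loss differentials.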
#' @keywords htest ts #' @examples #' #' # Test on in-sample one-step forecasts #' f1 <- ets(WWWusage) #' f2 <- auto.arima(WWWusage) #' accuracy(f1) #' accuracy(f2) #' dm.test(residuals(f1), residuals(f2), h = 1) #' #' # Test on out-of-sample one-step forecasts #' f1 <- ets(WWWusage[1:80]) #' f2 <- auto.arima(WWWusage[1:80]) #' f1.out <- ets(WWWusage[81:100], model = f1) #' f2.out <- Arima(WWWusage[81:100], model = f2) #' accuracy(f1.out) #' accuracy(f2.out) #' dm.test(residuals(f1.out), residuals(f2.out), h = 1) #' @export dm.test <- function(e1, e2, alternative = c("two.sided", "less", "greater"), h = 1, power = 2, varestimator = c("acf", "bartlett")) { alternative <- match.arg(alternative) varestimator <- match.arg(varestimator) h <- as.integer(h) if(h < 1L) { stop("h must be at least 1") } if(h > length(e1)) { stop("h cannot be longer than the number of forecast errors") } d <- c(abs(e1))^power - c(abs(e2))^power d.cov <- acf(d, na.action = na.omit, lag.max = h-1, type = "covariance", plot = FALSE)$acf[, , 1] n <- length(d) if (varestimator == "acf" | h == 1L) { # Original estimator d.var <- sum(c(d.cov[1], 2 * d.cov[-1])) / n } else { # varestimator == "bartlett" # Using Bartlett weights to ensure a positive estimate of long-run-variance d.var <- sum(c(d.cov[1], 2 * (1 - seq_len(h-1)/h) * d.cov[-1])) / n } dv <- d.var if (dv > 0) { STATISTIC <- mean(d, na.rm = TRUE) / sqrt(dv) } else if (h == 1) { stop("Variance of DM statistic is zero") } else { warning("Variance is negative. Try varestimator = bartlett. Proceeding with horizon h=1.") return(dm.test(e1, e2, alternative, h = 1, power, varestimator)) } k <- ((n + 1 - 2 * h + (h / n) * (h - 1)) / n) ^ (1 / 2) STATISTIC <- STATISTIC * k names(STATISTIC) <- "DM" if (alternative == "two.sided") { PVAL <- 2 * pt(-abs(STATISTIC), df = n - 1) } else if (alternative == "less") { PVAL <- pt(STATISTIC, df = n - 1) } else if (alternative == "greater") { PVAL <- pt(STATISTIC, df = n - 1, lower.tail = FALSE) } PARAMETER <- c(h, power) names(PARAMETER) <- c("Forecast horizon", "Loss function power") structure( list( statistic = STATISTIC, parameter = PARAMETER, alternative = alternative, varestimator = varestimator, p.value = PVAL, method = "Diebold-Mariano Test", data.name = c(deparse(substitute(e1)), deparse(substitute(e2))) ), class = "htest" ) } is.htest <- function(x) { inherits(x, "htest") } forecast/R/forecast-package.R0000644000176200001440000000265314272665773015634 0ustar liggesusers#' @keywords package #' @aliases forecast-package "_PACKAGE" #' @import parallel #' @import Rcpp #' #' @importFrom colorspace sequential_hcl #' @importFrom fracdiff fracdiff diffseries fracdiff.sim #' @importFrom tseries adf.test pp.test kpss.test #' @importFrom zoo rollmean as.Date as.yearqtr #' @importFrom timeDate as.timeDate isBizday difftimeDate Easter as.Date.timeDate #' @importFrom nnet nnet #' @importFrom grDevices gray heat.colors nclass.FD palette #' @importFrom graphics abline axis grid layout lines mtext par plot points polygon text title hist #' @importFrom stats Box.test acf approx ar arima arima.sim as.ts complete.cases cycle decompose diffinv end extractAIC fitted formula frequency window filter na.contiguous spec.ar hatvalues is.ts ksmooth lm lsfit loess median model.frame na.exclude na.omit na.pass optim optimize pf plot.ts poly predict pt qnorm qt quantile residuals rnorm runif sd simulate smooth.spline start stl supsmu terms time ts tsp tsp<- tsdiag var logLik nobs napredict #' @importFrom stats aggregate as.formula is.mts reformulate #' 
@importFrom utils packageVersion tail head
#' @importFrom ggplot2 autoplot fortify
#' @importFrom lmtest bgtest
#' @importFrom stats supsmu
#' @importFrom magrittr %>%
#' @importFrom generics forecast accuracy
#'
#' @useDynLib forecast, .registration = TRUE
NULL

# Generics to re-export
#' @export
magrittr::`%>%`

#' @export
generics::forecast

#' @export
generics::accuracy
forecast/R/tscv.R0000644000176200001440000002133114456202551013367 0ustar liggesusers# Time series cross-validation
# y is a time series
# forecastfunction must return an object of class forecast
# h is number of steps ahead to forecast
# ... are passed to forecastfunction

#' Time series cross-validation
#'
#' \code{tsCV} computes the forecast errors obtained by applying
#' \code{forecastfunction} to subsets of the time series \code{y} using a
#' rolling forecast origin.
#'
#' Let \code{y} contain the time series \eqn{y_1,\dots,y_T}{y[1:T]}. Then
#' \code{forecastfunction} is applied successively to the time series
#' \eqn{y_1,\dots,y_t}{y[1:t]}, for \eqn{t=1,\dots,T-h}, making predictions
#' \eqn{\hat{y}_{t+h|t}}{f[t+h]}. The errors are given by \eqn{e_{t+h} =
#' y_{t+h}-\hat{y}_{t+h|t}}{e[t+h] = y[t+h]-f[t+h]}. If h=1, these are returned as a
#' vector, \eqn{e_1,\dots,e_T}{e[1:T]}. For h>1, they are returned as a matrix with
#' the hth column containing errors for forecast horizon h.
#' The first few errors may be missing as
#' it may not be possible to apply \code{forecastfunction} to very short time
#' series.
#'
#' @param y Univariate time series
#' @param forecastfunction Function to return an object of class
#' \code{forecast}. Its first argument must be a univariate time series, and it
#' must have an argument \code{h} for the forecast horizon. If exogenous predictors are used,
#' then it must also have \code{xreg} and \code{newxreg} arguments corresponding to the
#' training and test periods.
#' @param h Forecast horizon
#' @param window Length of the rolling window. If \code{NULL}, a rolling window will not be used.
#' @param xreg Exogenous predictor variables passed to the forecast function if required.
#' @param initial Initial period of the time series where no cross-validation is performed.
#' @param ... Other arguments are passed to \code{forecastfunction}.
#' @return Numerical time series object containing the forecast errors as a vector (if h=1)
#' and a matrix otherwise. The time index corresponds to the last period of the training
#' data. The columns correspond to the forecast horizons.
#' @author Rob J Hyndman
#' @seealso \link{CV}, \link{CVar}, \link{residuals.Arima}, \url{https://robjhyndman.com/hyndsight/tscv/}.
#'
#' @keywords ts
#' @examples
#'
#' #Fit an AR(2) model to each rolling origin subset
#' far2 <- function(x, h){forecast(Arima(x, order=c(2,0,0)), h=h)}
#' e <- tsCV(lynx, far2, h=1)
#'
#' #Fit the same model with a rolling window of length 30
#' e <- tsCV(lynx, far2, h=1, window=30)
#'
#' #Example with exogenous predictors
#' far2_xreg <- function(x, h, xreg, newxreg) {
#'   forecast(Arima(x, order=c(2,0,0), xreg=xreg), xreg=newxreg)
#' }
#'
#' y <- ts(rnorm(50))
#' xreg <- matrix(rnorm(100),ncol=2)
#' e <- tsCV(y, far2_xreg, h=3, xreg=xreg)
#'
#' @export
tsCV <- function(y, forecastfunction, h=1, window=NULL, xreg=NULL, initial=0, ...)
{ y <- as.ts(y) n <- length(y) e <- ts(matrix(NA_real_, nrow = n, ncol = h)) if(initial >= n) stop("initial period too long") tsp(e) <- tsp(y) if (!is.null(xreg)) { # Make xreg a ts object to allow easy subsetting later xreg <- ts(as.matrix(xreg)) if(NROW(xreg) != length(y)) stop("xreg must be of the same size as y") # Pad xreg with NAs xreg <- ts(rbind(xreg, matrix(NA, nrow=h, ncol=NCOL(xreg))), start = start(y), frequency = frequency(y)) } if (is.null(window)) indx <- seq(1+initial, n - 1L) else indx <- seq(window+initial, n - 1L, by = 1L) for (i in indx) { y_subset <- subset( y, start = ifelse(is.null(window), 1L, ifelse(i - window >= 0L, i - window + 1L, stop("small window"))), end = i) if (is.null(xreg)) { fc <- try(suppressWarnings( forecastfunction(y_subset, h = h, ...) ), silent = TRUE) } else { xreg_subset <- subset( xreg, start = ifelse(is.null(window), 1L, ifelse(i - window >= 0L, i - window + 1L, stop("small window"))), end = i) xreg_future <- subset( xreg, start = i+1, end = i+h) fc <- try(suppressWarnings( forecastfunction(y_subset, h = h, xreg = xreg_subset, newxreg=xreg_future, ...) ), silent = TRUE) } if (!is.element("try-error", class(fc))) { e[i, ] <- y[i + seq(h)] - fc$mean[seq(h)] } } if (h == 1) { return(e[, 1L]) } else { colnames(e) <- paste("h=", 1:h, sep = "") return(e) } } # Cross-validation for AR models # By Gabriel Caceres ## Note arguments to pass must be named #' k-fold Cross-Validation applied to an autoregressive model #' #' \code{CVar} computes the errors obtained by applying an autoregressive #' modelling function to subsets of the time series \code{y} using k-fold #' cross-validation as described in Bergmeir, Hyndman and Koo (2015). It also #' applies a Ljung-Box test to the residuals. If this test is significant #' (see returned pvalue), there is serial correlation in the residuals and the #' model can be considered to be underfitting the data. In this case, the #' cross-validated errors can underestimate the generalization error and should #' not be used. #' #' @aliases print.CVar #' #' @param y Univariate time series #' @param k Number of folds to use for cross-validation. #' @param FUN Function to fit an autoregressive model. Currently, it only works #' with the \code{\link{nnetar}} function. #' @param cvtrace Provide progress information. #' @param blocked choose folds randomly or as blocks? #' @param LBlags lags for the Ljung-Box test, defaults to 24, for yearly series can be set to 20 #' @param ... Other arguments are passed to \code{FUN}. #' @return A list containing information about the model and accuracy for each #' fold, plus other summary information computed across folds. #' @author Gabriel Caceres and Rob J Hyndman #' @seealso \link{CV}, \link{tsCV}. #' @references Bergmeir, C., Hyndman, R.J., Koo, B. (2018) A note on the #' validity of cross-validation for evaluating time series prediction. #' \emph{Computational Statistics & Data Analysis}, \bold{120}, 70-83. #' \url{https://robjhyndman.com/publications/cv-time-series/}. #' @keywords ts #' @examples #' #' modelcv <- CVar(lynx, k=5, lambda=0.15) #' print(modelcv) #' print(modelcv$fold1) #' #' library(ggplot2) #' autoplot(lynx, series="Data") + #' autolayer(modelcv$testfit, series="Fits") + #' autolayer(modelcv$residuals, series="Residuals") #' ggAcf(modelcv$residuals) #' #' @export CVar <- function(y, k=10, FUN=nnetar, cvtrace=FALSE, blocked=FALSE, LBlags=24, ...) 
{
  nx <- length(y)
  # The number of folds can be at most the number of observations
  k <- min(as.integer(k), nx)
  if (k <= 1L) {
    stop("k must be at least 2")
  }
  # Set up folds
  ind <- seq_len(nx)
  fold <- if (blocked) {
    sort(rep(1:k, length.out = nx))
  } else {
    sample(rep(1:k, length.out = nx))
  }
  cvacc <- matrix(NA_real_, nrow = k, ncol = 7)
  out <- list()
  alltestfit <- rep(NA, length.out = nx)
  for (i in 1:k) {
    out[[paste0("fold", i)]] <- list()
    testset <- ind[fold == i]
    trainset <- ind[fold != i]
    trainmodel <- FUN(y, subset = trainset, ...)
    testmodel <- FUN(y, model = trainmodel, xreg = trainmodel$xreg)
    testfit <- fitted(testmodel)
    acc <- accuracy(y, testfit, test = testset)
    cvacc[i, ] <- acc
    out[[paste0("fold", i)]]$model <- trainmodel
    out[[paste0("fold", i)]]$accuracy <- acc
    out[[paste0("fold", i)]]$testfit <- testfit
    out[[paste0("fold", i)]]$testset <- testset
    alltestfit[testset] <- testfit[testset]
    if (isTRUE(cvtrace)) {
      cat("Fold", i, "\n")
      print(acc)
      cat("\n")
    }
  }
  out$testfit <- ts(alltestfit)
  tsp(out$testfit) <- tsp(y)
  out$residuals <- out$testfit - y
  out$LBpvalue <- Box.test(out$residuals, type = "Ljung", lag = LBlags)$p.value
  out$k <- k
  # calculate mean accuracy across all folds
  CVmean <- matrix(apply(cvacc, 2, FUN = mean, na.rm = TRUE),
    dimnames = list(colnames(acc), "Mean"))
  # calculate accuracy sd across all folds --- include?
  CVsd <- matrix(apply(cvacc, 2, FUN = sd, na.rm = TRUE),
    dimnames = list(colnames(acc), "SD"))
  out$CVsummary <- cbind(CVmean, CVsd)
  out$series <- deparse(substitute(y))
  out$call <- match.call()
  return(structure(out, class = c("CVar", class(trainmodel))))
}

#' @export
print.CVar <- function(x, ...) {
  cat("Series:", x$series, "\n")
  cat("Call: ")
  print(x$call)
  # Add info about series, function, and parameters
  # Add note about any NA/NaN in folds?
  #
  # Print number of folds
  cat("\n", x$k, "-fold cross-validation\n", sep = "")
  # Print mean & sd accuracy() results
  print(x$CVsummary)
  cat("\n")
  cat("p-value of Ljung-Box test of residuals is ", x$LBpvalue, "\n")
  cat("if this value is significant (<0.05),\n")
  cat("the result of the cross-validation should not be used\n")
  cat("as the model is underfitting the data.\n")
  invisible(x)
}
forecast/R/HoltWintersNew.R0000644000176200001440000004537014254256650015360 0ustar liggesusers# Modelled on the HoltWinters() function but with more conventional
# initialization.
# Written by Zhenyu Zhou.
21 October 2012 HoltWintersZZ <- function(x, # smoothing parameters alpha = NULL, # level beta = NULL, # trend gamma = NULL, # seasonal component seasonal = c("additive", "multiplicative"), exponential = FALSE, # exponential phi = NULL, # damp lambda = NULL, # box-cox biasadj = FALSE, # adjusted back-transformed mean for box-cox warnings = TRUE # return optimization warnings ) { x <- as.ts(x) origx <- x seasonal <- match.arg(seasonal) m <- frequency(x) lenx <- length(x) if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } if (is.null(phi) || !is.numeric(phi)) { phi <- 1 } if (!is.null(alpha) && !is.numeric(alpha)) { stop("cannot fit models without level ('alpha' must not be 0 or FALSE).") } if (!all(is.null(c(alpha, beta, gamma))) && any(c(alpha, beta, gamma) < 0 | c(alpha, beta, gamma) > 1)) { stop("'alpha', 'beta' and 'gamma' must be within the unit interval.") } if ((is.null(gamma) || gamma > 0)) { if (seasonal == "multiplicative" && any(x <= 0)) { stop("data must be positive for multiplicative Holt-Winters.") } } if (m <= 1) { gamma <- FALSE } ## initialise l0, b0, s0 if (!is.null(gamma) && is.logical(gamma) && !gamma) { seasonal <- "none" l.start <- x[1L] s.start <- 0 if (is.null(beta) || !is.logical(beta) || beta) { if (!exponential) { b.start <- x[2L] - x[1L] } else { b.start <- x[2L] / x[1L] } } } else { ## seasonal Holt-Winters l.start <- mean(x[1:m]) b.start <- (mean(x[m + (1:m)]) - l.start) / m if (seasonal == "additive") { s.start <- x[1:m] - l.start } else { s.start <- x[1:m] / l.start } } # initialise smoothing parameters # lower=c(rep(0.0001,3), 0.8) # upper=c(rep(0.9999,3),0.98) lower <- c(0, 0, 0, 0) upper <- c(1, 1, 1, 1) if (!is.null(beta) && is.logical(beta) && !beta) { trendtype <- "N" } else if (exponential) { trendtype <- "M" } else { trendtype <- "A" } if (seasonal == "none") { seasontype <- "N" } else if (seasonal == "multiplicative") { seasontype <- "M" } else { seasontype <- "A" } ## initialise smoothing parameter optim.start <- initparam( alpha = alpha, beta = beta, gamma = gamma, phi = 1, trendtype = trendtype, seasontype = seasontype, damped = FALSE, lower = lower, upper = upper, m = m, bounds = "usual" ) # if(!is.na(optim.start["alpha"])) # alpha2 <- optim.start["alpha"] # else # alpha2 <- alpha # if(!is.na(optim.start["beta"])) # beta2 <- optim.start["beta"] # else # beta2 <- beta # if(!is.na(optim.start["gamma"])) # gamma2 <- optim.start["gamma"] # else # gamma2 <- gamma # if(!check.param(alpha = alpha2,beta = beta2, gamma = gamma2,phi=1,lower,upper,bounds="haha",m=m)) # { # print(paste("alpha=", alpha2, "beta=",beta2, "gamma=",gamma2)) # stop("Parameters out of range") # } ################################################################################### # optimisation: alpha, beta, gamma, if any of them is null, then optimise them error <- function(p, select) { if (select[1] > 0) { alpha <- p[1L] } if (select[2] > 0) { beta <- p[1L + select[1]] } if (select[3] > 0) { gamma <- p[1L + select[1] + select[2]] } zzhw( x, lenx = lenx, alpha = alpha, beta = beta, gamma = gamma, seasonal = seasonal, m = m, dotrend = (!is.logical(beta) || beta), doseasonal = (!is.logical(gamma) || gamma), exponential = exponential, phi = phi, l.start = l.start, b.start = b.start, s.start = s.start )$SSE } select <- as.numeric(c(is.null(alpha), is.null(beta), is.null(gamma))) if (sum(select) > 0) # There are parameters to optimize { sol <- optim(optim.start, error, method = "L-BFGS-B", lower = lower[select], upper = upper[select], select = select) if 
(sol$convergence || any(sol$par < 0 | sol$par > 1)) { if (sol$convergence > 50) { if (warnings) { warning(gettextf("optimization difficulties: %s", sol$message), domain = NA) } } else { stop("optimization failure") } } if (select[1] > 0) { alpha <- sol$par[1L] } if (select[2] > 0) { beta <- sol$par[1L + select[1]] } if (select[3] > 0) { gamma <- sol$par[1L + select[1] + select[2]] } } final.fit <- zzhw( x, lenx = lenx, alpha = alpha, beta = beta, gamma = gamma, seasonal = seasonal, m = m, dotrend = (!is.logical(beta) || beta), doseasonal = (!is.logical(gamma) || gamma), exponential = exponential, phi = phi, l.start = l.start, b.start = b.start, s.start = s.start ) tspx <- tsp(x) fitted <- ts(final.fit$fitted, frequency = m, start = tspx[1]) res <- ts(final.fit$residuals, frequency = m, start = tspx[1]) if (!is.null(lambda)) { fitted <- InvBoxCox(fitted, lambda, biasadj, var(final.fit$residuals)) attr(lambda, "biasadj") <- biasadj } states <- matrix(final.fit$level, ncol = 1) colnames(states) <- "l" if (trendtype != "N") { states <- cbind(states, b = final.fit$trend) } if (seasontype != "N") { nr <- nrow(states) nc <- ncol(states) for (i in 1:m) states <- cbind(states, final.fit$season[(m - i) + (1:nr)]) colnames(states)[nc + (1:m)] <- paste("s", 1:m, sep = "") } states <- ts(states, frequency = m, start = tspx[1] - 1 / m) # Package output as HoltWinters class # structure(list(fitted = fitted, # x = x, # alpha = alpha, # beta = beta, # gamma = gamma, # coefficients = c(a = final.fit$level[lenx], # b = if (!is.logical(beta) || beta) final.fit$trend[lenx], # s = if (!is.logical(gamma) || gamma) final.fit$season[lenx - m + 1L:m]), # seasonal = seasonal, # exponential = exponential, # SSE = final.fit$SSE, # call = match.call(), # level = final.fit$level, # trend = final.fit$trend, # season = final.fit$season, # phi = phi # ), # class = "HoltWinters" # ) # Package output as ets class damped <- (phi < 1.0) if (seasonal == "additive") { # This should not happen components <- c("A", trendtype, seasontype, damped) } else if (seasonal == "multiplicative") { components <- c("M", trendtype, seasontype, damped) } else if (seasonal == "none" && exponential) { components <- c("M", trendtype, seasontype, damped) } else { # if(seasonal=="none" & !exponential) components <- c("A", trendtype, seasontype, damped) } initstate <- states[1, ] param <- alpha names(param) <- "alpha" if (trendtype != "N") { param <- c(param, beta = beta) names(param)[length(param)] <- "beta" } if (seasontype != "N") { param <- c(param, gamma = gamma) names(param)[length(param)] <- "gamma" } if (damped) { param <- c(param, phi = phi) names(param)[length(param)] <- "phi" } if (components[1] == "A") { sigma2 <- mean(res ^ 2) } else { sigma2 <- mean((res / fitted) ^ 2) } structure( list( fitted = fitted, residuals = res, components = components, x = origx, par = c(param, initstate), initstate = initstate, states = states, SSE = final.fit$SSE, sigma2 = sigma2, call = match.call(), m = m, lambda = lambda ), class = "ets" ) } ################################################################################### # filter function zzhw <- function(x, lenx, alpha=NULL, beta=NULL, gamma=NULL, seasonal="additive", m, dotrend=FALSE, doseasonal=FALSE, l.start=NULL, exponential = NULL, phi=NULL, b.start=NULL, s.start=NULL) { if (exponential != TRUE || is.null(exponential)) { exponential <- FALSE } if (is.null(phi) || !is.numeric(phi)) { phi <- 1 } # initialise array of l, b, s level <- trend <- season <- xfit <- residuals <- numeric(lenx) SSE <- 0 if 
(!dotrend) { beta <- 0 b.start <- 0 } if (!doseasonal) { gamma <- 0 s.start[1:length(s.start)] <- ifelse(seasonal == "additive", 0, 1) } lastlevel <- level0 <- l.start lasttrend <- trend0 <- b.start season0 <- s.start for (i in 1:lenx) { # definel l(t-1) if (i > 1) { lastlevel <- level[i - 1] } # define b(t-1) if (i > 1) { lasttrend <- trend[i - 1] } # define s(t-m) if (i > m) { lastseason <- season[i - m] } else { lastseason <- season0[i] } if (is.na(lastseason)) { lastseason <- ifelse(seasonal == "additive", 0, 1) } # stop((lastlevel + phi*lasttrend)*lastseason) # forecast for this period i if (seasonal == "additive") { if (!exponential) { xhat <- lastlevel + phi * lasttrend + lastseason } else { xhat <- lastlevel * lasttrend ^ phi + lastseason } } else { if (!exponential) { xhat <- (lastlevel + phi * lasttrend) * lastseason } else { xhat <- lastlevel * lasttrend ^ phi * lastseason } } xfit[i] <- xhat res <- x[i] - xhat residuals[i] <- res SSE <- SSE + res * res # calculate level[i] if (seasonal == "additive") { if (!exponential) { level[i] <- alpha * (x[i] - lastseason) + (1 - alpha) * (lastlevel + phi * lasttrend) } else { level[i] <- alpha * (x[i] - lastseason) + (1 - alpha) * (lastlevel * lasttrend ^ phi) } } else { if (!exponential) { level[i] <- alpha * (x[i] / lastseason) + (1 - alpha) * (lastlevel + phi * lasttrend) } else { level[i] <- alpha * (x[i] / lastseason) + (1 - alpha) * (lastlevel * lasttrend ^ phi) } } # calculate trend[i] if (!exponential) { trend[i] <- beta * (level[i] - lastlevel) + (1 - beta) * phi * lasttrend } else { trend[i] <- beta * (level[i] / lastlevel) + (1 - beta) * lasttrend ^ phi } # calculate season[i] if (seasonal == "additive") { if (!exponential) { season[i] <- gamma * (x[i] - lastlevel - phi * lasttrend) + (1 - gamma) * lastseason } else { season[i] <- gamma * (x[i] - lastlevel * lasttrend ^ phi) + (1 - gamma) * lastseason } } else { if (!exponential) { season[i] <- gamma * (x[i] / (lastlevel + phi * lasttrend)) + (1 - gamma) * lastseason } else { season[i] <- gamma * (x[i] / (lastlevel * lasttrend ^ phi)) + (1 - gamma) * lastseason } } } list( SSE = SSE, fitted = xfit, residuals = residuals, level = c(level0, level), trend = c(trend0, trend), season = c(season0, season), phi = phi ) } #' Exponential smoothing forecasts #' #' Returns forecasts and other information for exponential smoothing forecasts #' applied to \code{y}. #' #' ses, holt and hw are simply convenient wrapper functions for #' \code{forecast(ets(...))}. #' #' @param y a numeric vector or time series of class \code{ts} #' @param h Number of periods for forecasting. #' @param damped If TRUE, use a damped trend. #' @param seasonal Type of seasonality in \code{hw} model. "additive" or #' "multiplicative" #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param initial Method used for selecting initial state values. If #' \code{optimal}, the initial values are optimized along with the smoothing #' parameters using \code{\link{ets}}. If \code{simple}, the initial values are #' set to values obtained using simple calculations on the first few #' observations. See Hyndman & Athanasopoulos (2014) for details. #' @param exponential If TRUE, an exponential trend is fitted. Otherwise, the #' trend is (locally) linear. #' @param alpha Value of smoothing parameter for the level. If \code{NULL}, it #' will be estimated. #' @param beta Value of smoothing parameter for the trend. 
If \code{NULL}, it #' will be estimated. #' @param gamma Value of smoothing parameter for the seasonal component. If #' \code{NULL}, it will be estimated. #' @param phi Value of damping parameter if \code{damped=TRUE}. If \code{NULL}, #' it will be estimated. #' @param x Deprecated. Included for backwards compatibility. #' @param ... Other arguments passed to \code{forecast.ets}. #' @inheritParams forecast.ts #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{ets} and associated #' functions. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted #' model.} \item{fitted}{Fitted values (one-step forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link{ets}}, \code{\link[stats]{HoltWinters}}, #' \code{\link{rwf}}, \code{\link[stats]{arima}}. #' @references Hyndman, R.J., Koehler, A.B., Ord, J.K., Snyder, R.D. (2008) #' \emph{Forecasting with exponential smoothing: the state space approach}, #' Springer-Verlag: New York. \url{http://www.exponentialsmoothing.net}. #' #' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles #' and practice}, 2nd edition, OTexts: Melbourne, Australia. #' \url{https://otexts.com/fpp2/} #' @keywords ts #' @examples #' #' fcast <- holt(airmiles) #' plot(fcast) #' deaths.fcast <- hw(USAccDeaths,h=48) #' plot(deaths.fcast) #' #' @export ses <- function(y, h = 10, level = c(80, 95), fan = FALSE, initial=c("optimal", "simple"), alpha=NULL, lambda=NULL, biasadj=FALSE, x=y, ...) { initial <- match.arg(initial) if (initial == "optimal") { fcast <- forecast(ets(x, "ANN", alpha = alpha, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } else { fcast <- forecast(HoltWintersZZ(x, alpha = alpha, beta = FALSE, gamma = FALSE, lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } fcast$method <- fcast$model$method <- "Simple exponential smoothing" fcast$model$call <- match.call() fcast$series <- deparse(substitute(y)) return(fcast) } #' @rdname ses #' @export holt <- function(y, h = 10, damped = FALSE, level = c(80, 95), fan = FALSE, initial=c("optimal", "simple"), exponential=FALSE, alpha=NULL, beta=NULL, phi=NULL, lambda=NULL, biasadj=FALSE, x=y, ...) { initial <- match.arg(initial) if (length(y) <= 1L) { stop("I need at least two observations to estimate trend.") } if (initial == "optimal" || damped) { if (exponential) { fcast <- forecast(ets(x, "MMN", alpha = alpha, beta = beta, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) 
} else { fcast <- forecast(ets(x, "AAN", alpha = alpha, beta = beta, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } } else { fcast <- forecast( HoltWintersZZ(x, alpha = alpha, beta = beta, gamma = FALSE, phi = phi, exponential = exponential, lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ... ) } if (damped) { fcast$method <- "Damped Holt's method" if (initial == "simple") { warning("Damped Holt's method requires optimal initialization") } } else { fcast$method <- "Holt's method" } if (exponential) { fcast$method <- paste(fcast$method, "with exponential trend") } fcast$model$method <- fcast$method fcast$model$call <- match.call() fcast$series <- deparse(substitute(y)) return(fcast) } #' @rdname ses #' @export hw <- function(y, h = 2 * frequency(x), seasonal = c("additive", "multiplicative"), damped = FALSE, level = c(80, 95), fan = FALSE, initial=c("optimal", "simple"), exponential=FALSE, alpha=NULL, beta=NULL, gamma=NULL, phi=NULL, lambda=NULL, biasadj=FALSE, x=y, ...) { initial <- match.arg(initial) seasonal <- match.arg(seasonal) m <- frequency(x) if (m <= 1L) { stop("The time series should have frequency greater than 1.") } if (length(y) < m + 3) { stop(paste("I need at least", m + 3, "observations to estimate seasonality.")) } if (initial == "optimal" || damped) { if (seasonal == "additive" && exponential) { stop("Forbidden model combination") } else if (seasonal == "additive" && !exponential) { fcast <- forecast(ets(x, "AAA", alpha = alpha, beta = beta, gamma = gamma, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } else if (seasonal != "additive" && exponential) { fcast <- forecast(ets(x, "MMM", alpha = alpha, beta = beta, gamma = gamma, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } else { # if(seasonal!="additive" & !exponential) fcast <- forecast(ets(x, "MAM", alpha = alpha, beta = beta, gamma = gamma, phi = phi, damped = damped, opt.crit = "mse", lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ...) } } else { fcast <- forecast( HoltWintersZZ(x, alpha = alpha, beta = beta, gamma = gamma, phi = phi, seasonal = seasonal, exponential = exponential, lambda = lambda, biasadj = biasadj), h, level = level, fan = fan, ... ) } if (seasonal == "additive") { fcast$method <- "Holt-Winters' additive method" } else { fcast$method <- "Holt-Winters' multiplicative method" } if (exponential) { fcast$method <- paste(fcast$method, "with exponential trend") } if (damped) { fcast$method <- paste("Damped", fcast$method) if (initial == "simple") { warning("Damped methods require optimal initialization") } } fcast$model$method <- fcast$method fcast$model$call <- match.call() fcast$series <- deparse(substitute(y)) return(fcast) } forecast/R/arima.R0000644000176200001440000010317114633662406013512 0ustar liggesuserssearch.arima <- function(x, d=NA, D=NA, max.p=5, max.q=5, max.P=2, max.Q=2, max.order=5, stationary=FALSE, ic=c("aic", "aicc", "bic"), trace=FALSE, approximation=FALSE, xreg=NULL, offset=offset, allowdrift=TRUE, allowmean=TRUE, parallel=FALSE, num.cores=2, ...) 
{ # dataname <- substitute(x) ic <- match.arg(ic) m <- frequency(x) allowdrift <- allowdrift & (d + D) == 1 allowmean <- allowmean & (d + D) == 0 maxK <- (allowdrift | allowmean) # Choose model orders # Serial - technically could be combined with the code below if (parallel == FALSE) { best.ic <- Inf for (i in 0:max.p) { for (j in 0:max.q) { for (I in 0:max.P) { for (J in 0:max.Q) { if (i + j + I + J <= max.order) { for (K in 0:maxK) { fit <- myarima( x, order = c(i, d, j), seasonal = c(I, D, J), constant = (K == 1), trace = trace, ic = ic, approximation = approximation, offset = offset, xreg = xreg, ... ) if (fit$ic < best.ic) { best.ic <- fit$ic bestfit <- fit constant <- (K == 1) } } } } } } } } else if (parallel == TRUE) { to.check <- WhichModels(max.p, max.q, max.P, max.Q, maxK) par.all.arima <- function(l, max.order) { .tmp <- UndoWhichModels(l) i <- .tmp[1] j <- .tmp[2] I <- .tmp[3] J <- .tmp[4] K <- .tmp[5] == 1 if (i + j + I + J <= max.order) { fit <- myarima( x, order = c(i, d, j), seasonal = c(I, D, J), constant = (K == 1), trace = trace, ic = ic, approximation = approximation, offset = offset, xreg = xreg, ... ) } if (exists("fit")) { return(cbind(fit, K)) } else { return(NULL) } } if (is.null(num.cores)) { num.cores <- detectCores() } all.models <- mclapply(X = to.check, FUN = par.all.arima, max.order=max.order, mc.cores = num.cores) # Removing null elements all.models <- all.models[!sapply(all.models, is.null)] # Choosing best model best.ic <- Inf for (i in 1:length(all.models)) { if (!is.null(all.models[[i]][, 1]$ic) && all.models[[i]][, 1]$ic < best.ic) { bestfit <- all.models[[i]][, 1] best.ic <- bestfit$ic constant <- unlist(all.models[[i]][1, 2]) } } class(bestfit) <- c("forecast_ARIMA", "ARIMA", "Arima") } if (exists("bestfit")) { # Refit using ML if approximation used for IC if (approximation) { if (trace) { cat("\n\n Now re-fitting the best model(s) without approximations...\n") } # constant <- length(bestfit$coef) - ncol(xreg) > sum(bestfit$arma[1:4]) newbestfit <- myarima( x, order = bestfit$arma[c(1, 6, 2)], seasonal = bestfit$arma[c(3, 7, 4)], constant = constant, ic, trace = FALSE, approximation = FALSE, xreg = xreg, ... ) if (newbestfit$ic == Inf) { # Final model is lousy. Better try again without approximation # warning("Unable to fit final model using maximum likelihood. AIC value approximated") bestfit <- search.arima( x, d = d, D = D, max.p = max.p, max.q = max.q, max.P = max.P, max.Q = max.Q, max.order = max.order, stationary = stationary, ic = ic, trace = trace, approximation = FALSE, xreg = xreg, offset = offset, allowdrift = allowdrift, allowmean = allowmean, parallel = parallel, num.cores = num.cores, ... ) bestfit$ic <- switch(ic, bic = bestfit$bic, aic = bestfit$aic, aicc = bestfit$aicc) } else { bestfit <- newbestfit } } } else { stop("No ARIMA model able to be estimated") } bestfit$x <- x bestfit$series <- deparse(substitute(x)) bestfit$ic <- NULL bestfit$call <- match.call() if (trace) { cat("\n\n") } return(bestfit) } # Set up seasonal dummies using Fourier series SeasDummy <- function(x) { n <- length(x) m <- frequency(x) if (m == 1) { stop("Non-seasonal data") } tt <- 1:n fmat <- matrix(NA, nrow = n, ncol = 2 * m) for (i in 1:m) { fmat[, 2 * i] <- sin(2 * pi * i * tt / m) fmat[, 2 * (i - 1) + 1] <- cos(2 * pi * i * tt / m) } return(fmat[, 1:(m - 1)]) } # CANOVA-HANSEN TEST # Largely based on uroot package code for CH.test() SD.test <- function(wts, s=frequency(wts)) { if (any(is.na(wts))) { stop("Series contains missing values. 
Please choose order of seasonal differencing manually.")
  }
  if (s == 1) {
    stop("Not seasonal data")
  }
  t0 <- start(wts)
  N <- length(wts)
  if (N <= s) {
    stop("Insufficient data")
  }
  frec <- rep(1, as.integer((s + 1) / 2))
  ltrunc <- round(s * (N / 100) ^ 0.25)
  R1 <- as.matrix(SeasDummy(wts))
  lmch <- lm(wts ~ R1, na.action = na.exclude) # run the regression: y(i) = mu + f(i)'gamma(i) + e(i)
  Fhat <- Fhataux <- matrix(nrow = N, ncol = s - 1)
  for (i in 1:(s - 1)) Fhataux[, i] <- R1[, i] * residuals(lmch)
  for (i in 1:N) {
    for (n in 1:(s - 1)) Fhat[i, n] <- sum(Fhataux[1:i, n])
  }
  wnw <- 1 - seq(1, ltrunc, 1) / (ltrunc + 1)
  Ne <- nrow(Fhataux)
  Omnw <- 0
  for (k in 1:ltrunc) Omnw <- Omnw + (t(Fhataux)[, (k + 1):Ne] %*% Fhataux[1:(Ne - k), ]) * wnw[k]
  Omfhat <- (crossprod(Fhataux) + Omnw + t(Omnw)) / Ne
  sq <- seq(1, s - 1, 2)
  frecob <- rep(0, s - 1)
  for (i in 1:length(frec)) {
    if (frec[i] == 1 && i == as.integer(s / 2)) {
      frecob[sq[i]] <- 1
    }
    if (frec[i] == 1 && i < as.integer(s / 2)) {
      frecob[sq[i]] <- frecob[sq[i] + 1] <- 1
    }
  }
  a <- length(which(frecob == 1))
  A <- matrix(0, nrow = s - 1, ncol = a)
  j <- 1
  for (i in 1:(s - 1)) {
    if (frecob[i] == 1) {
      A[i, j] <- 1
      j <- j + 1
    }
  }
  tmp <- t(A) %*% Omfhat %*% A
  problems <- (min(svd(tmp)$d) < .Machine$double.eps)
  if (problems) {
    stL <- 0
  } else {
    stL <- (1 / N ^ 2) * sum(diag(solve(tmp, tol = 1e-25) %*% t(A) %*% t(Fhat) %*% Fhat %*% A))
  }
  return(stL)
}

#' Forecasting using ARIMA or ARFIMA models
#'
#' Returns forecasts and other information for univariate ARIMA models.
#'
#' For \code{Arima} or \code{ar} objects, the function calls
#' \code{\link[stats]{predict.Arima}} or \code{\link[stats]{predict.ar}} and
#' constructs an object of class "\code{forecast}" from the results. For
#' \code{fracdiff} objects, the calculations are all done within
#' \code{\link{forecast.fracdiff}} using the equations given by Peiris and
#' Perera (1988).
#'
#' @param object An object of class "\code{Arima}", "\code{ar}" or
#' "\code{fracdiff}". Usually the result of a call to
#' \code{\link[stats]{arima}}, \code{\link{auto.arima}},
#' \code{\link[stats]{ar}}, \code{\link{arfima}} or
#' \code{\link[fracdiff]{fracdiff}}.
#' @param h Number of periods for forecasting. If \code{xreg} is used, \code{h}
#' is ignored and the number of forecast periods is set to the number of rows
#' of \code{xreg}.
#' @param level Confidence level for prediction intervals.
#' @param fan If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is
#' suitable for fan plots.
#' @param xreg Future values of any regression variables (for class \code{Arima}
#' objects only). A numerical vector or matrix of external regressors; it should not be a data frame.
#' @param bootstrap If \code{TRUE}, then prediction intervals are computed using
#' simulation with resampled errors.
#' @param npaths Number of sample paths used in computing simulated prediction
#' intervals when \code{bootstrap=TRUE}.
#' @param ... Other arguments.
#' @inheritParams forecast.ts
#'
#' @return An object of class "\code{forecast}".
#'
#' The function \code{summary} is used to obtain and print a summary of the
#' results, while the function \code{plot} produces a plot of the forecasts and
#' prediction intervals.
#'
#' The generic accessor functions \code{fitted.values} and \code{residuals}
#' extract useful features of the value returned by \code{forecast.Arima}.
#' #' An object of class "\code{forecast}" is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values (one-step #' forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link[stats]{predict.Arima}}, #' \code{\link[stats]{predict.ar}}, \code{\link{auto.arima}}, #' \code{\link{Arima}}, \code{\link[stats]{arima}}, \code{\link[stats]{ar}}, #' \code{\link{arfima}}. #' @references Peiris, M. & Perera, B. (1988), On prediction with fractionally #' differenced ARIMA models, \emph{Journal of Time Series Analysis}, #' \bold{9}(3), 215-220. #' @keywords ts #' @aliases forecast.forecast_ARIMA #' @examples #' fit <- Arima(WWWusage,c(3,1,0)) #' plot(forecast(fit)) #' #' library(fracdiff) #' x <- fracdiff.sim( 100, ma=-.4, d=.3)$series #' fit <- arfima(x) #' plot(forecast(fit,h=30)) #' #' @export forecast.Arima <- function(object, h=ifelse(object$arma[5] > 1, 2 * object$arma[5], 10), level=c(80, 95), fan=FALSE, xreg=NULL, lambda=object$lambda, bootstrap=FALSE, npaths=5000, biasadj=NULL, ...) { # Check whether there are non-existent arguments all.args <- names(formals()) user.args <- names(match.call())[-1L] # including arguments passed to 3 dots check <- user.args %in% all.args if (!all(check)) { error.args <- user.args[!check] warning(sprintf("The non-existent %s arguments will be ignored.", error.args)) } use.drift <- is.element("drift", names(object$coef)) x <- object$x <- getResponse(object) usexreg <- (use.drift | is.element("xreg", names(object))) # | use.constant) if (!is.null(xreg) && usexreg) { if(!is.numeric(xreg)) stop("xreg should be a numeric matrix or a numeric vector") xreg <- as.matrix(xreg) if (is.null(colnames(xreg))) { colnames(xreg) <- if (ncol(xreg) == 1) "xreg" else paste("xreg", 1:ncol(xreg), sep = "") } origxreg <- xreg <- as.matrix(xreg) h <- nrow(xreg) } else { if(!is.null(xreg)){ warning("xreg not required by this model, ignoring the provided regressors") xreg <- NULL } origxreg <- NULL } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 & max(level) < 1) { level <- 100 * level } else if (min(level) < 0 | max(level) > 99.99) { stop("Confidence limit out of range") } } level <- sort(level) if (use.drift) { n <- length(x) #missing <- is.na(x) #firstnonmiss <- head(which(!missing),1) #n <- length(x) - firstnonmiss + 1 if (!is.null(xreg)) { xreg <- `colnames<-`(cbind(drift = (1:h) + n, xreg), make.unique(c("drift", if(is.null(colnames(xreg)) && !is.null(xreg)) rep("", NCOL(xreg)) else colnames(xreg)))) } else { xreg <- `colnames<-`(as.matrix((1:h) + n), "drift") } } # Check if data is constant if (!is.null(object$constant)) { if (object$constant) { pred <- list(pred = rep(x[1], h), se = rep(0, h)) } else { stop("Strange value of object$constant") } } else if (usexreg) { if (is.null(xreg)) { stop("No regressors provided") } object$call$xreg <- getxreg(object) if (NCOL(xreg) != NCOL(object$call$xreg)) { stop("Number of regressors does not match 
fitted model") } if(!identical(colnames(xreg), colnames(object$call$xreg))){ warning("xreg contains different column names from the xreg used in training. Please check that the regressors are in the same order.") } pred <- predict(object, n.ahead = h, newxreg = xreg) } else { pred <- predict(object, n.ahead = h) } # Fix time series characteristics if there are missing values at end of series, or if tsp is missing from pred if (!is.null(x)) { tspx <- tsp(x) nx <- max(which(!is.na(x))) if (nx != length(x) | is.null(tsp(pred$pred)) | is.null(tsp(pred$se))) { tspx[2] <- time(x)[nx] start.f <- tspx[2] + 1 / tspx[3] pred$pred <- ts(pred$pred, frequency = tspx[3], start = start.f) pred$se <- ts(pred$se, frequency = tspx[3], start = start.f) } } # Compute prediction intervals nint <- length(level) if (bootstrap) # Compute prediction intervals using simulations { sim <- matrix(NA, nrow = npaths, ncol = h) for (i in 1:npaths) sim[i, ] <- simulate(object, nsim = h, bootstrap = TRUE, xreg = origxreg, lambda = lambda) lower <- apply(sim, 2, quantile, 0.5 - level / 200, type = 8) upper <- apply(sim, 2, quantile, 0.5 + level / 200, type = 8) if (nint > 1L) { lower <- t(lower) upper <- t(upper) } else { lower <- matrix(lower, ncol = 1) upper <- matrix(upper, ncol = 1) } } else { # Compute prediction intervals via the normal distribution lower <- matrix(NA, ncol = nint, nrow = length(pred$pred)) upper <- lower for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- pred$pred - qq * pred$se upper[, i] <- pred$pred + qq * pred$se } if (!is.finite(max(upper))) { warning("Upper prediction intervals are not finite.") } } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") lower <- ts(lower) upper <- ts(upper) tsp(lower) <- tsp(upper) <- tsp(pred$pred) method <- arima.string(object, padding = FALSE) seriesname <- if (!is.null(object$series)) { object$series } else if (!is.null(object$call$x)) { object$call$x } else { object$call$y } fits <- fitted.Arima(object) if (!is.null(lambda) & is.null(object$constant)) { # Back-transform point forecasts and prediction intervals pred$pred <- InvBoxCox(pred$pred, lambda, biasadj, pred$se^2) if (!bootstrap) { # Bootstrapped intervals already back-transformed lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } } return(structure( list( method = method, model = object, level = level, mean = future_msts(x, pred$pred), lower = future_msts(x, lower), upper = future_msts(x, upper), x = x, series = seriesname, fitted = copy_msts(x, fits), residuals = copy_msts(x, residuals.Arima(object)) ), class = "forecast" )) } #' @export forecast.forecast_ARIMA <- forecast.Arima #' @rdname forecast.Arima #' @export forecast.ar <- function(object, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, bootstrap=FALSE, npaths=5000, biasadj=FALSE, ...) 
{ x <- getResponse(object) pred <- predict(object, newdata = x, n.ahead = h) if (bootstrap) # Recompute se using simulations { sim <- matrix(NA, nrow = npaths, ncol = h) for (i in 1:npaths) sim[i, ] <- simulate(object, nsim = h, bootstrap = TRUE) pred$se <- apply(sim, 2, sd) } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 & max(level) < 1) { level <- 100 * level } else if (min(level) < 0 | max(level) > 99.99) { stop("Confidence limit out of range") } } nint <- length(level) lower <- matrix(NA, ncol = nint, nrow = length(pred$pred)) upper <- lower for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- pred$pred - qq * pred$se upper[, i] <- pred$pred + qq * pred$se } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") method <- paste("AR(", object$order, ")", sep = "") f <- frequency(x) res <- residuals.ar(object) fits <- fitted.ar(object) if (!is.null(lambda)) { pred$pred <- InvBoxCox(pred$pred, lambda, biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) fits <- InvBoxCox(fits, lambda) x <- InvBoxCox(x, lambda) } return(structure( list( method = method, model = object, level = level, mean = future_msts(x, pred$pred), lower = future_msts(x, lower), upper = future_msts(x, upper), x = x, series = deparse(object$call$x), fitted = copy_msts(x, fits), residuals = copy_msts(x, res) ) , class = "forecast" )) } # Find xreg matrix in an Arima object getxreg <- function(z) { # Look in the obvious place first if (is.element("xreg", names(z))) { return(z$xreg) } # Next most obvious place else if (is.element("xreg", names(z$coef))) { return(eval.parent(z$coef$xreg)) } # Now check under call else if (is.element("xreg", names(z$call))) { return(eval.parent(z$call$xreg)) } # Otherwise check if it exists else { armapar <- sum(z$arma[1:4]) + is.element("intercept", names(z$coef)) npar <- length(z$coef) if (npar > armapar) { stop("It looks like you have an xreg component but I don't know what it is.\n Please use Arima() or auto.arima() rather than arima().") } else { # No xreg used return(NULL) } } } #' Errors from a regression model with ARIMA errors #' #' Returns time series of the regression residuals from a fitted ARIMA model. #' #' This is a deprecated function #' which is identical to \code{\link{residuals.Arima}(object, type="regression")} #' Regression residuals are equal to the original data #' minus the effect of any regression variables. If there are no regression #' variables, the errors will be identical to the original series (possibly #' adjusted to have zero mean). #' #' @param object An object containing a time series model of class \code{Arima}. #' @return A \code{ts} object #' @author Rob J Hyndman #' @seealso \code{\link{residuals.Arima}}. #' @keywords ts #' #' @export arima.errors <- function(object) { message("Deprecated, use residuals.Arima(object, type='regression') instead") residuals.Arima(object, type = "regression") } # Return one-step fits #' h-step in-sample forecasts for time series models. #' #' Returns h-step forecasts for the data used in fitting the model. #' #' @param object An object of class "\code{Arima}", "\code{bats}", #' "\code{tbats}", "\code{ets}" or "\code{nnetar}". #' @param h The number of steps to forecast ahead. #' @param ... Other arguments. #' @return A time series of the h-step forecasts. 
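#' Each value is the h-step forecast made from the model states \code{h}
#' periods earlier; consequently, for \code{h > 1} the first \code{h} values
#' of the returned series are missing.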
#' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso \code{\link{forecast.Arima}}, \code{\link{forecast.bats}}, #' \code{\link{forecast.tbats}}, \code{\link{forecast.ets}}, #' \code{\link{forecast.nnetar}}, \code{\link{residuals.Arima}}, #' \code{\link{residuals.bats}}, \code{\link{residuals.tbats}}, #' \code{\link{residuals.ets}}, \code{\link{residuals.nnetar}}. #' @keywords ts #' @aliases fitted.forecast_ARIMA #' @examples #' fit <- ets(WWWusage) #' plot(WWWusage) #' lines(fitted(fit), col='red') #' lines(fitted(fit, h=2), col='green') #' lines(fitted(fit, h=3), col='blue') #' legend("topleft", legend=paste("h =",1:3), col=2:4, lty=1) #' #' @export fitted.Arima <- function(object, h = 1, ...) { if (h == 1) { x <- getResponse(object) if (!is.null(object$fitted)) { return(object$fitted) } else if (is.null(x)) { # warning("Fitted values are unavailable due to missing historical data") return(NULL) } else if (is.null(object$lambda)) { return(x - object$residuals) } else { fits <- InvBoxCox(BoxCox(x, object$lambda) - object$residuals, object$lambda, NULL, object$sigma2) return(fits) } } else { return(hfitted(object = object, h = h, FUN = "Arima", ...)) } } #' @export fitted.forecast_ARIMA <- fitted.Arima # Calls arima from stats package and adds data to the returned object # Also allows refitting to new data # and drift terms to be included. #' Fit ARIMA model to univariate time series #' #' Largely a wrapper for the \code{\link[stats]{arima}} function in the stats #' package. The main difference is that this function allows a drift term. It #' is also possible to take an ARIMA model from a previous call to \code{Arima} #' and re-apply it to the data \code{y}. #' #' See the \code{\link[stats]{arima}} function in the stats package. #' #' @aliases print.ARIMA summary.Arima as.character.Arima #' #' @param y a univariate time series of class \code{ts}. #' @param order A specification of the non-seasonal part of the ARIMA model: #' the three components (p, d, q) are the AR order, the degree of differencing, #' and the MA order. #' @param seasonal A specification of the seasonal part of the ARIMA model, #' plus the period (which defaults to frequency(y)). This should be a list with #' components order and period, but a specification of just a numeric vector of #' length 3 will be turned into a suitable list with the specification as the #' order. #' @param xreg Optionally, a numerical vector or matrix of external regressors, which #' must have the same number of rows as y. It should not be a data frame. #' @param include.mean Should the ARIMA model include a mean term? The default #' is \code{TRUE} for undifferenced series, \code{FALSE} for differenced ones #' (where a mean would not affect the fit nor predictions). #' @param include.drift Should the ARIMA model include a linear drift term? #' (i.e., a linear regression with ARIMA errors is fitted.) The default is #' \code{FALSE}. #' @param include.constant If \code{TRUE}, then \code{include.mean} is set to #' be \code{TRUE} for undifferenced series and \code{include.drift} is set to #' be \code{TRUE} for differenced series. Note that if there is more than one #' difference taken, no constant is included regardless of the value of this #' argument. This is deliberate as otherwise quadratic and higher order #' polynomial trends would be induced. #' @param method Fitting method: maximum likelihood or minimize conditional #' sum-of-squares. 
The default (unless there are missing values) is to use #' conditional-sum-of-squares to find starting values, then maximum likelihood. #' @param model Output from a previous call to \code{Arima}. If model is #' passed, this same model is fitted to \code{y} without re-estimating any #' parameters. #' @param x Deprecated. Included for backwards compatibility. #' @param ... Additional arguments to be passed to \code{\link[stats]{arima}}. #' @inheritParams forecast.ts #' @return See the \code{\link[stats]{arima}} function in the stats package. #' The additional objects returned are \item{x}{The time series data} #' \item{xreg}{The regressors used in fitting (when relevant).} #' \item{sigma2}{The bias adjusted MLE of the innovations variance.} #' #' @export #' #' @author Rob J Hyndman #' @seealso \code{\link{auto.arima}}, \code{\link{forecast.Arima}}. #' @keywords ts #' @examples #' library(ggplot2) #' WWWusage %>% #' Arima(order=c(3,1,0)) %>% #' forecast(h=20) %>% #' autoplot #' #' # Fit model to first few years of AirPassengers data #' air.model <- Arima(window(AirPassengers,end=1956+11/12),order=c(0,1,1), #' seasonal=list(order=c(0,1,1),period=12),lambda=0) #' plot(forecast(air.model,h=48)) #' lines(AirPassengers) #' #' # Apply fitted model to later data #' air.model2 <- Arima(window(AirPassengers,start=1957),model=air.model) #' #' # Forecast accuracy measures on the log scale. #' # in-sample one-step forecasts. #' accuracy(air.model) #' # out-of-sample one-step forecasts. #' accuracy(air.model2) #' # out-of-sample multi-step forecasts #' accuracy(forecast(air.model,h=48,lambda=NULL), #' log(window(AirPassengers,start=1957))) #' Arima <- function(y, order=c(0, 0, 0), seasonal=c(0, 0, 0), xreg=NULL, include.mean=TRUE, include.drift=FALSE, include.constant, lambda=model$lambda, biasadj=FALSE, method=c("CSS-ML", "ML", "CSS"), model=NULL, x=y, ...) { # Remove outliers near ends # j <- time(x) # x <- na.contiguous(x) # if(length(j) != length(x)) # warning("Missing values encountered. 
Using longest contiguous portion of time series") series <- deparse(substitute(y)) origx <- y if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") if (is.null(attr(lambda, "biasadj"))) { attr(lambda, "biasadj") <- biasadj } } if (!is.null(xreg)) { if(!is.numeric(xreg)) stop("xreg should be a numeric matrix or a numeric vector") xreg <- as.matrix(xreg) if (is.null(colnames(xreg))) { colnames(xreg) <- if (ncol(xreg) == 1) "xreg" else paste("xreg", 1:ncol(xreg), sep = "") } } if (!is.list(seasonal)) { if (frequency(x) <= 1) { seasonal <- list(order = c(0, 0, 0), period = NA) if(length(x) <= order[2L]) stop("Not enough data to fit the model") } else { seasonal <- list(order = seasonal, period = frequency(x)) if(length(x) <= order[2L] + seasonal$order[2L] * seasonal$period) stop("Not enough data to fit the model") } } if (!missing(include.constant)) { if (include.constant) { include.mean <- TRUE if ((order[2] + seasonal$order[2]) == 1) { include.drift <- TRUE } } else { include.mean <- include.drift <- FALSE } } if ((order[2] + seasonal$order[2]) > 1 & include.drift) { warning("No drift term fitted as the order of difference is 2 or more.") include.drift <- FALSE } if (!is.null(model)) { tmp <- arima2(x, model, xreg = xreg, method = method) xreg <- tmp$xreg tmp$fitted <- NULL tmp$lambda <- model$lambda } else { if (include.drift) { xreg <- `colnames<-`(cbind(drift = 1:length(x), xreg), make.unique(c("drift", if(is.null(colnames(xreg)) && !is.null(xreg)) rep("", NCOL(xreg)) else colnames(xreg)))) } if (is.null(xreg)) { suppressWarnings(tmp <- stats::arima(x = x, order = order, seasonal = seasonal, include.mean = include.mean, method = method, ...)) } else { suppressWarnings(tmp <- stats::arima(x = x, order = order, seasonal = seasonal, xreg = xreg, include.mean = include.mean, method = method, ...)) } } # Calculate aicc & bic based on tmp$aic npar <- length(tmp$coef[tmp$mask]) + 1 missing <- is.na(tmp$residuals) firstnonmiss <- head(which(!missing),1) lastnonmiss <- tail(which(!missing),1) n <- sum(!missing[firstnonmiss:lastnonmiss]) nstar <- n - tmp$arma[6] - tmp$arma[7] * tmp$arma[5] tmp$aicc <- tmp$aic + 2 * npar * (nstar / (nstar - npar - 1) - 1) tmp$bic <- tmp$aic + npar * (log(nstar) - 2) tmp$series <- series tmp$xreg <- xreg tmp$call <- match.call() tmp$lambda <- lambda tmp$x <- origx # Adjust residual variance to be unbiased if (is.null(model)) { tmp$sigma2 <- sum(tmp$residuals ^ 2, na.rm = TRUE) / (nstar - npar + 1) } out <- structure(tmp, class = c("forecast_ARIMA", "ARIMA", "Arima")) out$fitted <- fitted.Arima(out) out$series <- series return(out) } # Refits the model to new data x arima2 <- function(x, model, xreg, method) { use.drift <- is.element("drift", names(model$coef)) use.intercept <- is.element("intercept", names(model$coef)) use.xreg <- is.element("xreg", names(model$call)) sigma2 <- model$sigma2 if (use.drift) { driftmod <- lm(model$xreg[, "drift"] ~ I(time(as.ts(model$x)))) newxreg <- driftmod$coefficients[1] + driftmod$coefficients[2] * time(as.ts(x)) if (!is.null(xreg)) { origColNames <- colnames(xreg) xreg <- cbind(newxreg, xreg) colnames(xreg) <- c("drift", origColNames) } else { xreg <- as.matrix(data.frame(drift = newxreg)) } use.xreg <- TRUE } if (!is.null(model$xreg)) { if (is.null(xreg)) { stop("No regressors provided") } if (ncol(xreg) != ncol(model$xreg)) { stop("Number of regressors does not match fitted model") } } if (model$arma[5] > 1 & sum(abs(model$arma[c(3, 4, 7)])) > 0) # Seasonal model { if (use.xreg) { refit <- Arima( x, order = 
model$arma[c(1, 6, 2)], seasonal = list(order = model$arma[c(3, 7, 4)], period = model$arma[5]), include.mean = use.intercept, xreg = xreg, method = method, fixed = model$coef ) } else { refit <- Arima( x, order = model$arma[c(1, 6, 2)], seasonal = list(order = model$arma[c(3, 7, 4)], period = model$arma[5]), include.mean = use.intercept, method = method, fixed = model$coef ) } } else if (length(model$coef) > 0) # Nonseasonal model with some parameters { if (use.xreg) { refit <- Arima(x, order = model$arma[c(1, 6, 2)], xreg = xreg, include.mean = use.intercept, method = method, fixed = model$coef) } else { refit <- Arima(x, order = model$arma[c(1, 6, 2)], include.mean = use.intercept, method = method, fixed = model$coef) } } else { # No parameters refit <- Arima(x, order = model$arma[c(1, 6, 2)], include.mean = FALSE, method = method) } refit$var.coef <- matrix(0, length(refit$coef), length(refit$coef)) if (use.xreg) { # Why is this needed? refit$xreg <- xreg } refit$sigma2 <- sigma2 return(refit) } # Modified version of function print.Arima from stats package #' @export print.forecast_ARIMA <- function(x, digits=max(3, getOption("digits") - 3), se=TRUE, ...) { cat("Series:", x$series, "\n") cat(arima.string(x, padding = FALSE), "\n") if (!is.null(x$lambda)) { cat("Box Cox transformation: lambda=", x$lambda, "\n") } # cat("\nCall:", deparse(x$call, width.cutoff=75), "\n", sep=" ") # if(!is.null(x$xreg)) # { # cat("\nRegression variables fitted:\n") # xreg <- as.matrix(x$xreg) # for(i in 1:3) # cat(" ",xreg[i,],"\n") # cat(" . . .\n") # for(i in 1:3) # cat(" ",xreg[nrow(xreg)-3+i,],"\n") # } if (length(x$coef) > 0) { cat("\nCoefficients:\n") coef <- round(x$coef, digits = digits) if (se && NROW(x$var.coef)) { ses <- rep.int(0, length(coef)) ses[x$mask] <- round(sqrt(diag(x$var.coef)), digits = digits) coef <- matrix(coef, 1L, dimnames = list(NULL, names(coef))) coef <- rbind(coef, s.e. = ses) } # Change intercept to mean if no regression variables j <- match("intercept", colnames(coef)) if (is.null(x$xreg) & !is.na(j)) { colnames(coef)[j] <- "mean" } print.default(coef, print.gap = 2) } cm <- x$call$method cat("\nsigma^2 = ", format(x$sigma2, digits = digits), sep="") if(!is.na(x$loglik)) cat(": log likelihood = ", format(round(x$loglik, 2L)), sep = "") cat("\n") if (is.null(cm) || cm != "CSS") { if(!is.na(x$aic)) { npar <- length(x$coef[x$mask]) + 1 missing <- is.na(x$residuals) firstnonmiss <- head(which(!missing),1) lastnonmiss <- tail(which(!missing),1) n <- lastnonmiss - firstnonmiss + 1 nstar <- n - x$arma[6] - x$arma[7] * x$arma[5] bic <- x$aic + npar * (log(nstar) - 2) aicc <- x$aic + 2 * npar * (nstar / (nstar - npar - 1) - 1) cat("AIC=", format(round(x$aic, 2L)), sep = "") cat(" AICc=", format(round(aicc, 2L)), sep = "") cat(" BIC=", format(round(bic, 2L)), "\n", sep = "") } } invisible(x) } #' Return the order of an ARIMA or ARFIMA model #' #' Returns the order of a univariate ARIMA or ARFIMA model. #' #' #' @param object An object of class \dQuote{\code{Arima}}, \dQuote{\code{ar}} or #' \dQuote{\code{fracdiff}}. Usually the result of a call to #' \code{\link[stats]{arima}}, \code{\link{Arima}}, \code{\link{auto.arima}}, #' \code{\link[stats]{ar}}, \code{\link{arfima}} or #' \code{\link[fracdiff]{fracdiff}}. #' @return A numerical vector giving the values \eqn{p}, \eqn{d} and \eqn{q} of #' the ARIMA or ARFIMA model. 
For a seasonal ARIMA model, the returned vector #' contains the values \eqn{p}, \eqn{d}, \eqn{q}, \eqn{P}, \eqn{D}, \eqn{Q} and #' \eqn{m}, where \eqn{m} is the period of seasonality. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{ar}}, \code{\link{auto.arima}}, #' \code{\link{Arima}}, \code{\link[stats]{arima}}, \code{\link{arfima}}. #' @keywords ts #' @examples #' WWWusage %>% auto.arima %>% arimaorder #' #' @export arimaorder <- function(object) { if (is.element("Arima", class(object))) { order <- object$arma[c(1, 6, 2, 3, 7, 4, 5)] names(order) <- c("p", "d", "q", "P", "D", "Q", "Frequency") seasonal <- (order[7] > 1 & sum(order[4:6]) > 0) if (seasonal) { return(order) } else { return(order[1:3]) } } else if (is.element("ar", class(object))) { return(c("p" = object$order, "d" = 0, "q" = 0)) } else if (is.element("fracdiff", class(object))) { return(c("p" = length(object$ar), "d" = object$d, "q" = length(object$ma))) } else { stop("object not of class Arima, ar or fracdiff") } } #' @export as.character.Arima <- function(x, ...) { arima.string(x, padding = FALSE) } #' @rdname is.ets #' @export is.Arima <- function(x) { inherits(x, "Arima") } #' @rdname fitted.Arima #' @export fitted.ar <- function(object, ...) { getResponse(object) - residuals(object) } #' @export hfitted.Arima <- function(object, h, ...) { # As implemented in Fable if(h == 1){ return(object$fitted) } y <- object$fitted+residuals(object, "innovation") yx <- residuals(object, "regression") # Get fitted model mod <- object$model # Reset model to initial state mod <- stats::makeARIMA(mod$phi, mod$theta, mod$Delta) # Calculate regression component xm <- y - yx fits <- rep_len(NA_real_, length(y)) start <- length(mod$Delta) + 1 end <- length(yx) - h idx <- if(start > end) integer(0L) else start:end for(i in idx) { fc_mod <- attr(stats::KalmanRun(yx[seq_len(i)], mod, update = TRUE), "mod") fits[i + h] <- stats::KalmanForecast(h, fc_mod)$pred[h] + xm[i+h] } fits <- ts(fits) tsp(fits) <- tsp(object$x) fits } forecast/R/bats.R0000644000176200001440000003727314323125536013355 0ustar liggesusers# Author: srazbash ############################################################################### #' BATS model (Exponential smoothing state space model with Box-Cox #' transformation, ARMA errors, Trend and Seasonal components) #' #' Fits a BATS model applied to \code{y}, as described in De Livera, Hyndman & #' Snyder (2011). Parallel processing is used by default to speed up the #' computations. #' #' @aliases as.character.bats print.bats #' #' @param y The time series to be forecast. Can be \code{numeric}, \code{msts} #' or \code{ts}. Only univariate time series are supported. #' @param use.box.cox \code{TRUE/FALSE} indicates whether to use the Box-Cox #' transformation or not. If \code{NULL} then both are tried and the best fit #' is selected by AIC. #' @param use.trend \code{TRUE/FALSE} indicates whether to include a trend or #' not. If \code{NULL} then both are tried and the best fit is selected by AIC. #' @param use.damped.trend \code{TRUE/FALSE} indicates whether to include a #' damping parameter in the trend or not. If \code{NULL} then both are tried #' and the best fit is selected by AIC. #' @param seasonal.periods If \code{y} is a numeric then seasonal periods can #' be specified with this parameter. #' @param use.arma.errors \code{TRUE/FALSE} indicates whether to include ARMA #' errors or not. If \code{TRUE} the best fit is selected by AIC. If #' \code{FALSE} then the selection algorithm does not consider ARMA errors. 
#' @param use.parallel \code{TRUE/FALSE} indicates whether or not to use #' parallel processing. #' @param num.cores The number of parallel processes to be used if using #' parallel processing. If \code{NULL} then the number of logical cores is #' detected and all available cores are used. #' @param bc.lower The lower limit (inclusive) for the Box-Cox transformation. #' @param bc.upper The upper limit (inclusive) for the Box-Cox transformation. #' @param biasadj Use adjusted back-transformed mean for Box-Cox #' transformations. If TRUE, point forecasts and fitted values are mean #' forecasts. Otherwise, these points can be considered the median of the #' forecast densities. #' @param model Output from a previous call to \code{bats}. If model is passed, #' this same model is fitted to \code{y} without re-estimating any parameters. #' @param ... Additional arguments to be passed to \code{auto.arima} when #' choosing an ARMA(p, q) model for the errors. (Note that xreg will be ignored, #' as will any arguments concerning seasonality and differencing, but arguments #' controlling the values of p and q will be used.) #' @return An object of class "\code{bats}". The generic accessor functions #' \code{fitted.values} and \code{residuals} extract useful features of the #' value returned by \code{bats} and associated functions. The fitted model is #' designated BATS(omega, p,q, phi, m1,...mJ) where omega is the Box-Cox #' parameter and phi is the damping parameter; the error is modelled as an #' ARMA(p,q) process and m1,...,mJ list the seasonal periods used in the model. #' @author Slava Razbash and Rob J Hyndman #' @references De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), #' Forecasting time series with complex seasonal patterns using exponential #' smoothing, \emph{Journal of the American Statistical Association}, #' \bold{106}(496), 1513-1527. #' @keywords ts #' @examples #' #' \dontrun{ #' fit <- bats(USAccDeaths) #' plot(forecast(fit)) #' #' taylor.fit <- bats(taylor) #' plot(forecast(taylor.fit)) #' } #' #' @export bats <- function(y, use.box.cox = NULL, use.trend = NULL, use.damped.trend = NULL, seasonal.periods = NULL, use.arma.errors = TRUE, use.parallel = length(y) > 1000, num.cores = 2, bc.lower = 0, bc.upper = 1, biasadj = FALSE, model = NULL, ...) { if (!is.numeric(y) || NCOL(y) > 1) { stop("y should be a univariate time series") } seriesname <- deparse(substitute(y)) origy <- y attr_y <- attributes(origy) # Get seasonal periods if (is.null(seasonal.periods)) { if ("msts" %in% class(y)) { seasonal.periods <- attr(y, "msts") } else if ("ts" %in% class(y)) { seasonal.periods <- frequency(y) } else { y <- as.ts(y) seasonal.periods <- 1 } seasonal.periods <- seasonal.periods[seasonal.periods < length(y)] if(length(seasonal.periods) == 0L) seasonal.periods <- 1 } else { # Add ts attributes if (!("ts" %in% class(y))) { y <- msts(y, seasonal.periods) } } seasonal.periods <- unique(pmax(seasonal.periods, 1)) if (all(seasonal.periods == 1)) { seasonal.periods <- NULL } ny <- length(y) y <- na.contiguous(y) if (ny != length(y)) { warning("Missing values encountered. 
Using longest contiguous portion of time series") if (!is.null(attr_y$tsp)) { attr_y$tsp[1:2] <- range(time(y)) } } # Refit model if available if (!is.null(model)) { refitModel <- try(fitPreviousBATSModel(y, model = model), silent = TRUE) return(refitModel) } # Check for constancy if (is.constant(y)) { fit <- list( y = y, x = matrix(y, nrow = 1, ncol = ny), errors = y * 0, fitted.values = y, seed.states = matrix(y[1]), AIC = -Inf, likelihood = -Inf, variance = 0, alpha = 0.9999, method = "BATS", call = match.call() ) return(structure(fit, class = "bats")) } # Check for non-positive data if (any((y <= 0))) { use.box.cox <- FALSE } if ((!is.null(use.box.cox)) && (!is.null(use.trend)) && (use.parallel)) { if (use.trend && (!is.null(use.damped.trend))) { # In this case, there is only one alternative. use.parallel <- FALSE } else if (use.trend == FALSE) { # As above, in this case, there is only one alternative. use.parallel <- FALSE } } if (!is.null(seasonal.periods)) { seasonal.mask <- (seasonal.periods == 1) seasonal.periods <- seasonal.periods[!seasonal.mask] } # Check if there is anything to parallelise if (is.null(seasonal.periods) && !is.null(use.box.cox) && !is.null(use.trend)) { use.parallel <- FALSE } if (is.null(use.box.cox)) { use.box.cox <- c(FALSE, TRUE) } if (any(use.box.cox)) { init.box.cox <- BoxCox.lambda(y, lower = bc.lower, upper = bc.upper) } else { init.box.cox <- NULL } if (is.null(use.trend)) { use.trend <- c(FALSE, TRUE) } else if (use.trend == FALSE) { use.damped.trend <- FALSE } if (is.null(use.damped.trend)) { use.damped.trend <- c(FALSE, TRUE) } y <- as.numeric(y) if (use.parallel) { # Set up the control array control.array <- NULL for (box.cox in use.box.cox) { for (trend in use.trend) { for (damping in use.damped.trend) { if (!trend && damping) { next } control.line <- c(box.cox, trend, damping) if (!is.null(control.array)) { control.array <- rbind(control.array, control.line) } else { control.array <- control.line } } } } ## Fit the models if (is.null(num.cores)) { num.cores <- detectCores(all.tests = FALSE, logical = TRUE) } clus <- makeCluster(num.cores) models.list <- clusterApplyLB(clus, c(1:nrow(control.array)), parFilterSpecifics, y = y, control.array = control.array, seasonal.periods = seasonal.periods, use.arma.errors = use.arma.errors, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ...) stopCluster(clus) ## Choose the best model #### Get the AICs aics <- numeric(nrow(control.array)) for (i in 1:nrow(control.array)) { aics[i] <- models.list[[i]]$AIC } best.number <- which.min(aics) best.model <- models.list[[best.number]] } else { best.aic <- Inf best.model <- NULL for (box.cox in use.box.cox) { for (trend in use.trend) { for (damping in use.damped.trend) { current.model <- try( filterSpecifics(y, box.cox = box.cox, trend = trend, damping = damping, seasonal.periods = seasonal.periods, use.arma.errors = use.arma.errors, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ... 
), silent=TRUE ) if(!("try-error" %in% class(current.model))) { if (current.model$AIC < best.aic) { best.aic <- current.model$AIC best.model <- current.model } } } } } } if(is.null(best.model)) stop("Unable to fit a model") best.model$call <- match.call() if (best.model$optim.return.code != 0) { warning("optim() did not converge.") } attributes(best.model$fitted.values) <- attributes(best.model$errors) <- attr_y best.model$y <- origy best.model$series <- seriesname best.model$method <- "BATS" return(best.model) } filterSpecifics <- function(y, box.cox, trend, damping, seasonal.periods, use.arma.errors, force.seasonality = FALSE, init.box.cox = NULL, bc.lower = 0, bc.upper = 1, biasadj = FALSE, ...) { if (!trend && damping) { return(list(AIC = Inf)) } first.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (!is.null(seasonal.periods) && !force.seasonality) { non.seasonal.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = NULL, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (first.model$AIC > non.seasonal.model$AIC) { seasonal.periods <- NULL first.model <- non.seasonal.model } } if (use.arma.errors) { suppressWarnings(arma <- auto.arima(as.numeric(first.model$errors), d = 0, ...)) p <- arma$arma[1] q <- arma$arma[2] if (p != 0 || q != 0) { # Did auto.arima() find any AR() or MA() coefficients? if (p != 0) { ar.coefs <- numeric(p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- numeric(q) } else { ma.coefs <- NULL } starting.params <- first.model$parameters # printCASE(box.cox, trend, damping, seasonal.periods, ar.coefs, ma.coefs, p, q) second.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, ar.coefs = ar.coefs, ma.coefs = ma.coefs, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (second.model$AIC < first.model$AIC) { return(second.model) } else { return(first.model) } } else { # Else auto.arima() did not find any AR() or MA()coefficients return(first.model) } } else { return(first.model) } } parFilterSpecifics <- function(control.number, control.array, y, seasonal.periods, use.arma.errors, force.seasonality = FALSE, init.box.cox = NULL, bc.lower = 0, bc.upper = 1, biasadj = FALSE, ...) { box.cox <- control.array[control.number, 1] trend <- control.array[control.number, 2] damping <- control.array[control.number, 3] if (!trend && damping) { return(list(AIC = Inf)) } first.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (!is.null(seasonal.periods) && !force.seasonality) { non.seasonal.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = NULL, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (first.model$AIC > non.seasonal.model$AIC) { seasonal.periods <- NULL first.model <- non.seasonal.model } } if (use.arma.errors) { suppressWarnings(arma <- auto.arima(as.numeric(first.model$errors), d = 0, ...)) p <- arma$arma[1] q <- arma$arma[2] if (p != 0 || q != 0) { # Did auto.arima() find any AR() or MA() coefficients? 
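# If so, refit the model with ARMA(p, q) errors (coefficients initialised
# at zero) and keep the augmented fit only if it improves on the AIC of
# the ARMA-free model chosen above.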
if (p != 0) { ar.coefs <- numeric(p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- numeric(q) } else { ma.coefs <- NULL } starting.params <- first.model$parameters # printCASE(box.cox, trend, damping, seasonal.periods, ar.coefs, ma.coefs, p, q) second.model <- fitSpecificBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, ar.coefs = ar.coefs, ma.coefs = ma.coefs, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj) if (second.model$AIC < first.model$AIC) { return(second.model) } else { return(first.model) } } else { # Else auto.arima() did not find any AR() or MA()coefficients return(first.model) } } else { return(first.model) } } #' @rdname fitted.Arima #' @export fitted.bats <- function(object, h = 1, ...) { if (h == 1) { return(object$fitted.values) } else { return(hfitted(object = object, h = h, FUN = "bats", ...)) } } #' @export print.bats <- function(x, ...) { cat(as.character(x)) cat("\n") cat("\nCall: ") print(x$call) cat("\nParameters") if (!is.null(x$lambda)) { cat("\n Lambda: ") cat(round(x$lambda, 6)) } cat("\n Alpha: ") cat(x$alpha) if (!is.null(x$beta)) { cat("\n Beta: ") cat(x$beta) cat("\n Damping Parameter: ") cat(round(x$damping.parameter, 6)) } if (!is.null(x$gamma.values)) { cat("\n Gamma Values: ") cat(x$gamma.values) } if (!is.null(x$ar.coefficients)) { cat("\n AR coefficients: ") cat(round(x$ar.coefficients, 6)) } if (!is.null(x$ma.coefficients)) { cat("\n MA coefficients: ") cat(round(x$ma.coefficients, 6)) } cat("\n") cat("\nSeed States:\n") print(x$seed.states) cat("\nSigma: ") cat(sqrt(x$variance)) cat("\nAIC: ") cat(x$AIC) cat("\n") } #' Plot components from BATS model #' #' Produces a plot of the level, slope and seasonal components from a BATS or #' TBATS model. The plotted components are Box-Cox transformed using the estimated transformation parameter. #' #' @param x Object of class \dQuote{bats/tbats}. #' @param object Object of class \dQuote{bats/tbats}. #' @param main Main title for plot. #' @param range.bars Logical indicating if each plot should have a bar at its #' right side representing relative size. If NULL, automatic selection takes #' place. #' @param ... Other plotting parameters passed to \code{\link[graphics]{par}}. #' @return None. Function produces a plot #' @author Rob J Hyndman #' @seealso \code{\link{bats}},\code{\link{tbats}} #' @keywords hplot #' #' @export plot.bats <- function(x, main = "Decomposition by BATS model", ...) { # Get original data, transform if necessary if (!is.null(x$lambda)) { y <- BoxCox(x$y, x$lambda) lambda <- attr(y, "lambda") } else { y <- x$y } # Extract states out <- cbind(observed = c(y), level = x$x[1, ]) if (!is.null(x$beta)) { out <- cbind(out, slope = x$x[2, ]) } nonseas <- 2 + !is.null(x$beta) # No. non-seasonal columns in out nseas <- length(x$gamma.values) # No. seasonal periods if (!is.null(x$gamma.values)) { seas.states <- x$x[-(1:(1 + !is.null(x$beta))), ] j <- cumsum(c(1, x$seasonal.periods)) for (i in 1:nseas) { out <- cbind(out, season = seas.states[j[i], ]) } if (nseas > 1) { colnames(out)[nonseas + 1:nseas] <- paste("season", 1:nseas, sep = "") } } # Add time series characteristics out <- ts(out) tsp(out) <- tsp(y) # Do the plot plot.ts(out, main = main, nc = 1, ...) 
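# Note: 'out' contains the observed series, the level, an optional slope,
# and one column per seasonal period; all components are shown on the
# Box-Cox transformed scale whenever a lambda was estimated.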
} #' @rdname is.ets #' @export is.bats <- function(x) { inherits(x, "bats") } forecast/R/getResponse.R0000644000176200001440000000635514323125536014717 0ustar liggesusers# Functions to return the response variable for different models. # If a Box-Cox transformation is used, the series returned here should # be on the original scale, not the Box-Cox transformed scale. #' Get response variable from time series model. #' #' \code{getResponse} is a generic function for extracting the historical data #' from a time series model (including \code{Arima}, \code{ets}, \code{ar}, #' \code{fracdiff}), a linear model of class \code{lm}, or a forecast object. #' The function invokes particular \emph{methods} which depend on the class of #' the first argument. #' #' #' @param object a time series model or forecast object. #' @param ... Additional arguments that are ignored. #' @return A numerical vector or a time series object of class \code{ts}. #' @author Rob J Hyndman #' @keywords ts #' #' @export getResponse <- function(object, ...) UseMethod("getResponse") #' @rdname getResponse #' @export getResponse.default <- function(object, ...) { if (is.list(object)) { return(object$x) } else { return(NULL) } } #' @rdname getResponse #' @export getResponse.lm <- function(object, ...) { if(!is.null(object[["x"]])){ object[["x"]] } else{ responsevar <- deparse(formula(object)[[2]]) model.frame(object$model)[, responsevar] } } #' @rdname getResponse #' @export getResponse.Arima <- function(object, ...) { if (is.element("x", names(object))) { x <- object$x } else { series.name <- object$series if (is.null(series.name)) { return(NULL) } else { x <- try(eval.parent(parse(text = series.name)), silent = TRUE) if (is.element("try-error", class(x))) { # Try one level further up the chain x <- try(eval.parent(parse(text = series.name), 2), silent = TRUE) } if (is.element("try-error", class(x))) { # Give up return(NULL) } } } return(as.ts(x)) } #' @rdname getResponse #' @export getResponse.fracdiff <- function(object, ...) { if (is.element("x", names(object))) { x <- object$x } else { series.name <- as.character(object$call)[2] if (is.null(series.name)) { stop("missing original time series") } else { x <- try(eval.parent(parse(text = series.name)), silent = TRUE) if (is.element("try-error", class(x))) { # Try one level further up the chain x <- try(eval.parent(parse(text = series.name), 2), silent = TRUE) } if (is.element("try-error", class(x))) { # Give up return(NULL) } } } return(as.ts(x)) } #' @rdname getResponse #' @export getResponse.ar <- function(object, ...) { getResponse.Arima(object) } #' @rdname getResponse #' @export getResponse.tbats <- function(object, ...) { if (is.element("y", names(object))) { y <- object$y } else { return(NULL) } return(as.ts(y)) } #' @rdname getResponse #' @export getResponse.bats <- function(object, ...) { return(getResponse.tbats(object, ...)) } #' @rdname getResponse #' @export getResponse.mforecast <- function(object, ...) { return(do.call(cbind, lapply(object$forecast, function(x) x$x))) } #' @rdname getResponse #' @export getResponse.baggedModel <- function(object, ...) { if (is.element("y", names(object))) { y <- object$y } else { return(NULL) } return(as.ts(y)) } forecast/R/forecast.varest.R0000644000176200001440000000355314150370574015531 0ustar liggesusers# forecast function for varest, just a wrapper for predict.varest #' @export forecast.varest <- function(object, h=10, level=c(80, 95), fan=FALSE, ...) 
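# A hedged usage sketch (assumes the 'vars' package is installed):
#   library(vars)
#   fit <- VAR(EuStockMarkets, p = 2)
#   fc <- forecast(fit, h = 30, level = c(80, 95))
# Each confidence level needs its own call to predict.varest(), which is
# why the loop below accumulates the interval matrices column by column.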
{ out <- list(model = object, forecast = vector("list", object$K)) # Get residuals and fitted values and fix the times tspx <- tsp(object$y) vres <- residuals(object) vfits <- fitted(object) method <- paste0("VAR(", object$p, ")") # Add forecasts with prediction intervals # out$mean <- out$lower <- out$upper <- vector("list",object$K) for (i in seq_along(level)) { pr <- predict(object, n.ahead = h, ci = level[i] / 100, ...) for (j in 1:object$K) { out$forecast[[j]]$lower <- cbind(out$forecast[[j]]$lower, pr$fcst[[j]][, "lower"]) out$forecast[[j]]$upper <- cbind(out$forecast[[j]]$upper, pr$fcst[[j]][, "upper"]) } } j <- 1 for (fcast in out$forecast) { fcast$mean <- ts(pr$fcst[[j]][, "fcst"], frequency = tspx[3], start = tspx[2] + 1 / tspx[3]) fcast$lower <- ts(fcast$lower, frequency = tspx[3], start = tspx[2] + 1 / tspx[3]) fcast$upper <- ts(fcast$upper, frequency = tspx[3], start = tspx[2] + 1 / tspx[3]) colnames(fcast$lower) <- colnames(fcast$upper) <- paste0(level, "%") fcast$residuals <- fcast$fitted <- ts(rep(NA, nrow(object$y))) fcast$residuals[((1 - nrow(vres)):0) + length(fcast$residuals)] <- vres[, j] fcast$fitted[((1 - nrow(vfits)):0) + length(fcast$fitted)] <- vfits[, j] fcast$method <- method fcast$level <- level fcast$x <- object$y[, j] fcast$series <- colnames(object$y)[j] tsp(fcast$residuals) <- tsp(fcast$fitted) <- tspx fcast <- structure(fcast, class = "forecast") out$forecast[[j]] <- fcast j <- j + 1 } names(out$forecast) <- names(pr$fcst) out$method <- rep(method, object$K) names(out$forecast) <- names(out$method) <- names(pr$fcst) return(structure(out, class = "mforecast")) } forecast/R/naive.R0000644000176200001440000002115014456202551013511 0ustar liggesusers# Random walk related forecasts # Based on lagged walks # lag=1 corresponds to standard random walk (i.e., naive forecast) # lag=m corresponds to seasonal naive method lagwalk <- function(y, lag=1, drift=FALSE, lambda=NULL, biasadj=FALSE) { if(!is.ts(y)){ y <- as.ts(y) } dimy <- dim(y) if(!is.null(dimy)) { if(dimy[2] > 1) stop("Multivariate time series detected. This function is designed for univariate time series only.") } origy <- y if (!is.null(lambda)) { y <- BoxCox(y, lambda) lambda <- attr(y, "lambda") } m <- frequency(y) # Complete missing values with lagged values y_na <- which(is.na(y)) y_na <- y_na[y_na>lag] fits <- stats::lag(y, -lag) for(i in y_na){ if(is.na(fits)[i]){ fits[i] <- fits[i-lag] } } fitted <- ts(c(rep(NA, lag), head(fits, -lag)), start = start(y), frequency = m) fitted <- copy_msts(y, fitted) if(drift){ fit <- summary(lm(y-fitted ~ 1, na.action=na.exclude)) b <- fit$coefficients[1,1] b.se <- fit$coefficients[1,2] sigma <- fit$sigma fitted <- fitted + b res <- y - fitted method <- "Lag walk with drift" } else{ res <- y - fitted b <- b.se <- 0 sigma <- sqrt(mean(res^2, na.rm=TRUE)) method <- "Lag walk" } if (!is.null(lambda)) { fitted <- InvBoxCox(fitted, lambda, biasadj, var(res)) attr(lambda, "biasadj") <- biasadj } model <- structure( list( x = origy, fitted = fitted, future = tail(fits, lag), residuals = res, method = method, series = deparse(substitute(y)), sigma2 = sigma^2, par = list(includedrift = drift, drift = b, drift.se = b.se, lag = lag), lambda = lambda, call = match.call() ), class = "lagwalk" ) } #' @export forecast.lagwalk <- function(object, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, simulate=FALSE, bootstrap=FALSE, npaths=5000, biasadj=FALSE, ...) 
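# Sketch of the recursion implemented below: with k = object$par$lag and
# drift b, the i-th point forecast repeats the matching entry of the last
# k observations, shifted by the drift:
#   fc[i] = future[((i - 1) %% k) + 1] + ceiling(i / k) * b
# The interval width grows as sqrt(mse * steps + (steps * b.se)^2), so
# uncertainty in the estimated drift coefficient is included.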
{ lag <- object$par$lag fullperiods <- (h-1)/lag+1 steps <- rep(1:fullperiods, rep(lag,fullperiods))[1:h] # Point forecasts fc <- rep(object$future, fullperiods)[1:h] + steps*object$par$drift # Intervals # Adjust prediction intervals to allow for drift coefficient standard error mse <- sum(object$residuals^2, na.rm = TRUE)/(sum(!is.na(object$residuals)) - (object$par$drift != 0)) se <- sqrt(mse*steps + (steps*object$par$drift.se)^2) if(fan) level <- seq(51,99,by=3) else { if(min(level) > 0 & max(level) < 1) level <- 100*level else if(min(level) < 0 | max(level) > 99.99) stop("Confidence limit out of range") } nconf <- length(level) if (simulate | bootstrap) # Compute prediction intervals using simulations { sim <- matrix(NA, nrow = npaths, ncol = h) for (i in 1:npaths) sim[i, ] <- simulate(object, nsim = h, bootstrap = bootstrap, lambda = lambda) lower <- apply(sim, 2, quantile, 0.5 - level / 200, type = 8) upper <- apply(sim, 2, quantile, 0.5 + level / 200, type = 8) if (nconf > 1L) { lower <- t(lower) upper <- t(upper) } else { lower <- matrix(lower, ncol = 1) upper <- matrix(upper, ncol = 1) } } else { z <- qnorm(.5 + level/200) lower <- upper <- matrix(NA,nrow=h,ncol=nconf) for(i in 1:nconf) { lower[,i] <- fc - z[i]*se upper[,i] <- fc + z[i]*se } } if (!is.null(lambda)) { fc <- InvBoxCox(fc, lambda, biasadj, se^2) if(!bootstrap){ # Bootstrap intervals are already backtransformed upper <- InvBoxCox(upper, lambda) lower <- InvBoxCox(lower, lambda) } } # Set attributes fc <- future_msts(object$x, fc) lower <- future_msts(object$x, lower) upper <- future_msts(object$x, upper) colnames(lower) <- colnames(upper) <- paste(level,"%",sep="") return(structure( list( method = object$method, model = object, lambda = lambda, x = object$x, fitted = fitted(object), residuals = residuals(object), series = object$series, mean = fc, level = level, lower = lower, upper = upper ), class = "forecast") ) } #' @export print.lagwalk <- function(x, ...) { cat(paste("Call:", deparse(x$call), "\n\n")) if (x$par$includedrift) { cat(paste("Drift: ", round(x$par$drift, 4), " (se ", round(x$par$drift.se, 4), ")\n", sep = "")) } cat(paste("Residual sd:", round(sqrt(x$sigma2), 4), "\n")) } #' @export fitted.lagwalk <- function(object, ...){ object$fitted } # Random walk #' @rdname naive #' #' @examples #' #' gold.fcast <- rwf(gold[1:60], h=50) #' plot(gold.fcast) #' #' @export rwf <- function(y, h=10, drift=FALSE, level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, ..., x=y) { fit <- lagwalk( x, lag = 1, drift = drift, lambda = lambda, biasadj = biasadj ) fc <- forecast(fit, h = h, level = level, fan = fan, lambda = fit$lambda, biasadj = biasadj, ...) fc$model$call <- match.call() fc$series <- deparse(substitute(y)) if (drift) { fc$method <- "Random walk with drift" } else { fc$method <- "Random walk" } return(fc) } #' Naive and Random Walk Forecasts #' #' \code{rwf()} returns forecasts and prediction intervals for a random walk #' with drift model applied to \code{y}. This is equivalent to an ARIMA(0,1,0) #' model with an optional drift coefficient. \code{naive()} is simply a wrapper #' to \code{rwf()} for simplicity. \code{snaive()} returns forecasts and #' prediction intervals from an ARIMA(0,0,0)(0,1,0)m model where m is the #' seasonal period. #' #' The random walk with drift model is \deqn{Y_t=c + Y_{t-1} + Z_t}{Y[t]=c + #' Y[t-1] + Z[t]} where \eqn{Z_t}{Z[t]} is a normal iid error. Forecasts are #' given by \deqn{Y_n(h)=ch+Y_n}{Y[n+h]=ch+Y[n]}. 
If there is no drift (as in #' \code{naive}), the drift parameter c=0. Forecast standard errors allow for #' uncertainty in estimating the drift parameter (unlike the corresponding #' forecasts obtained by fitting an ARIMA model directly). #' #' The seasonal naive model is \deqn{Y_t= Y_{t-m} + Z_t}{Y[t]=Y[t-m] + Z[t]} #' where \eqn{Z_t}{Z[t]} is a normal iid error. #' #' @aliases print.naive #' #' @param y a numeric vector or time series of class \code{ts} #' @param h Number of periods for forecasting #' @param drift Logical flag. If TRUE, fits a random walk with drift model. #' @param level Confidence levels for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param x Deprecated. Included for backwards compatibility. #' @inheritParams forecast.ts #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{naive} or #' \code{snaive}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values (one-step #' forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link{Arima}} #' @keywords ts #' @examples #' #' plot(naive(gold,h=50),include=200) #' #' @export naive <- function(y, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, ..., x=y) { fc <- rwf( x, h = h, level = level, fan = fan, lambda = lambda, drift = FALSE, biasadj = biasadj, ... ) fc$model$call <- match.call() fc$series <- deparse(substitute(y)) fc$method <- "Naive method" return(fc) } #' @rdname naive #' #' @examples #' #' plot(snaive(wineind)) #' #' @export snaive <- function(y, h=2 * frequency(x), level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, ..., x=y) { fit <- lagwalk( x, lag = frequency(x), drift = FALSE, lambda = lambda, biasadj = biasadj ) fc <- forecast(fit, h = h, level = level, fan = fan, lambda = fit$lambda, biasadj = biasadj, ...) fc$model$call <- match.call() fc$series <- deparse(substitute(y)) fc$method <- "Seasonal naive method" return(fc) } forecast/R/forecastTBATS.R0000644000176200001440000001333414323125536015020 0ustar liggesusers#' @rdname forecast.bats #' @export forecast.tbats <- function(object, h, level = c(80, 95), fan = FALSE, biasadj = NULL, ...) 
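# Dispatch note: a plain "bats" object passed here is forwarded to
# forecast.bats(). The default horizon is two full cycles of the largest
# seasonal period, falling back to 2 * frequency(y), or 10 for
# non-seasonal fits. A minimal usage sketch:
#   fit <- tbats(USAccDeaths)
#   fc <- forecast(fit, h = 24)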
{ # Check if forecast.tbats called incorrectly if (identical(class(object), "bats")) { return(forecast.bats(object, h, level, fan, biasadj, ...)) } # Set up the variables if (any(class(object$y) == "ts")) { ts.frequency <- frequency(object$y) } else { ts.frequency <- ifelse(!is.null(object$seasonal.periods), max(object$seasonal.periods), 1) } if (missing(h)) { if (is.null(object$seasonal.periods)) { h <- ifelse(ts.frequency == 1, 10, 2 * ts.frequency) } else { h <- 2 * max(object$seasonal.periods) } } else if (h <= 0) { stop("Forecast horizon out of bounds") } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } if (!is.null(object$k.vector)) { tau <- 2 * sum(object$k.vector) } else { tau <- 0 } x <- matrix(0, nrow = nrow(object$x), ncol = h) y.forecast <- numeric(h) if (!is.null(object$beta)) { adj.beta <- 1 } else { adj.beta <- 0 } # Set up the matrices w <- .Call("makeTBATSWMatrix", smallPhi_s = object$damping.parameter, kVector_s = as.integer(object$k.vector), arCoefs_s = object$ar.coefficients, maCoefs_s = object$ma.coefficients, tau_s = as.integer(tau), PACKAGE = "forecast") if (!is.null(object$seasonal.periods)) { gamma.bold <- matrix(0, nrow = 1, ncol = tau) .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = as.integer(object$k.vector), gammaOne_s = object$gamma.one.values, gammaTwo_s = object$gamma.two.values, PACKAGE = "forecast") } else { gamma.bold <- NULL } g <- matrix(0, nrow = (tau + 1 + adj.beta + object$p + object$q), ncol = 1) if (object$p != 0) { g[(1 + adj.beta + tau + 1), 1] <- 1 } if (object$q != 0) { g[(1 + adj.beta + tau + object$p + 1), 1] <- 1 } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = object$alpha, beta_s = object$beta.v, PACKAGE = "forecast") # print(g) F <- makeTBATSFMatrix(alpha = object$alpha, beta = object$beta, small.phi = object$damping.parameter, seasonal.periods = object$seasonal.periods, k.vector = as.integer(object$k.vector), gamma.bold.matrix = gamma.bold, ar.coefs = object$ar.coefficients, ma.coefs = object$ma.coefficients) # Do the forecast y.forecast[1] <- w$w.transpose %*% object$x[, ncol(object$x)] x[, 1] <- F %*% object$x[, ncol(object$x)] # + g %*% object$errors[length(object$errors)] if (h > 1) { for (t in 2:h) { x[, t] <- F %*% x[, (t - 1)] y.forecast[t] <- w$w.transpose %*% x[, (t - 1)] } } ## Make prediction intervals here lower.bounds <- upper.bounds <- matrix(NA, ncol = length(level), nrow = h) variance.multiplier <- numeric(h) variance.multiplier[1] <- 1 if (h > 1) { for (j in 1:(h - 1)) { if (j == 1) { f.running <- diag(ncol(F)) } else { f.running <- f.running %*% F } c.j <- w$w.transpose %*% f.running %*% g variance.multiplier[(j + 1)] <- variance.multiplier[j] + c.j^2 } } variance <- object$variance * variance.multiplier # print(variance) st.dev <- sqrt(variance) for (i in 1:length(level)) { marg.error <- st.dev * abs(qnorm((100 - level[i]) / 200)) lower.bounds[, i] <- y.forecast - marg.error upper.bounds[, i] <- y.forecast + marg.error } # Inv Box Cox transform if required if (!is.null(object$lambda)) { y.forecast <- InvBoxCox(y.forecast, object$lambda, biasadj, list(level = level, upper = upper.bounds, lower = lower.bounds)) lower.bounds <- InvBoxCox(lower.bounds, object$lambda) if (object$lambda < 1) { lower.bounds <- pmax(lower.bounds, 0) } upper.bounds <- InvBoxCox(upper.bounds, object$lambda) } colnames(upper.bounds) <- 
colnames(lower.bounds) <- paste0(level, "%") forecast.object <- list( model = object, mean = future_msts(object$y, y.forecast), level = level, x = object$y, series = object$series, upper = future_msts(object$y, upper.bounds), lower = future_msts(object$y, lower.bounds), fitted = copy_msts(object$y, object$fitted.values), method = as.character(object), residuals = copy_msts(object$y, object$errors) ) if (is.null(object$series)) { forecast.object$series <- deparse(object$call$y) } class(forecast.object) <- "forecast" return(forecast.object) } #' @export as.character.tbats <- function(x, ...) { name <- "TBATS(" if (!is.null(x$lambda)) { name <- paste(name, round(x$lambda, digits = 3), sep = "") } else { name <- paste(name, "1", sep = "") } name <- paste(name, ", {", sep = "") if (!is.null(x$ar.coefficients)) { name <- paste(name, length(x$ar.coefficients), sep = "") } else { name <- paste(name, "0", sep = "") } name <- paste(name, ",", sep = "") if (!is.null(x$ma.coefficients)) { name <- paste(name, length(x$ma.coefficients), sep = "") } else { name <- paste(name, "0", sep = "") } name <- paste(name, "}, ", sep = "") if (!is.null(x$damping.parameter)) { name <- paste(name, round(x$damping.parameter, digits = 3), ",", sep = "") } else { name <- paste(name, "-,", sep = "") } if (!is.null(x$seasonal.periods)) { name <- paste(name, " {", sep = "") M <- length(x$seasonal.periods) for (i in 1:M) { name <- paste(name, "<", round(x$seasonal.periods[i], 2), ",", x$k.vector[i], ">", sep = "") if (i < M) { name <- paste(name, ", ", sep = "") } else { name <- paste(name, "})", sep = "") } } } else { name <- paste(name, "{-})", sep = "") } return(name) } forecast/R/theta.R0000644000176200001440000001155314150370574013524 0ustar liggesusers# Implement standard Theta method of Assimakopoulos and Nikolopoulos (2000) # More general methods are available in the forecTheta package # Author: RJH #' Theta method forecast #' #' Returns forecasts and prediction intervals for a theta method forecast. #' #' The theta method of Assimakopoulos and Nikolopoulos (2000) is equivalent to #' simple exponential smoothing with drift. This is demonstrated in Hyndman and #' Billah (2003). #' #' The series is tested for seasonality using the test outlined in A&N. If #' deemed seasonal, the series is seasonally adjusted using a classical #' multiplicative decomposition before applying the theta method. The resulting #' forecasts are then reseasonalized. #' #' Prediction intervals are computed using the underlying state space model. #' #' More general theta methods are available in the #' \code{\link[forecTheta]{forecTheta}} package. #' #' @param y a numeric vector or time series of class \code{ts} #' @param h Number of periods for forecasting #' @param level Confidence levels for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param x Deprecated. Included for backwards compatibility. #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{thetaf}. 
#' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values (one-step #' forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link[stats]{arima}}, \code{\link{meanf}}, \code{\link{rwf}}, #' \code{\link{ses}} #' @references Assimakopoulos, V. and Nikolopoulos, K. (2000). The theta model: #' a decomposition approach to forecasting. \emph{International Journal of #' Forecasting} \bold{16}, 521-530. #' #' Hyndman, R.J., and Billah, B. (2003) Unmasking the Theta method. #' \emph{International J. Forecasting}, \bold{19}, 287-290. #' @keywords ts #' @examples #' nile.fcast <- thetaf(Nile) #' plot(nile.fcast) #' @export thetaf <- function(y, h = ifelse(frequency(y) > 1, 2 * frequency(y), 10), level = c(80, 95), fan = FALSE, x = y) { # Check inputs if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } # Check seasonality n <- length(x) x <- as.ts(x) m <- frequency(x) if (m > 1 && !is.constant(x) && n > 2 * m) { r <- as.numeric(acf(x, lag.max = m, plot = FALSE)$acf)[-1] stat <- sqrt((1 + 2 * sum(r[-m]^2)) / n) seasonal <- (abs(r[m]) / stat > qnorm(0.95)) } else { seasonal <- FALSE } # Seasonal decomposition origx <- x if (seasonal) { decomp <- decompose(x, type = "multiplicative") if (any(abs(seasonal(decomp)) < 1e-4)) { warning("Seasonal indexes close to zero. 
Using non-seasonal Theta method") } else { x <- seasadj(decomp) } } # Find theta lines fcast <- ses(x, h = h) tmp2 <- lsfit(0:(n - 1), x)$coefficients[2] / 2 alpha <- pmax(1e-10, fcast$model$par["alpha"]) fcast$mean <- fcast$mean + tmp2 * (0:(h - 1) + (1 - (1 - alpha)^n) / alpha) # Reseasonalize if (seasonal) { fcast$mean <- fcast$mean * rep(tail(decomp$seasonal, m), trunc(1 + h / m))[1:h] fcast$fitted <- fcast$fitted * decomp$seasonal } fcast$residuals <- origx - fcast$fitted # Find prediction intervals fcast.se <- sqrt(fcast$model$sigma2) * sqrt((0:(h - 1)) * alpha^2 + 1) nconf <- length(level) fcast$lower <- fcast$upper <- ts(matrix(NA, nrow = h, ncol = nconf)) tsp(fcast$lower) <- tsp(fcast$upper) <- tsp(fcast$mean) for (i in 1:nconf) { zt <- -qnorm(0.5 - level[i] / 200) fcast$lower[, i] <- fcast$mean - zt * fcast.se fcast$upper[, i] <- fcast$mean + zt * fcast.se } # Return results fcast$x <- origx fcast$level <- level fcast$method <- "Theta" fcast$model <- list(alpha = alpha, drift = tmp2, sigma = fcast$model$sigma2) fcast$model$call <- match.call() return(fcast) } forecast/R/modelAR.R0000644000176200001440000004173714323125536013747 0ustar liggesusers# Defaults: # For non-seasonal data, p chosen using AIC from linear AR(p) model # For seasonal data, p chosen using AIC from linear AR(p) model after # seasonally adjusting with STL decomposition, and P=1 #' Time Series Forecasts with a user-defined model #' #' Experimental function to forecast univariate time series with a #' user-defined model #' #' This is an experimental function and only recommended for advanced users. #' The selected model is fitted with lagged values of \code{y} as #' inputs. The inputs are for #' lags 1 to \code{p}, and lags \code{m} to \code{mP} where #' \code{m=frequency(y)}. If \code{xreg} is provided, its columns are also #' used as inputs. If there are missing values in \code{y} or #' \code{xreg}, the corresponding rows (and any others which depend on them as #' lags) are omitted from the fit. The model is trained for one-step #' forecasting. Multi-step forecasts are computed recursively. #' #' @aliases print.modelAR #' #' @param y A numeric vector or time series of class \code{ts}. #' @param p Embedding dimension for non-seasonal time series. Number of #' non-seasonal lags used as inputs. For non-seasonal time series, the default #' is the optimal number of lags (according to the AIC) for a linear AR(p) #' model. For seasonal time series, the same method is used but applied to #' seasonally adjusted data (from an stl decomposition). #' @param P Number of seasonal lags used as inputs. #' @param FUN Function used for model fitting. Must accept arguments \code{x} #' and \code{y} for the predictors and response, respectively (\code{formula} #' object not currently supported). #' @param predict.FUN Prediction function used to apply \code{FUN} to new data. #' Must accept the object returned by \code{FUN} as its first argument, and a #' data frame or matrix of new data for its second argument. Additionally, #' it should return fitted values when new data is omitted. #' @param xreg Optionally, a vector or matrix of external regressors, which #' must have the same number of rows as \code{y}. Must be numeric. #' @param model Output from a previous call to \code{modelAR}. If model is #' passed, this same model is fitted to \code{y} without re-estimating any #' parameters. #' @param subset Optional vector specifying a subset of observations to be used #' in the fit. 
Can be an integer index vector or a logical vector the same #' length as \code{y}. All observations are used by default. #' @param scale.inputs If TRUE, inputs are scaled by subtracting the column #' means and dividing by their respective standard deviations. If \code{lambda} #' is not \code{NULL}, scaling is applied after Box-Cox transformation. #' @param x Deprecated. Included for backwards compatibility. #' @param \dots Other arguments passed to \code{FUN} for #' \code{modelAR}. #' @inheritParams forecast.ts #' #' @return Returns an object of class "\code{modelAR}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{modelAR}. #' #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{x}{The original time series.} #' \item{xreg}{The external regressors used in fitting (if given).} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' \item{...}{Other arguments} #' #' @author Rob J Hyndman and Gabriel Caceres #' @keywords ts #' #' @export modelAR <- function(y, p, P=1, FUN, predict.FUN, xreg=NULL, lambda=NULL, model=NULL, subset=NULL, scale.inputs=FALSE, x=y, ...) { useoldmodel <- FALSE yname <- deparse(substitute(y)) if (!is.null(model)) { # Use previously fitted model useoldmodel <- TRUE # Check for conflicts between new and old data: # Check model class if (!is.modelAR(model)) { stop("Model must be a modelAR object") } # Check new data m <- max(round(frequency(model$x)), 1L) minlength <- max(c(model$p, model$P * m)) + 1 if (length(x) < minlength) { stop(paste("Series must be at least of length", minlength, "to use fitted model")) } if (tsp(as.ts(x))[3] != m) { warning(paste("Data frequency doesn't match fitted model, coercing to frequency =", m)) x <- ts(x, frequency = m) } # Check xreg if (!is.null(model$xreg)) { if (is.null(xreg)) { stop("No external regressors provided") } if (NCOL(xreg) != NCOL(model$xreg)) { stop("Number of external regressors does not match fitted model") } } # Update parameters with previous model lambda <- model$lambda p <- model$p P <- model$P FUN <- model$FUN predict.FUN <- model$predict.FUN if (P > 0) { lags <- sort(unique(c(1:p, m * (1:P)))) } else { lags <- 1:p } if (!is.null(model$scalex)) { scale.inputs <- TRUE } } else { # when not using an old model if (length(y) < 3) { stop("Not enough data to fit a model") } # Check for constant data in time series constant_data <- is.constant(na.interp(x)) if (constant_data){ warning("Constant data, setting p=1, P=0, lambda=NULL, scale.inputs=FALSE") scale.inputs <- FALSE lambda <- NULL p <- 1 P <- 0 } ## Check for constant data in xreg if (!is.null(xreg)){ constant_xreg <- any(apply(as.matrix(xreg), 2, function(x) is.constant(na.interp(x)))) if (constant_xreg){ warning("Constant xreg column, setting scale.inputs=FALSE") scale.inputs <- FALSE } } } # Check for NAs in x if (any(is.na(x))) { warning("Missing values in x, omitting rows") } # Transform data if (!is.null(lambda)) { xx <- BoxCox(x, lambda) lambda <- attr(xx, "lambda") } else { xx <- x } ## Check whether to use a subset of the data xsub <- rep(TRUE, length(x)) if (is.numeric(subset)) { xsub[-subset] <- FALSE } if (is.logical(subset)) { xsub <- subset } # Scale series scalex <- NULL if (scale.inputs) { 
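# When reusing a previously fitted model, apply its stored centring and
# scaling; otherwise estimate centre and scale from the (possibly
# subsetted) training series.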
if (useoldmodel) { scalex <- model$scalex } else { tmpx <- scale(xx[xsub], center = TRUE, scale = TRUE) scalex <- list( center = attr(tmpx, "scaled:center"), scale = attr(tmpx, "scaled:scale") ) } xx <- scale(xx, center = scalex$center, scale = scalex$scale) xx <- xx[, 1] } # Check xreg class & dim xxreg <- NULL scalexreg <- NULL if (!is.null(xreg)) { xxreg <- xreg <- as.matrix(xreg) if (length(x) != NROW(xreg)) { stop("Number of rows in xreg does not match series length") } # Check for NAs in xreg if (any(is.na(xreg))) { warning("Missing values in xreg, omitting rows") } # Scale xreg if (scale.inputs) { if (useoldmodel) { scalexreg <- model$scalexreg } else { tmpx <- scale(xxreg[xsub, ], center = TRUE, scale = TRUE) scalexreg <- list( center = attr(tmpx, "scaled:center"), scale = attr(tmpx, "scaled:scale") ) } xxreg <- scale(xxreg, center = scalexreg$center, scale = scalexreg$scale) } } # Set up lagged matrix n <- length(xx) xx <- as.ts(xx) m <- max(round(frequency(xx)), 1L) if (!useoldmodel) { if (m == 1) { if (missing(p)) { p <- max(length(ar(na.interp(xx))$ar), 1) } if (p >= n) { warning("Reducing number of lagged inputs due to short series") p <- n - 1 } lags <- 1:p if (P > 1) { warning("Non-seasonal data, ignoring seasonal lags") } P <- 0 } else { if (missing(p)) { if (n >= 2 * m) { x.sa <- seasadj(mstl(na.interp(xx))) } else { x.sa <- na.interp(xx) } p <- max(length(ar(x.sa)$ar), 1) } if (p >= n) { warning("Reducing number of lagged inputs due to short series") p <- n - 1 } if (P > 0 && n >= m * P + 2) { lags <- sort(unique(c(1:p, m * (1:P)))) } else { lags <- 1:p if (P > 0) { warning("Series too short for seasonal lags") P <- 0 } } } } maxlag <- max(lags) nlag <- length(lags) y <- xx[-(1:maxlag)] lags.X <- matrix(NA_real_, ncol = nlag, nrow = n - maxlag) for (i in 1:nlag){ lags.X[, i] <- xx[(maxlag - lags[i] + 1):(n - lags[i])] } # Add xreg into lagged matrix lags.X <- cbind(lags.X, xxreg[-(1:maxlag), ]) # Remove missing values if present j <- complete.cases(lags.X, y) ## Remove values not in subset j <- j & xsub[-(1:maxlag)] ## Stop if there's no data to fit (e.g. due to NAs or NaNs) if (NROW(lags.X[j,, drop=FALSE]) == 0) { stop("No data to fit (possibly due to NA or NaN)") } ## Fit selected model if (useoldmodel) { fit <- model$model } else { fit <- FUN(x = lags.X[j,, drop=FALSE], y = y[j], ...) } # Return results out <- list() out$x <- as.ts(x) out$m <- m out$p <- p out$P <- P out$FUN <- FUN out$predict.FUN <- predict.FUN out$scalex <- scalex out$scalexreg <- scalexreg out$xreg <- xreg out$lambda <- lambda out$subset <- (1:length(x))[xsub] out$model <- fit out$modelargs <- list(...) 
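# Illustrative sketch (not from the original source): the FUN/predict.FUN
# contract can be met by an ordinary linear model. 'my_lm' and 'my_predict'
# are hypothetical helper names; my_predict returns fitted values when
# newdata is omitted, as documented above.
#   my_lm <- function(x, y, ...) lm(y ~ ., data = as.data.frame(x))
#   my_predict <- function(object, newdata) {
#     if (missing(newdata)) fitted(object)
#     else predict(object, newdata = as.data.frame(matrix(newdata, nrow = 1)))
#   }
#   fit <- modelAR(USAccDeaths, p = 2, P = 1, FUN = my_lm, predict.FUN = my_predict)
#   forecast(fit, h = 12)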
if (useoldmodel) { out$modelargs <- model$modelargs fits <- c(rep(NA_real_, maxlag), predict.FUN(fit, lags.X[j,, drop=FALSE])) } else { fits <- c(rep(NA_real_, maxlag), predict.FUN(fit)) } if (scale.inputs) { fits <- fits * scalex$scale + scalex$center } fits <- ts(fits) if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda) } out$fitted <- ts(rep(NA_real_, length(out$x))) out$fitted[c(rep(TRUE, maxlag), j)] <- fits tsp(out$fitted) <- tsp(out$x) out$residuals <- out$x - out$fitted out$lags <- lags out$series <- yname out$method <- deparse(substitute(FUN)) out$method <- paste0(out$method, "-AR(", p) if (P > 0) out$method <- paste(out$method, ",", P, sep = "") out$method <- paste0(out$method, ")") if (P > 0) out$method <- paste(out$method, "[", m, "]", sep = "") out$call <- match.call() return(structure(out, class = c("modelAR"))) } #' Forecasting using user-defined model #' #' Returns forecasts and other information for user-defined #' models. #' #' Prediction intervals are calculated through simulations and can be slow. #' Note that if the model is too complex and overfits the data, the residuals #' can be arbitrarily small; if used for prediction interval calculations, they #' could lead to misleadingly small values. #' #' @param object An object of class "\code{modelAR}" resulting from a call to #' \code{\link{modelAR}}. #' @param h Number of periods for forecasting. If \code{xreg} is used, \code{h} #' is ignored and the number of forecast periods is set to the number of rows #' of \code{xreg}. #' @param PI If TRUE, prediction intervals are produced, otherwise only point #' forecasts are calculated. If \code{PI} is FALSE, then \code{level}, #' \code{fan}, \code{bootstrap} and \code{npaths} are all ignored. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is #' suitable for fan plots. #' @param xreg Future values of external regressor variables. #' @param bootstrap If \code{TRUE}, then prediction intervals computed using #' simulations with resampled residuals rather than normally distributed #' errors. Ignored if \code{innov} is not \code{NULL}. #' @param npaths Number of sample paths used in computing simulated prediction #' intervals. #' @param innov Values to use as innovations for prediction intervals. Must be #' a matrix with \code{h} rows and \code{npaths} columns (vectors are coerced #' into a matrix). If present, \code{bootstrap} is ignored. #' @param ... Additional arguments passed to \code{\link{simulate.modelAR}} #' @inheritParams forecast.ts #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.modelAR}. 
#' #' An object of class "\code{forecast}" is a list containing at least the #' following elements: #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} #' \item{upper}{Upper limits for prediction intervals} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The original time series (either \code{object} itself or the time series #' used to create the model stored as \code{object}).} #' \item{xreg}{The external regressors used in fitting (if given).} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' \item{...}{Other arguments} #' #' @author Rob J Hyndman and Gabriel Caceres #' @seealso \code{\link{nnetar}}. #' @keywords ts #' #' @export forecast.modelAR <- function(object, h=ifelse(object$m > 1, 2 * object$m, 10), PI=FALSE, level=c(80, 95), fan=FALSE, xreg=NULL, lambda=object$lambda, bootstrap=FALSE, npaths=1000, innov=NULL, ...) { out <- object tspx <- tsp(out$x) # if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } # Check if xreg was used in fitted model if (is.null(object$xreg)) { if (!is.null(xreg)) { warning("External regressors were not used in fitted model, xreg will be ignored") } xreg <- NULL } else { if (is.null(xreg)) { stop("No external regressors provided") } xreg <- as.matrix(xreg) if (NCOL(xreg) != NCOL(object$xreg)) { stop("Number of external regressors does not match fitted model") } h <- NROW(xreg) } fcast <- numeric(h) xx <- object$x xxreg <- xreg if (!is.null(lambda)) { xx <- BoxCox(xx, lambda) lambda <- attr(xx, "lambda") } # Check and apply scaling of fitted model if (!is.null(object$scalex)) { xx <- scale(xx, center = object$scalex$center, scale = object$scalex$scale) if (!is.null(xreg)) { xxreg <- scale(xreg, center = object$scalexreg$center, scale = object$scalexreg$scale) } } # Get lags used in fitted model lags <- object$lags maxlag <- max(lags) flag <- rev(tail(xx, n = maxlag)) # Iterative 1-step forecast for (i in 1:h) { newdata <- c(flag[lags], xxreg[i, ]) if (any(is.na(newdata))) { stop("I can't forecast when there are missing values near the end of the series.") } fcast[i] <- object$predict.FUN(object$model, newdata) flag <- c(fcast[i], flag[-maxlag]) } # Re-scale point forecasts if (!is.null(object$scalex)) { fcast <- fcast * object$scalex$scale + object$scalex$center } # Add ts properties fcast <- ts(fcast, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) # Back-transform point forecasts if (!is.null(lambda)) { fcast <- InvBoxCox(fcast, lambda) } # Compute prediction intervals using simulations if (isTRUE(PI)) { nint <- length(level) sim <- matrix(NA, nrow = npaths, ncol = h) if (!is.null(innov)) { if (length(innov) != h * npaths) { stop("Incorrect number of innovations, need h*npaths values") } innov <- matrix(innov, nrow = h, ncol = npaths) bootstrap <- FALSE } for (i in 1:npaths) sim[i, ] <- simulate(object, nsim = h, bootstrap = bootstrap, xreg = xreg, lambda = lambda, innov = innov[, i], ...) 
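# The interval bounds below are sample quantiles (type 8) of the simulated
# paths, one column per forecast horizon.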
lower <- apply(sim, 2, quantile, 0.5 - level / 200, type = 8) upper <- apply(sim, 2, quantile, 0.5 + level / 200, type = 8) if (nint > 1L) { lower <- ts(t(lower)) upper <- ts(t(upper)) } else { lower <- ts(matrix(lower, ncol = 1L)) upper <- ts(matrix(upper, ncol = 1L)) } tsp(lower) <- tsp(upper) <- tsp(fcast) } else { level <- NULL lower <- NULL upper <- NULL } out$mean <- fcast out$level <- level out$lower <- lower out$upper <- upper return(structure(out, class = "forecast")) } #' @rdname fitted.Arima #' @export fitted.modelAR <- function(object, h=1, ...) { if (h == 1) { return(object$fitted) } else { return(hfitted(object = object, h = h, FUN = "modelAR", ...)) } } #' @export print.modelAR <- function(x, digits = max(3, getOption("digits") - 3), ...) { cat("Series:", x$series, "\n") cat("Model: ", x$method, "\n") cat("Call: ") print(x$call) print(x$model) cat( "\nsigma^2 estimated as ", format(mean(residuals(x) ^ 2, na.rm = TRUE), digits = digits), "\n", sep = "" ) invisible(x) } #' @rdname is.ets #' @export is.modelAR <- function(x) { inherits(x, "modelAR") } forecast/R/acf.R0000644000176200001440000003437514474044013013152 0ustar liggesusers# Replacement for the acf() function. #' (Partial) Autocorrelation and Cross-Correlation Function Estimation #' #' The function \code{Acf} computes (and by default plots) an estimate of the #' autocorrelation function of a (possibly multivariate) time series. Function #' \code{Pacf} computes (and by default plots) an estimate of the partial #' autocorrelation function of a (possibly multivariate) time series. Function #' \code{Ccf} computes the cross-correlation or cross-covariance of two #' univariate series. #' #' The functions improve the \code{\link[stats]{acf}}, #' \code{\link[stats]{pacf}} and \code{\link[stats]{ccf}} functions. The main #' differences are that \code{Acf} does not plot a spike at lag 0 when #' \code{type=="correlation"} (which is redundant) and the horizontal axes show #' lags in time units rather than seasonal units. #' #' The tapered versions implement the ACF and PACF estimates and plots #' described in Hyndman (2015), based on the banded and tapered estimates of #' autocovariance proposed by McMurry and Politis (2010). #' #' @param x a univariate or multivariate (not Ccf) numeric time series object #' or a numeric vector or matrix. #' @param y a univariate numeric time series object or a numeric vector. #' @param lag.max maximum lag at which to calculate the acf. Default is #' $10*log10(N/m)$ where $N$ is the number of observations and $m$ the number #' of series. Will be automatically limited to one less than the number of #' observations in the series. #' @param type character string giving the type of acf to be computed. Allowed #' values are \dQuote{\code{correlation}} (the default), #' \dQuote{\code{covariance}} or \dQuote{\code{partial}}. #' @param plot logical. If \code{TRUE} (the default) the resulting acf, pacf or #' ccf is plotted. #' @param na.action function to handle missing values. Default is #' \code{\link[stats]{na.contiguous}}. Useful alternatives are #' \code{\link[stats]{na.pass}} and \code{\link{na.interp}}. #' @param demean Should covariances be about the sample means? #' @param calc.ci If \code{TRUE}, confidence intervals for the ACF/PACF #' estimates are calculated. #' @param level Percentage level used for the confidence intervals. #' @param nsim The number of bootstrap samples used in estimating the #' confidence intervals. #' @param ... Additional arguments passed to the plotting function. 
#' @return The \code{Acf}, \code{Pacf} and \code{Ccf} functions return objects #' of class "acf" as described in \code{\link[stats]{acf}} from the stats #' package. The \code{taperedacf} and \code{taperedpacf} functions return #' objects of class "mpacf". #' @author Rob J Hyndman #' @seealso \code{\link[stats]{acf}}, \code{\link[stats]{pacf}}, #' \code{\link[stats]{ccf}}, \code{\link{tsdisplay}} #' @references Hyndman, R.J. (2015). Discussion of ``High-dimensional #' autocovariance matrices and optimal linear prediction''. \emph{Electronic #' Journal of Statistics}, 9, 792-796. #' #' McMurry, T. L., & Politis, D. N. (2010). Banded and tapered estimates for #' autocovariance matrices and the linear process bootstrap. \emph{Journal of #' Time Series Analysis}, 31(6), 471-482. #' @keywords ts #' @examples #' #' Acf(wineind) #' Pacf(wineind) #' \dontrun{ #' taperedacf(wineind, nsim=50) #' taperedpacf(wineind, nsim=50) #' } #' #' @export Acf <- function(x, lag.max = NULL, type = c("correlation", "covariance", "partial"), plot = TRUE, na.action = na.contiguous, demean=TRUE, ...) { type <- match.arg(type) # Set maximum lag nseries <- NCOL(x) if (is.null(lag.max)) { lag.max <- as.integer(max( floor(10 * (log10(NROW(x)) - log10(nseries))), 2 * frequency(x) )) } acf.out <- stats::acf( x, plot = FALSE, lag.max = lag.max, type = type, na.action = na.action, demean = demean ) acf.out$tsp <- tsp(x) acf.out$periods <- attributes(x)$msts acf.out$series <- deparse(substitute(x)) # Make lags in integer units nlags <- dim(acf.out$lag)[1] if (type == "partial") { acf.out$lag[, , ] <- seq(nlags) } else { acf.out$lag[, , ] <- seq(nlags)-1 } # Plot if required if (plot) { plot.out <- acf.out # Hide 0 lag if autocorrelations if (type == "correlation") { for (i in seq(NCOL(x))) { plot.out$lag[1, i, i] <- 1 plot.out$acf[1, i, i] <- 0 } } if (nseries > 1) { plot(plot.out, ...) } else { # Check if there is a ylim input input_list <- as.list(substitute(list(...))) ylimarg <- is.element("ylim", names(input_list)) if (ylimarg) { plot(plot.out, xaxt = "n", ...) } else { ylim <- c(-1, 1) * 3 / sqrt(length(x)) ylim <- range(ylim, plot.out$acf) plot(plot.out, ylim = ylim, xaxt = "n", ...) 
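# The default ylim spans at least +/-3/sqrt(n), keeping the significance
# bounds drawn by plot.acf visible even when all autocorrelations are small.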
} # Make nice horizontal axis if (is.element("msts", class(x))) { seasonalaxis(attributes(x)$msts, nlags, type = "acf") } else { seasonalaxis(frequency(x), nlags, type = "acf") } if (type == "covariance") { axis(at = 0, side = 1) } } return(invisible(acf.out)) } else { return(acf.out) } } # Make nice horizontal axis with ticks at seasonal lags # Return tick points if breaks=TRUE seasonalaxis <- function(frequency, nlags, type, plot=TRUE) { # List of unlabelled tick points out2 <- NULL # Check for non-seasonal data if (length(frequency) == 1) { # Compute number of seasonal periods np <- trunc(nlags / frequency) evenfreq <- (frequency %% 2L) == 0L # Defaults for labelled tick points if (type == "acf") { out <- pretty(1:nlags) } else { out <- pretty(-nlags:nlags) } if (frequency == 1) { if (type == "acf" && nlags <= 16) { out <- 1:nlags } else if (type == "ccf" && nlags <= 8) { out <- (-nlags:nlags) } else { if (nlags <= 30 && type == "acf") { out2 <- 1:nlags } else if (nlags <= 15 && type == "ccf") { out2 <- (-nlags:nlags) } if (!is.null(out2)) { out <- pretty(out2) } } } else if (frequency > 1 && ((type == "acf" && np >= 2L) || (type == "ccf" && np >= 1L))) { if (type == "acf" && nlags <= 40) { out <- frequency * (1:np) out2 <- 1:nlags # Add half-years if (nlags <= 30 && evenfreq && np <= 3) { out <- c(out, frequency * ((1:np) - 0.5)) } } else if (type == "ccf" && nlags <= 20) { out <- frequency * (-np:np) out2 <- (-nlags:nlags) # Add half-years if (nlags <= 15 && evenfreq && np <= 3) { out <- c(out, frequency * ((-np:np) + 0.5)) } } else if (np < (12 - 4 * (type == "ccf"))) { out <- frequency * (-np:np) } } } else { # Determine which frequency to show np <- trunc(nlags / frequency) frequency <- frequency[which(np <= 16)] if (length(frequency) > 0L) { frequency <- min(frequency) } else { frequency <- 1 } out <- seasonalaxis(frequency, nlags, type, plot = FALSE) } if (plot) { axis(1, at = out) if (!is.null(out2)) { axis(1, at = out2, tcl = -0.2, labels = FALSE) } } else { return(out) } } #' @rdname Acf #' @export Pacf <- function(x, lag.max=NULL, plot = TRUE, na.action = na.contiguous, demean=TRUE, ...) { object <- Acf( x, lag.max = lag.max, type = "partial", na.action = na.action, demean = demean, plot = FALSE ) object$series <- deparse(substitute(x)) # Plot if required if (plot) { nlags <- dim(object$lag)[1] plot.out <- object # Check if there is a ylim input input_list <- as.list(substitute(list(...))) ylimarg <- is.element("ylim", names(input_list)) if (ylimarg) { plot(plot.out, xaxt = "n", ...) } else { ylim <- c(-1, 1) * 3 / sqrt(length(x)) ylim <- range(ylim, plot.out$acf) plot(plot.out, ylim = ylim, xaxt = "n", ...) } # Make nice horizontal axis if (is.element("msts", class(x))) { seasonalaxis(attributes(x)$msts, nlags, type = "acf") } else { seasonalaxis(frequency(x), nlags, type = "acf") } return(invisible(object)) } else { return(object) } } #' @rdname Acf #' @export Ccf <- function(x, y, lag.max=NULL, type=c("correlation", "covariance"), plot=TRUE, na.action=na.contiguous, ...) 
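# Usage illustration (not from the original source):
#   Ccf(mdeaths, fdeaths, lag.max = 24)  # cross-correlation of two monthly series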
{ type <- match.arg(type) if (is.null(lag.max)) { lag.max <- as.integer(max(floor(10 * log10(NROW(x))), 2 * frequency(x))) } ccf.out <- stats::ccf( x, y, plot = FALSE, type = type, lag.max = lag.max, na.action = na.action ) # Make lags in integer units nlags <- (dim(ccf.out$lag)[1] - 1) / 2 ccf.out$lag[, 1, 1] <- -nlags:nlags # Plot if required if (plot) { vnames <- c(deparse(substitute(x))[1L], deparse(substitute(y))[1L]) ccf.out$snames <- paste(vnames, collapse = " & ") plot(ccf.out, ylab = "CCF", xaxt = "n", ...) seasonalaxis(frequency(x), nlags, type = "ccf") return(invisible(ccf.out)) } else { return(ccf.out) } } kappa <- function(x) { k <- rep(0, length(x)) x <- abs(x) k[x <= 1] <- 1 k[x > 1 & x <= 2] <- 2 - x[x > 1 & x <= 2] return(k) } # McMurray-Politis estimate of ACF wacf <- function(x, lag.max = length(x) - 1) { n <- length(x) lag.max <- min(lag.max, n - 1) if (lag.max < 0) { stop("'lag.max' must be at least 0") } # Standard estimator acfest <- stats::acf( c(x), lag.max = lag.max, plot = FALSE, na.action = na.contiguous ) acfest$series <- deparse(substitute(x)) # Taper estimates s <- 1:length(acfest$acf[, , 1]) upper <- 2 * sqrt(log(n, 10) / n) ac <- abs(acfest$acf[, , 1]) # Find l: ac < upper for 5 consecutive lags j <- (ac < upper) l <- 0 k <- 1 N <- length(j) - 4 while (l < 1 && k <= N) { if (all(j[k:(k + 4)])) { l <- k } else { k <- k + 1 } } acfest$acf[, , 1] <- acfest$acf[, , 1] * kappa(s / l) # End of Tapering # Now do some shrinkage towards white noise using eigenvalues # Construct covariance matrix gamma <- acfest$acf[, , 1] s <- length(gamma) Gamma <- matrix(1, s, s) d <- row(Gamma) - col(Gamma) for (i in 1:(s - 1)) Gamma[d == i | d == (-i)] <- gamma[i + 1] # Compute eigenvalue decomposition ei <- eigen(Gamma) # Shrink eigenvalues d <- pmax(ei$values, 20 / n) # Construct new covariance matrix Gamma2 <- ei$vectors %*% diag(d) %*% t(ei$vectors) Gamma2 <- Gamma2 / mean(d) # Estimate new ACF d <- row(Gamma2) - col(Gamma2) for (i in 2:s) gamma[i] <- mean(Gamma2[d == (i - 1)]) acfest$acf[, , 1] <- gamma ############### end of shrinkage return(acfest) } # Find tapered PACF using LD recursions wpacf <- function(x, lag.max=length(x) - 1) { # Compute pacf as usual, just to set up structure out <- Pacf(x, lag.max = lag.max, plot = FALSE) # Compute acf using tapered estimate acvf <- wacf(x, lag.max = lag.max)$acf[, , 1] # Durbin-Levinson recursions # Modified from http://faculty.washington.edu/dbp/s519/R-code/LD-recursions.R p <- length(acvf) - 1 phis <- acvf[2] / acvf[1] pev <- rep(acvf[1], p + 1) pacf <- rep(phis, p) pev[2] <- pev[1] * (1 - phis ^ 2) if (p > 1) { for (k in 2:p) { old.phis <- phis phis <- rep(0, k) ## compute kth order pacf (reflection coefficient) phis[k] <- (acvf[k + 1] - sum(old.phis * acvf[k:2])) / pev[k] phis[1:(k - 1)] <- old.phis - phis[k] * rev(old.phis) pacf[k] <- phis[k] pev[k + 1] <- pev[k] * (1 - phis[k] ^ 2) # if(abs(pacf[k]) > 1) # warning("PACF larger than 1 in absolute value") } } out$acf[, , 1] <- pacf return(out) } # Function to produce new style plot of ACF or PACF with CI # x = time series #' @rdname Acf #' @export taperedacf <- function(x, lag.max=NULL, type=c("correlation", "partial"), plot=TRUE, calc.ci=TRUE, level=95, nsim=100, ...) 
{ type <- match.arg(type) if (is.null(lag.max)) { lag.max <- max(floor(20 * log10(length(x))), 4 * frequency(x)) } lag <- min(lag.max, length(x) - 1) if (type == "correlation") { z <- wacf(x, )$acf[2:(lag + 1), , 1] } else { z <- wpacf(x, )$acf[1:lag, , 1] } out <- list(z = z, lag = lag, type = type, x = x) if (calc.ci) { # Get confidence intervals for plots bootsim <- lpb(x, nsim = nsim) s1 <- matrix(0, nrow = lag, ncol = nsim) if (type == "correlation") { for (i in 1:nsim) s1[, i] <- wacf(bootsim[, i])$acf[2:(lag + 1), , 1] } else { for (i in 1:nsim) s1[, i] <- wpacf(bootsim[, i])$acf[1:lag, , 1] } prob <- (100 - level) / 200 out$upper <- apply(s1, 1, quantile, prob = 1 - prob) out$lower <- apply(s1, 1, quantile, prob = prob) } out <- structure(out, class = "mpacf") if (!plot) { return(out) } else { plot(out, ...) return(invisible(out)) } return(out) } #' @rdname Acf #' @export taperedpacf <- function(x, ...) { taperedacf(x, type = "partial", ...) } #' @export plot.mpacf <- function(x, xlim=NULL, ylim=NULL, xlab="Lag", ylab="", ...) { object <- x lagx <- 1:object$lag if (is.null(xlim)) { xlim <- c(1, object$lag) } if (is.null(ylim)) { ylim <- range(object$z, object$upper, object$lower) } if (ylab == "") { ylab <- ifelse(object$type == "partial", "PACF", "ACF") } plot( lagx, object$z, type = "n", xlim = xlim, ylim = ylim, xlab = xlab, ylab = ylab, xaxt = "n", ... ) grid(col = gray(.80), nx = NA, ny = NULL, lty = 1) abline(h = 0, col = gray(.4)) if (frequency(object$x) > 1) { axis(1, at = (0:100) * frequency(object$x)) for (i in 1:100) abline(v = (i - 1) * frequency(object$x), lty = 1, col = gray(0.80)) } else { axis(1) grid(col = gray(.80), ny = NA, lty = 1) } if (!is.null(object$lower)) { for (j in 1:object$lag) { polygon( lagx[j] + c(-0.55, 0.55, 0.55, -0.55), c(rep(object$lower[j], 2), rep(object$upper[j], 2)), col = gray(0.60), border = FALSE ) } # polygon(c(lagx,rev(lagx)),c(object$lower,rev(object$upper)),col=gray(.60),border=FALSE) } lines(lagx, object$z, lwd = 1.5) j <- (object$lower < 0 & object$upper > 0) points(lagx[j], object$z[j], pch = 1, cex = 0.5) points(lagx[!j], object$z[!j], pch = 19) } #' @rdname is.ets #' @export is.acf <- function(x) { inherits(x, "acf") } forecast/R/nnetar.R0000644000176200001440000005160214633662406013711 0ustar liggesusers# Defaults: # For non-seasonal data, p chosen using AIC from linear AR(p) model # For seasonal data, p chosen using AIC from linear AR(p) model after # seasonally adjusting with STL decomposition, and P=1 # size set to average of number of inputs and number of outputs: (p+P+1)/2 # if xreg is included then size = (p+P+ncol(xreg)+1)/2 #' Neural Network Time Series Forecasts #' #' Feed-forward neural networks with a single hidden layer and lagged inputs #' for forecasting univariate time series. #' #' A feed-forward neural network is fitted with lagged values of \code{y} as #' inputs and a single hidden layer with \code{size} nodes. The inputs are for #' lags 1 to \code{p}, and lags \code{m} to \code{mP} where #' \code{m=frequency(y)}. If \code{xreg} is provided, its columns are also #' used as inputs. If there are missing values in \code{y} or #' \code{xreg}, the corresponding rows (and any others which depend on them as #' lags) are omitted from the fit. A total of \code{repeats} networks are #' fitted, each with random starting weights. These are then averaged when #' computing forecasts. The network is trained for one-step forecasting. #' Multi-step forecasts are computed recursively. 
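#'
#' For example, with monthly data (\code{m = 12}), \code{p = 3} and
#' \code{P = 1}, the inputs used to predict \eqn{y_t} are \eqn{y_{t-1}},
#' \eqn{y_{t-2}}, \eqn{y_{t-3}} and \eqn{y_{t-12}}; a two-step-ahead forecast
#' then feeds the one-step forecast back in as the lag-1 input.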
#' #' For non-seasonal data, the fitted model is denoted as an NNAR(p,k) model, #' where k is the number of hidden nodes. This is analogous to an AR(p) model #' but with nonlinear functions. For seasonal data, the fitted model is called #' an NNAR(p,P,k)[m] model, which is analogous to an ARIMA(p,0,0)(P,0,0)[m] #' model but with nonlinear functions. #' #' @aliases print.nnetar print.nnetarmodels #' #' @param y A numeric vector or time series of class \code{ts}. #' @param p Embedding dimension for non-seasonal time series. Number of #' non-seasonal lags used as inputs. For non-seasonal time series, the default #' is the optimal number of lags (according to the AIC) for a linear AR(p) #' model. For seasonal time series, the same method is used but applied to #' seasonally adjusted data (from an stl decomposition). If set to zero to #' indicate that no non-seasonal lags should be included, then P must be at #' least 1 and a model with only seasonal lags will be fit. #' @param P Number of seasonal lags used as inputs. #' @param size Number of nodes in the hidden layer. Default is half of the #' number of input nodes (including external regressors, if given) plus 1. #' @param repeats Number of networks to fit with different random starting #' weights. These are then averaged when producing forecasts. #' @param xreg Optionally, a vector or matrix of external regressors, which #' must have the same number of rows as \code{y}. Must be numeric. #' @param model Output from a previous call to \code{nnetar}. If model is #' passed, this same model is fitted to \code{y} without re-estimating any #' parameters. #' @param subset Optional vector specifying a subset of observations to be used #' in the fit. Can be an integer index vector or a logical vector the same #' length as \code{y}. All observations are used by default. #' @param scale.inputs If TRUE, inputs are scaled by subtracting the column #' means and dividing by their respective standard deviations. If \code{lambda} #' is not \code{NULL}, scaling is applied after Box-Cox transformation. #' @param x Deprecated. Included for backwards compatibility. #' @param \dots Other arguments passed to \code{\link[nnet]{nnet}} for #' \code{nnetar}. #' @inheritParams forecast.ts #' #' @return Returns an object of class "\code{nnetar}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{nnetar}. #' #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{x}{The original time series.} #' \item{xreg}{The external regressors used in fitting (if given).} #' \item{residuals}{Residuals from the fitted model. 
That is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' \item{...}{Other arguments} #' #' @author Rob J Hyndman and Gabriel Caceres #' @keywords ts #' @examples #' fit <- nnetar(lynx) #' fcast <- forecast(fit) #' plot(fcast) #' #' ## Arguments can be passed to nnet() #' fit <- nnetar(lynx, decay=0.5, maxit=150) #' plot(forecast(fit)) #' lines(lynx) #' #' ## Fit model to first 100 years of lynx data #' fit <- nnetar(window(lynx,end=1920), decay=0.5, maxit=150) #' plot(forecast(fit,h=14)) #' lines(lynx) #' #' ## Apply fitted model to later data, including all optional arguments #' fit2 <- nnetar(window(lynx,start=1921), model=fit) #' #' @export nnetar <- function(y, p, P=1, size, repeats=20, xreg=NULL, lambda=NULL, model=NULL, subset=NULL, scale.inputs=TRUE, x=y, ...) { useoldmodel <- FALSE yname <- deparse(substitute(y)) if (!is.null(model)) { # Use previously fitted model useoldmodel <- TRUE # Check for conflicts between new and old data: # Check model class if (!is.nnetar(model)) { stop("Model must be a nnetar object") } # Check new data m <- max(round(frequency(model$x)), 1L) minlength <- max(c(model$p, model$P * m)) + 1 if (length(x) < minlength) { stop(paste("Series must be at least of length", minlength, "to use fitted model")) } if (tsp(as.ts(x))[3] != m) { warning(paste("Data frequency doesn't match fitted model, coercing to frequency =", m)) x <- ts(x, frequency = m) } # Check xreg if (!is.null(model$xreg)) { if (is.null(xreg)) { stop("No external regressors provided") } if (NCOL(xreg) != NCOL(model$xreg)) { stop("Number of external regressors does not match fitted model") } } # Update parameters with previous model lambda <- model$lambda size <- model$size p <- model$p P <- model$P if (p == 0 && P == 0){ stop("Both p = 0 and P = 0 in supplied 'model' object") } if (P > 0) { lags <- sort(unique(c(seq_len(p), m * (seq_len(P))))) } else { lags <- seq_len(p) } if (is.null(model$scalex)) { scale.inputs <- FALSE } } else { # when not using an old model if (length(y) < 3) { stop("Not enough data to fit a model") } # Check for constant data in time series constant_data <- is.constant(na.interp(x)) if (constant_data){ warning("Constant data, setting p=1, P=0, lambda=NULL, scale.inputs=FALSE") scale.inputs <- FALSE lambda <- NULL p <- 1 P <- 0 } ## Check for constant data in xreg if (!is.null(xreg)){ constant_xreg <- any(apply(as.matrix(xreg), 2, function(x) is.constant(na.interp(x)))) if (constant_xreg){ warning("Constant xreg column, setting scale.inputs=FALSE") scale.inputs <- FALSE } } } # Check for NAs in x if (any(is.na(x))) { warning("Missing values in x, omitting rows") } # Transform data if (!is.null(lambda)) { xx <- BoxCox(x, lambda) lambda <- attr(xx, "lambda") } else { xx <- x } ## Check whether to use a subset of the data xsub <- rep(TRUE, length(x)) if (is.numeric(subset)) { xsub[-subset] <- FALSE } if (is.logical(subset)) { xsub <- subset } # Scale series scalex <- NULL if (scale.inputs) { if (useoldmodel) { scalex <- model$scalex } else { tmpx <- scale(xx[xsub], center = TRUE, scale = TRUE) scalex <- list( center = attr(tmpx, "scaled:center"), scale = attr(tmpx, "scaled:scale") ) } xx <- scale(xx, center = scalex$center, scale = scalex$scale) xx <- xx[, 1] } # Check xreg class & dim xxreg <- NULL scalexreg <- NULL if (!is.null(xreg)) { xxreg <- xreg <- as.matrix(xreg) if (length(x) != NROW(xreg)) { stop("Number of rows in xreg does not match series length") } # Check for NAs in xreg if (any(is.na(xreg))) { warning("Missing values in xreg, 
omitting rows") } # Scale xreg if (scale.inputs) { if (useoldmodel) { scalexreg <- model$scalexreg } else { tmpx <- scale(xxreg[xsub, ], center = TRUE, scale = TRUE) scalexreg <- list( center = attr(tmpx, "scaled:center"), scale = attr(tmpx, "scaled:scale") ) } xxreg <- scale(xxreg, center = scalexreg$center, scale = scalexreg$scale) } } # Set up lagged matrix n <- length(xx) xx <- as.ts(xx) m <- max(round(frequency(xx)), 1L) if (!useoldmodel) { if (m == 1) { if (missing(p)) { p <- max(length(ar(na.interp(xx))$ar), 1) } # For non-seasonal data also use default calculation for p if that # argument is 0, but issue a warning if (p == 0){ warning("Cannot set p = 0 for non-seasonal data; using default calculation for p") p <- max(length(ar(na.interp(xx))$ar), 1) } if (p >= n) { warning("Reducing number of lagged inputs due to short series") p <- n - 1 } lags <- seq_len(p) if (P > 1) { warning("Non-seasonal data, ignoring seasonal lags") } P <- 0 } else { if (missing(p)) { if (n > 2 * m) { x.sa <- seasadj(mstl(na.interp(xx))) } else { x.sa <- na.interp(xx) } p <- max(length(ar(x.sa)$ar), 1) } if (p == 0 && P == 0){ stop("'p' and 'P' cannot both be zero") } if (p >= n) { warning("Reducing number of lagged inputs due to short series") p <- n - 1 } if (P > 0 && n >= m * P + 2) { lags <- sort(unique(c(seq_len(p), m * (seq_len(P))))) } else { lags <- seq_len(p) if (P > 0) { warning("Series too short for seasonal lags") P <- 0 } } } } maxlag <- max(lags) nlag <- length(lags) y <- xx[-(1:maxlag)] lags.X <- matrix(NA_real_, ncol = nlag, nrow = n - maxlag) for (i in 1:nlag) lags.X[, i] <- xx[(maxlag - lags[i] + 1):(n - lags[i])] # Add xreg into lagged matrix lags.X <- cbind(lags.X, xxreg[-(1:maxlag), , drop = FALSE]) if (missing(size)) { size <- round((NCOL(lags.X) + 1) / 2) } # Remove missing values if present j <- complete.cases(lags.X, y) ## Remove values not in subset j <- j & xsub[-(1:maxlag)] ## Stop if there's no data to fit (e.g. due to NAs or NaNs) if (NROW(lags.X[j,, drop=FALSE]) == 0) { stop("No data to fit (possibly due to NA or NaN)") } ## Fit average ANN. if (useoldmodel) { fit <- oldmodel_avnnet(lags.X[j, , drop = FALSE], y[j], size = size, model) } else { fit <- avnnet(lags.X[j, , drop=FALSE], y[j], size = size, repeats = repeats, ...) } # Return results out <- list() out$x <- as.ts(x) out$m <- m out$p <- p out$P <- P out$scalex <- scalex out$scalexreg <- scalexreg out$size <- size out$xreg <- xreg out$lambda <- lambda out$subset <- (1:length(x))[xsub] out$model <- fit out$nnetargs <- list(...) if (useoldmodel) { out$nnetargs <- model$nnetargs } if (NROW(lags.X[j,, drop=FALSE]) == 1){ fits <- c(rep(NA_real_, maxlag), mean(sapply(fit, predict))) } else{ fits <- c(rep(NA_real_, maxlag), rowMeans(sapply(fit, predict))) } if (scale.inputs) { fits <- fits * scalex$scale + scalex$center } fits <- ts(fits) if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda) } out$fitted <- ts(rep(NA_real_, length(out$x))) out$fitted[c(rep(TRUE, maxlag), j)] <- fits out$fitted <- copy_msts(out$x, out$fitted) out$residuals <- out$x - out$fitted out$lags <- lags out$series <- yname out$method <- paste("NNAR(", p, sep = "") if (P > 0) { out$method <- paste(out$method, ",", P, sep = "") } out$method <- paste(out$method, ",", size, ")", sep = "") if (P > 0) { out$method <- paste(out$method, "[", m, "]", sep = "") } out$call <- match.call() return(structure(out, class = c("nnetar"))) } # Aggregate several neural network models avnnet <- function(x, y, repeats, linout=TRUE, trace=FALSE, ...) 
{ mods <- list() for (i in 1:repeats) mods[[i]] <- nnet::nnet(x, y, linout = linout, trace = trace, ...) return(structure(mods, class = "nnetarmodels")) } # Fit old model to new data oldmodel_avnnet <- function(x, y, size, model) { repeats <- length(model$model) args <- list(x = x, y = y, size = size, linout = 1, trace = FALSE) # include additional nnet arguments args <- c(args, model$nnetargs) # set iterations to zero (i.e. weights stay fixed) args$maxit <- 0 mods <- list() for (i in 1:repeats) { args$Wts <- model$model[[i]]$wts mods[[i]] <- do.call(nnet::nnet, args) } return(structure(mods, class = "nnetarmodels")) } #' @export print.nnetarmodels <- function(x, ...) { cat(paste("\nAverage of", length(x), "networks, each of which is\n")) print(x[[1]]) } #' Forecasting using neural network models #' #' Returns forecasts and other information for univariate neural network #' models. #' #' Prediction intervals are calculated through simulations and can be slow. #' Note that if the network is too complex and overfits the data, the residuals #' can be arbitrarily small; if used for prediction interval calculations, they #' could lead to misleadingly small values. It is possible to use out-of-sample #' residuals to ameliorate this, see examples. #' #' @param object An object of class "\code{nnetar}" resulting from a call to #' \code{\link{nnetar}}. #' @param h Number of periods for forecasting. If \code{xreg} is used, \code{h} #' is ignored and the number of forecast periods is set to the number of rows #' of \code{xreg}. #' @param PI If TRUE, prediction intervals are produced, otherwise only point #' forecasts are calculated. If \code{PI} is FALSE, then \code{level}, #' \code{fan}, \code{bootstrap} and \code{npaths} are all ignored. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is #' suitable for fan plots. #' @param xreg Future values of external regressor variables. #' @param bootstrap If \code{TRUE}, then prediction intervals computed using #' simulations with resampled residuals rather than normally distributed #' errors. Ignored if \code{innov} is not \code{NULL}. #' @param npaths Number of sample paths used in computing simulated prediction #' intervals. #' @param innov Values to use as innovations for prediction intervals. Must be #' a matrix with \code{h} rows and \code{npaths} columns (vectors are coerced #' into a matrix). If present, \code{bootstrap} is ignored. #' @param ... Additional arguments passed to \code{\link{simulate.nnetar}} #' @inheritParams forecast.ts #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.nnetar}. 
#' #' An object of class "\code{forecast}" is a list containing at least the #' following elements: #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} #' \item{upper}{Upper limits for prediction intervals} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The original time series (either \code{object} itself or the time series #' used to create the model stored as \code{object}).} #' \item{xreg}{The external regressors used in fitting (if given).} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' \item{...}{Other arguments} #' #' @author Rob J Hyndman and Gabriel Caceres #' @seealso \code{\link{nnetar}}. #' @keywords ts #' @examples #' ## Fit & forecast model #' fit <- nnetar(USAccDeaths, size=2) #' fcast <- forecast(fit, h=20) #' plot(fcast) #' #' \dontrun{ #' ## Include prediction intervals in forecast #' fcast2 <- forecast(fit, h=20, PI=TRUE, npaths=100) #' plot(fcast2) #' #' ## Set up out-of-sample innovations using cross-validation #' fit_cv <- CVar(USAccDeaths, size=2) #' res_sd <- sd(fit_cv$residuals, na.rm=TRUE) #' myinnovs <- rnorm(20*100, mean=0, sd=res_sd) #' ## Forecast using new innovations #' fcast3 <- forecast(fit, h=20, PI=TRUE, npaths=100, innov=myinnovs) #' plot(fcast3) #' } #' #' @export forecast.nnetar <- function(object, h=ifelse(object$m > 1, 2 * object$m, 10), PI=FALSE, level=c(80, 95), fan=FALSE, xreg=NULL, lambda=object$lambda, bootstrap=FALSE, npaths=1000, innov=NULL, ...) { # require(nnet) out <- object tspx <- tsp(out$x) # if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } # Check if xreg was used in fitted model if (is.null(object$xreg)) { if (!is.null(xreg)) { warning("External regressors were not used in fitted model, xreg will be ignored") } xreg <- NULL } else { if (is.null(xreg)) { stop("No external regressors provided") } xreg <- as.matrix(xreg) if (NCOL(xreg) != NCOL(object$xreg)) { stop("Number of external regressors does not match fitted model") } if(!identical(colnames(xreg), colnames(object$xreg))){ warning("xreg contains different column names from the xreg used in training. 
Please check that the regressors are in the same order.") } h <- NROW(xreg) } fcast <- numeric(h) xx <- object$x xxreg <- xreg if (!is.null(lambda)) { xx <- BoxCox(xx, lambda) lambda <- attr(xx, "lambda") } # Check and apply scaling of fitted model if (!is.null(object$scalex)) { xx <- scale(xx, center = object$scalex$center, scale = object$scalex$scale) if (!is.null(xreg)) { xxreg <- scale(xreg, center = object$scalexreg$center, scale = object$scalexreg$scale) } } # Get lags used in fitted model lags <- object$lags maxlag <- max(lags) flag <- rev(tail(xx, n = maxlag)) # Iterative 1-step forecast for (i in 1:h) { newdata <- c(flag[lags], xxreg[i, ]) if (any(is.na(newdata))) { stop("I can't forecast when there are missing values near the end of the series.") } fcast[i] <- mean(sapply(object$model, predict, newdata = newdata)) flag <- c(fcast[i], flag[-maxlag]) } # Re-scale point forecasts if (!is.null(object$scalex)) { fcast <- fcast * object$scalex$scale + object$scalex$center } # Add ts properties fcast <- ts(fcast, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) # Back-transform point forecasts if (!is.null(lambda)) { fcast <- InvBoxCox(fcast, lambda) } # Compute prediction intervals using simulations if (isTRUE(PI)) { nint <- length(level) sim <- matrix(NA, nrow = npaths, ncol = h) if (!is.null(innov)) { if (length(innov) != h * npaths) { stop("Incorrect number of innovations, need h*npaths values") } innov <- matrix(innov, nrow = h, ncol = npaths) bootstrap <- FALSE } for (i in 1:npaths) sim[i, ] <- simulate(object, nsim = h, bootstrap = bootstrap, xreg = xreg, lambda = lambda, innov = innov[, i], ...) lower <- apply(sim, 2, quantile, 0.5 - level / 200, type = 8, na.rm = TRUE) upper <- apply(sim, 2, quantile, 0.5 + level / 200, type = 8, na.rm = TRUE) if (nint > 1L) { lower <- ts(t(lower)) upper <- ts(t(upper)) } else { lower <- ts(matrix(lower, ncol = 1L)) upper <- ts(matrix(upper, ncol = 1L)) } out$lower <- future_msts(out$x, lower) out$upper <- future_msts(out$x, upper) } else { level <- NULL lower <- NULL upper <- NULL } out$mean <- future_msts(out$x, fcast) out$level <- level return(structure(out, class = "forecast")) } #' @rdname fitted.Arima #' @export fitted.nnetar <- function(object, h=1, ...) { if (h == 1) { return(object$fitted) } else { return(hfitted(object = object, h = h, FUN = "nnetar", ...)) } } #' @export print.nnetar <- function(x, digits = max(3, getOption("digits") - 3), ...) { cat("Series:", x$series, "\n") cat("Model: ", x$method, "\n") # cat(" one hidden layer with",x$size,"nodes\n") cat("Call: ") print(x$call) print(x$model) cat( "\nsigma^2 estimated as ", format(mean(residuals(x) ^ 2, na.rm = TRUE), digits = digits), "\n", sep = "" ) invisible(x) } #' @rdname is.ets #' @export is.nnetar <- function(x) { inherits(x, "nnetar") } #' @rdname is.ets #' @export is.nnetarmodels <- function(x) { inherits(x, "nnetarmodels") } # Scale a univariate time series #' @export scale.ts <- function(x, center=TRUE, scale=TRUE) { tspx <- tsp(x) x <- as.ts(scale.default(x, center = center, scale = scale)) tsp(x) <- tspx return(x) } forecast/R/simulate.R0000644000176200001440000006623214323125536014244 0ustar liggesusers#' Simulation from a time series model #' #' Returns a time series based on the model object \code{object}. #' #' With \code{simulate.Arima()}, the \code{object} should be produced by #' \code{\link{Arima}} or \code{\link{auto.arima}}, rather than #' \code{\link[stats]{arima}}. 
By default, the error series is assumed normally #' distributed and generated using \code{\link[stats]{rnorm}}. If \code{innov} #' is present, it is used instead. If \code{bootstrap=TRUE} and #' \code{innov=NULL}, the residuals are resampled instead. #' #' When \code{future=TRUE}, the sample paths are conditional on the data. When #' \code{future=FALSE} and the model is stationary, the sample paths do not #' depend on the data at all. When \code{future=FALSE} and the model is #' non-stationary, the location of the sample paths is arbitrary, so they all #' start at the value of the first observation. #' #' @param object An object of class "\code{ets}", "\code{Arima}", "\code{ar}" #' or "\code{nnetar}". #' @param nsim Number of periods for the simulated series. Ignored if either #' \code{xreg} or \code{innov} are not \code{NULL}. Otherwise the default is #' the length of series used to train model (or 100 if no data found). #' @param seed Either \code{NULL} or an integer that will be used in a call to #' \code{\link[base]{set.seed}} before simulating the time series. The default, #' \code{NULL}, will not change the random generator state. #' @param future Produce sample paths that are future to and conditional on the #' data in \code{object}. Otherwise simulate unconditionally. #' @param bootstrap Do simulation using resampled errors rather than normally #' distributed errors or errors provided as \code{innov}. #' @param innov A vector of innovations to use as the error series. Ignored if #' \code{bootstrap==TRUE}. If not \code{NULL}, the value of \code{nsim} is set #' to length of \code{innov}. #' @param xreg New values of \code{xreg} to be used for forecasting. The value #' of \code{nsim} is set to the number of rows of \code{xreg} if it is not #' \code{NULL}. #' @param ... Other arguments, not currently used. #' @inheritParams forecast.ts #' #' @return An object of class "\code{ts}". #' @author Rob J Hyndman #' @seealso \code{\link{ets}}, \code{\link{Arima}}, \code{\link{auto.arima}}, #' \code{\link{ar}}, \code{\link{arfima}}, \code{\link{nnetar}}. #' @keywords ts #' @examples #' fit <- ets(USAccDeaths) #' plot(USAccDeaths, xlim = c(1973, 1982)) #' lines(simulate(fit, 36), col = "red") #' @export simulate.ets <- function(object, nsim = length(object$x), seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, ...) 
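# The body below saves the caller's RNG state when a seed is supplied and
# restores it on exit, so simulate() leaves the global random stream unchanged.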
{ if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv, inherits = FALSE)) { runif(1) } if (is.null(seed)) { RNGstate <- get(".Random.seed", envir = .GlobalEnv) } else { R.seed <- get(".Random.seed", envir = .GlobalEnv) set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } if (!is.null(object$x)) { if (is.null(tsp(object$x))) { object$x <- ts(object$x, frequency = 1, start = 1) } } else { if (nsim == 0L) { nsim <- 100 } object$x <- ts(10, frequency = object$m, start = 1 / object$m) future <- FALSE } if (future) { initstate <- object$states[length(object$x) + 1, ] } else { # choose a random starting point initstate <- object$states[sample(1:length(object$x), 1), ] } if (bootstrap) { res <- na.omit(c(object$residuals) - mean(object$residuals, na.rm = TRUE)) e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { e <- rnorm(nsim, 0, sqrt(object$sigma2)) } else if (length(innov) == nsim) { e <- innov } else { stop("Length of innov must be equal to nsim") } if (object$components[1] == "M") { e <- pmax(-1, e) } tmp <- ts(.C( "etssimulate", as.double(initstate), as.integer(object$m), as.integer(switch(object$components[1], "A" = 1, "M" = 2 )), as.integer(switch(object$components[2], "N" = 0, "A" = 1, "M" = 2 )), as.integer(switch(object$components[3], "N" = 0, "A" = 1, "M" = 2 )), as.double(object$par["alpha"]), as.double(ifelse(object$components[2] == "N", 0, object$par["beta"])), as.double(ifelse(object$components[3] == "N", 0, object$par["gamma"])), as.double(ifelse(object$components[4] == "FALSE", 1, object$par["phi"])), as.integer(nsim), as.double(numeric(nsim)), as.double(e), PACKAGE = "forecast" )[[11]], frequency = object$m, start = ifelse(future, tsp(object$x)[2] + 1 / tsp(object$x)[3], tsp(object$x)[1])) if (is.na(tmp[1])) { stop("Problem with multiplicative damped trend") } if (!is.null(object$lambda)) { tmp <- InvBoxCox(tmp, object$lambda) } return(tmp) } # Simulate ARIMA model starting with observed data x # Some of this function is borrowed from the arima.sim() function in the stats package. # Note that myarima.sim() does simulation conditional on the values of observed x, whereas # arima.sim() is unconditional on any observed x. myarima.sim <- function(model, n, x, e, ...) 
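# Worked illustration (hedged, not code from the package): for a stationary
# AR(1) with coefficient phi fitted to x of length n, a conditional path
# continues from the last observation,
#   x[n+1] = phi * x[n] + e[1],  x[n+2] = phi * x[n+1] + e[2], ...
# whereas arima.sim() would generate an unconditional realisation.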
{ start.innov <- residuals(model) innov <- e data <- x # Remove initial NAs first.nonmiss <- which(!is.na(x))[1] if (first.nonmiss > 1) { tsp.x <- tsp(x) start.x <- tsp.x[1] + (first.nonmiss - 1) / tsp.x[3] x <- window(x, start = start.x) start.innov <- window(start.innov, start = start.x) } model$x <- x n.start <- length(x) x <- ts(c(start.innov, innov), start = 1 - n.start, frequency = model$seasonal.period) flag.noadjust <- FALSE if (is.null(tsp(data))) { data <- ts(data, frequency = 1, start = 1) } if (!is.list(model)) { stop("'model' must be a list") } if (n <= 0L) { stop("'n' must be strictly positive") } p <- length(model$ar) q <- length(model$ma) d <- 0 D <- model$seasonal.difference m <- model$seasonal.period if (!is.null(ord <- model$order)) { if (length(ord) != 3L) { stop("'model$order' must be of length 3") } if (p != ord[1L]) { stop("inconsistent specification of 'ar' order") } if (q != ord[3L]) { stop("inconsistent specification of 'ma' order") } d <- ord[2L] if (d != round(d) || d < 0) { stop("number of differences must be a non-negative integer") } } if (p) { minroots <- min(Mod(polyroot(c(1, -model$ar)))) if (minroots <= 1) { stop("'ar' part of model is not stationary") } } if (length(model$ma)) { # MA filtering x <- stats::filter(x, c(1, model$ma), method = "convolution", sides = 1L) x[seq_along(model$ma)] <- 0 } ## AR "filtering" len.ar <- length(model$ar) if (length(model$ar) && (len.ar <= length(data))) { if ((D != 0) && (d != 0)) { diff.data <- diff(data, lag = 1, differences = d) diff.data <- diff(diff.data, lag = m, differences = D) } else if ((D != 0) && (d == 0)) { diff.data <- diff(data, lag = model$seasonal.period, differences = D) } else if ((D == 0) && (d != 0)) { diff.data <- diff(data, lag = 1, differences = d) } else { diff.data <- data } x.new.innovations <- x[(length(start.innov) + 1):length(x)] x.with.data <- c(diff.data, x.new.innovations) for (i in (length(diff.data) + 1):length(x.with.data)) { lagged.x.values <- x.with.data[(i - len.ar):(i - 1)] ar.coefficients <- model$ar[length(model$ar):1] sum.multiplied.x <- sum((lagged.x.values * ar.coefficients)[abs(ar.coefficients) > .Machine$double.eps]) x.with.data[i] <- x.with.data[i] + sum.multiplied.x } x.end <- x.with.data[(length(diff.data) + 1):length(x.with.data)] x <- ts(x.end, start = 1, frequency = model$seasonal.period) flag.noadjust <- TRUE } else if (length(model$ar)) # but data too short { # AR filtering for all other cases where AR is used. x <- stats::filter(x, model$ar, method = "recursive") } if ((d == 0) && (D == 0) && (flag.noadjust == FALSE)) # Adjust to ensure end matches approximately { # Last 20 diffs if (n.start >= 20) { xdiff <- (model$x - x[1:n.start])[n.start - (19:0)] } else { xdiff <- model$x - x[1:n.start] } # If all same sign, choose last if (all(sign(xdiff) == 1) || all(sign(xdiff) == -1)) { xdiff <- xdiff[length(xdiff)] } else { # choose mean.
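# Using the mean of these recent discrepancies gives a stable estimate of
# the remaining level offset between the observed series and the filtered
# reconstruction, which is only determined up to initialization effects.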
xdiff <- mean(xdiff) } x <- x + xdiff } if ((n.start > 0) && (flag.noadjust == FALSE)) { x <- x[-(1:n.start)] } ######## Undo all differences if ((D > 0) && (d == 0)) { # Seasonal undifferencing, if there is no regular differencing i <- length(data) - D * m + 1 seasonal.xi <- data[i:length(data)] length.s.xi <- length(seasonal.xi) x <- diffinv(x, lag = m, differences = D, xi = seasonal.xi)[-(1:length.s.xi)] } else if ((d > 0) && (D == 0)) { # Regular undifferencing, if there is no seasonal differencing x <- diffinv(x, differences = d, xi = data[length(data) - (d:1) + 1])[-(1:d)] } else if ((d > 0) && (D > 0)) { # Undifferencing for where the differencing is both Seasonal and Non-Seasonal # Regular first delta.four <- diff(data, lag = m, differences = D) regular.xi <- delta.four[(length(delta.four) - D):length(delta.four)] x <- diffinv(x, differences = d, xi = regular.xi[length(regular.xi) - (d:1) + 1])[-(1:d)] # Then seasonal i <- length(data) - D * m + 1 seasonal.xi <- data[i:length(data)] length.s.xi <- length(seasonal.xi) x <- diffinv(x, lag = m, differences = D, xi = seasonal.xi) x <- x[-(1:length.s.xi)] } x <- ts(x[1:n], frequency = frequency(data), start = tsp(data)[2] + 1 / tsp(data)[3]) return(x) } #' @rdname simulate.ets #' @export simulate.Arima <- function(object, nsim = length(object$x), seed = NULL, xreg = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ...) { # Error check: if (object$arma[7] < 0) { stop("Value for seasonal difference is < 0. Must be >= 0") } else if ((sum(object$arma[c(3, 4, 7)]) > 0) && (object$arma[5] < 2)) { stop("Invalid value for seasonal period") } # Check if data is included x <- object$x <- getResponse(object) if (is.null(x)) { n <- 0 future <- FALSE if (nsim == 0L) { nsim <- 100 } } else { if (is.null(tsp(x))) { x <- ts(x, frequency = 1, start = 1) } if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } n <- length(x) } # Check xreg if (!is.null(xreg)) { xreg <- as.matrix(xreg) nsim <- nrow(xreg) } use.drift <- is.element("drift", names(object$coef)) usexreg <- (!is.null(xreg) | use.drift | !is.null(object$xreg)) xm <- oldxm <- 0 if (use.drift) { # Remove existing drift column if (NCOL(xreg) == 1 && all(diff(xreg) == 1)) { xreg <- NULL } else if (!is.null(colnames(xreg))) { xreg <- xreg[, colnames(xreg) != "drift", drop = FALSE] } # Create new drift column xreg <- cbind(drift = as.matrix(seq(nsim) + n * future), xreg) } # Check xreg has the correct dimensions if (usexreg) { if (is.null(xreg)) { stop("xreg argument missing") } else if (is.null(object$xreg)) { stop("xreg not required") } else if (NCOL(xreg) != NCOL(object$xreg)) { stop("xreg has incorrect dimension.") } } ######## Random Seed Code if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv)) { runif(1) } if (is.null(seed)) { RNGstate <- .Random.seed } else { R.seed <- .Random.seed set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } ######## End Random seed code # Check for seasonal ARMA components and set flag accordingly. 
This will be used later in myarima.sim() flag.s.arma <- (sum(object$arma[c(3, 4)]) > 0) # Check for Seasonality in ARIMA model if (sum(object$arma[c(3, 4, 7)]) > 0) { # return(simulateSeasonalArima(object, nsim=nsim, seed=seed, xreg=xreg, future=future, bootstrap=bootstrap, ...)) if (sum(object$model$phi) == 0) { ar <- NULL } else { ar <- as.double(object$model$phi) } if (sum(object$model$theta) == 0) { ma <- NULL } else { ma <- as.double(object$model$theta) } order <- c(length(ar), object$arma[6], length(ma)) if (future) { model <- list( order = order, ar = ar, ma = ma, sd = sqrt(object$sigma2), residuals = residuals(object), seasonal.difference = object$arma[7], seasonal.period = object$arma[5], flag.seasonal.arma = flag.s.arma, seasonal.order = object$arma[c(3, 7, 4)] ) } else { model <- list(order = order, ar = ar, ma = ma, sd = sqrt(object$sigma2), residuals = residuals(object)) } flag.seasonal.diff <- (object$arma[7] > 0) } else { #### Non-Seasonal ARIMA specific code: Set up the model order <- object$arma[c(1, 6, 2)] if (order[1] > 0) { ar <- object$model$phi[1:order[1]] } else { ar <- NULL } if (order[3] > 0) { ma <- object$model$theta[1:order[3]] } else { ma <- NULL } if (object$arma[2] != length(ma)) { stop("MA length wrong") } else if (object$arma[1] != length(ar)) { stop("AR length wrong") } if (future) { model <- list( order = object$arma[c(1, 6, 2)], ar = ar, ma = ma, sd = sqrt(object$sigma2), residuals = residuals(object), seasonal.difference = 0, flag.seasonal.arma = flag.s.arma, seasonal.order = c(0, 0, 0), seasonal.period = 1 ) } else { model <- list(order = object$arma[c(1, 6, 2)], ar = ar, ma = ma, sd = sqrt(object$sigma2), residuals = residuals(object)) } flag.seasonal.diff <- FALSE ### End non-seasonal ARIMA specific code } if (bootstrap) { res <- na.omit(c(model$residuals) - mean(model$residuals, na.rm = TRUE)) e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { e <- rnorm(nsim, 0, model$sd) } else if (length(innov) == nsim) { e <- innov } else { stop("Length of innov must be equal to nsim") } narma <- sum(object$arma[1L:4L]) if (length(object$coef) > narma) { if (names(object$coef)[narma + 1L] == "intercept") { xreg <- cbind(intercept = rep(1, nsim), xreg) if (future) { object$xreg <- cbind(intercept = rep(1, n), object$xreg) } } if (!is.null(xreg)) { xm <- if (narma == 0) { drop(as.matrix(xreg) %*% object$coef) } else { drop(as.matrix(xreg) %*% object$coef[-(1L:narma)]) } if (future) { oldxm <- if (narma == 0) { drop(as.matrix(object$xreg) %*% object$coef) } else { drop(as.matrix(object$xreg) %*% object$coef[-(1L:narma)]) } } } } if (future) { sim <- myarima.sim(model, nsim, x - oldxm, e = e) + xm } else { if (flag.seasonal.diff) { zeros <- object$arma[5] * object$arma[7] sim <- arima.sim(model, nsim, innov = e) sim <- diffinv(sim, lag = object$arma[5], differences = object$arma[7])[-(1:zeros)] sim <- tail(sim, nsim) + xm } else { sim <- tail(arima.sim(model, nsim, innov = e), nsim) + xm } if (!is.null(x)) { sim <- ts(sim, start = tsp(x)[1], frequency = tsp(x)[3]) } else { sim <- ts(sim, frequency = object$frequency) } # If model is non-stationary, then condition simulated data on first observation if (!is.null(x) & (model$order[2] > 0 || flag.seasonal.diff)) { sim <- sim - sim[1] + x[1] } } if (!is.null(lambda)) { sim <- InvBoxCox(sim, lambda) } return(sim) } #' @rdname simulate.ets #' @export simulate.ar <- function(object, nsim = object$n.used, seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, ...) 
{ if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv)) { runif(1) } if (is.null(seed)) { RNGstate <- .Random.seed } else { R.seed <- .Random.seed set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } object$x <- getResponse(object) if (is.null(object$x)) { future <- FALSE x.mean <- 0 if (is.null(nsim)) { nsim <- 100 } } else { x.mean <- object$x.mean object$x <- object$x - x.mean } if (future) { model <- list(ar = object$ar, sd = sqrt(object$var.pred), residuals = object$resid, seasonal.difference = 0, seasonal.period = 1, flag.seasonal.arma = FALSE) } else { model <- list(ar = object$ar, sd = sqrt(object$var.pred), residuals = object$resid) } if (bootstrap) { res <- na.omit(c(model$residuals) - mean(model$residuals, na.rm = TRUE)) e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { e <- rnorm(nsim, 0, model$sd) } else if (length(innov) == nsim) { e <- innov } else { stop("Length of innov must be equal to nsim") } if (future) { return(myarima.sim(model, nsim, x = object$x, e = e) + x.mean) } else { return(arima.sim(model, nsim, innov = e) + x.mean) } } #' @rdname simulate.ets #' @export simulate.lagwalk <- function(object, nsim = length(object$x), seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ...) { if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv)) { runif(1) } if (is.null(seed)) { RNGstate <- .Random.seed } else { R.seed <- .Random.seed set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } if (bootstrap) { res <- na.omit(c(object$residuals) - mean(object$residuals, na.rm = TRUE)) e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { se <- sqrt(object$sigma2) e <- rnorm(nsim, 0, se) } else { e <- innov } # Cumulate errors lag_grp <- rep_len(seq_len(object$par$lag), length(e)) e <- split(e, lag_grp) cumulative_e <- unsplit(lapply(e, cumsum), lag_grp) # Find starting position x <- object$x if (is.null(x)) { future <- FALSE if (nsim == 0L) { nsim <- 100 } x <- 1 } if (!is.null(lambda)) { x <- BoxCox(x, lambda) } if (future) { start <- tail(x, object$par$lag) } else { start <- head(x, object$par$lag) } # Handle missing values if (any(na_pos <- is.na(start))) { if (!is.null(innov)) { warning("Missing values encountered at simulation starting values, simulating starting values from closest observed value.") } lag_grp <- rep_len(seq_len(object$par$lag), length(x)) start[na_pos] <- vapply(split(x, lag_grp)[na_pos], function(x) { if (future) { x <- rev(x) } pos <- which.min(is.na(x)) x[pos] + sum(rnorm(pos - 1, 0, sqrt(object$sigma2))) }, numeric(1L)) } # Construct simulated ts simdrift <- object$par$drift + rnorm(1, 0, object$par$drift.se) sim <- rep_len(start, nsim) + seq_len(nsim) * simdrift + cumulative_e if (!is.null(lambda)) { sim <- InvBoxCox(sim, lambda) } tspx <- tsp(x) ts(sim, start = ifelse(future, tspx[2] + 1 / tspx[3], tspx[1]), frequency = tspx[3]) } #' @rdname simulate.ets #' @export simulate.fracdiff <- function(object, nsim = object$n, seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, ...) 
{ x <- getResponse(object) if (is.null(x)) { future <- FALSE if (is.null(nsim)) { nsim <- 100 } x <- 0 } # Strip initial and final missing values xx <- na.ends(x) n <- length(xx) # Remove mean meanx <- mean(xx) xx <- xx - meanx # Difference series (removes mean as well) y <- undo.na.ends(x, diffseries(xx, d = object$d)) # Create ARMA model for differenced series arma <- Arima( y, order = c(length(object$ar), 0, length(object$ma)), include.mean = FALSE, fixed = c(object$ar, -object$ma) ) # Simulate from ARMA model ysim <- simulate(arma, nsim, seed, future = future, bootstrap = bootstrap, innov = innov) # Undo differencing and add back mean return(unfracdiff(xx, ysim, n, nsim, object$d) + meanx) } #' @rdname simulate.ets #' @export simulate.nnetar <- function(object, nsim = length(object$x), seed = NULL, xreg = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ...) { if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv, inherits = FALSE)) { runif(1) } if (is.null(seed)) { RNGstate <- get(".Random.seed", envir = .GlobalEnv) } else { R.seed <- get(".Random.seed", envir = .GlobalEnv) set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } if (is.null(object$x)) { future <- FALSE } ## only future currently implemented if (!future) { warning("simulate.nnetar() currently only supports future=TRUE") } ## set simulation innovations if (bootstrap) { res <- na.omit(c(residuals(object, type = "innovation"))) res <- res - mean(res) ## scale if appropriate if (!is.null(object$scalex$scale)) { res <- res / object$scalex$scale } e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { res <- na.omit(c(residuals(object, type = "innovation"))) ## scale if appropriate if (!is.null(object$scalex$scale)) { res <- res / object$scalex$scale } e <- rnorm(nsim, 0, sd(res, na.rm = TRUE)) } else if (length(innov) == nsim) { e <- innov if (!is.null(object$scalex$scale)) { e <- e / object$scalex$scale } } else if (isTRUE(innov == 0L)) { ## to pass innov=0 so simulation equals mean forecast e <- rep(innov, nsim) } else { stop("Length of innov must be equal to nsim") } ## tspx <- tsp(object$x) # Check if xreg was used in fitted model if (is.null(object$xreg)) { if (!is.null(xreg)) { warning("External regressors were not used in fitted model, xreg will be ignored") } xreg <- NULL } else { if (is.null(xreg)) { stop("No external regressors provided") } xreg <- as.matrix(xreg) if (NCOL(xreg) != NCOL(object$xreg)) { stop("Number of external regressors does not match fitted model") } if (NROW(xreg) != nsim) { stop("Number of rows in xreg does not match nsim") } } xx <- object$x if (!is.null(lambda)) { xx <- BoxCox(xx, lambda) lambda <- attr(xx, "lambda") } # Check and apply scaling of fitted model if (!is.null(object$scalex)) { xx <- scale(xx, center = object$scalex$center, scale = object$scalex$scale) if (!is.null(xreg)) { xreg <- scale(xreg, center = object$scalexreg$center, scale = object$scalexreg$scale) } } ## Get lags used in fitted model lags <- object$lags maxlag <- max(lags) flag <- rev(tail(xx, n = maxlag)) ## Simulate by iteratively forecasting and adding innovation path <- numeric(nsim) for (i in 1:nsim) { newdata <- c(flag[lags], xreg[i, ]) if (any(is.na(newdata))) { stop("I can't simulate when there are missing values near the end of the series.") } path[i] <- mean(sapply(object$model, predict, newdata = newdata)) + e[i] flag <- c(path[i], 
flag[-maxlag]) } ## Re-scale simulated points if (!is.null(object$scalex)) { path <- path * object$scalex$scale + object$scalex$center } ## Add ts properties path <- ts(path, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) ## Back-transform simulated points if (!is.null(lambda)) { path <- InvBoxCox(path, lambda) } return(path) } #' @rdname simulate.ets #' @export simulate.modelAR <- function(object, nsim = length(object$x), seed = NULL, xreg = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ...) { if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv, inherits = FALSE)) { runif(1) } if (is.null(seed)) { RNGstate <- get(".Random.seed", envir = .GlobalEnv) } else { R.seed <- get(".Random.seed", envir = .GlobalEnv) set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } if (is.null(object$x)) { future <- FALSE } ## only future currently implemented if (!future) { warning("simulate.modelAR() currently only supports future=TRUE") } ## set simulation innovations if (bootstrap) { res <- na.omit(c(residuals(object, type = "innovation"))) res <- res - mean(res) ## scale if appropriate if (!is.null(object$scalex$scale)) { res <- res / object$scalex$scale } e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { res <- na.omit(c(residuals(object, type = "innovation"))) ## scale if appropriate if (!is.null(object$scalex$scale)) { res <- res / object$scalex$scale } e <- rnorm(nsim, 0, sd(res, na.rm = TRUE)) } else if (length(innov) == nsim) { e <- innov if (!is.null(object$scalex$scale)) { e <- e / object$scalex$scale } } else if (isTRUE(innov == 0L)) { ## to pass innov=0 so simulation equals mean forecast e <- rep(innov, nsim) } else { stop("Length of innov must be equal to nsim") } ## tspx <- tsp(object$x) # Check if xreg was used in fitted model if (is.null(object$xreg)) { if (!is.null(xreg)) { warning("External regressors were not used in fitted model, xreg will be ignored") } xreg <- NULL } else { if (is.null(xreg)) { stop("No external regressors provided") } xreg <- as.matrix(xreg) if (NCOL(xreg) != NCOL(object$xreg)) { stop("Number of external regressors does not match fitted model") } if (NROW(xreg) != nsim) { stop("Number of rows in xreg does not match nsim") } } xx <- object$x if (!is.null(lambda)) { xx <- BoxCox(xx, lambda) lambda <- attr(xx, "lambda") } # Check and apply scaling of fitted model if (!is.null(object$scalex)) { xx <- scale(xx, center = object$scalex$center, scale = object$scalex$scale) if (!is.null(xreg)) { xreg <- scale(xreg, center = object$scalexreg$center, scale = object$scalexreg$scale) } } ## Get lags used in fitted model lags <- object$lags maxlag <- max(lags) flag <- rev(tail(xx, n = maxlag)) ## Simulate by iteratively forecasting and adding innovation path <- numeric(nsim) for (i in 1:nsim) { newdata <- c(flag[lags], xreg[i, ]) if (any(is.na(newdata))) { stop("I can't simulate when there are missing values near the end of the series.") } path[i] <- object$predict.FUN(object$model, newdata) + e[i] flag <- c(path[i], flag[-maxlag]) } ## Re-scale simulated points if (!is.null(object$scalex)) { path <- path * object$scalex$scale + object$scalex$center } ## Add ts properties path <- ts(path, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) ## Back-transform simulated points if (!is.null(lambda)) { path <- InvBoxCox(path, lambda) } return(path) }
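# Illustrative usage of the simulate() methods defined above. This is a
# hedged sketch (wrapped in `if (FALSE)` so it never runs at build time);
# it assumes the package is attached, and the object names are arbitrary.
if (FALSE) {
  fit <- auto.arima(USAccDeaths)
  # Conditional simulation: five sample paths that continue on from the data
  paths <- replicate(5, simulate(fit, nsim = 24, future = TRUE))
  # Resampled (bootstrap) errors rather than Gaussian errors
  sim_boot <- simulate(fit, nsim = 24, bootstrap = TRUE)
  # Supplying `seed` makes a simulation reproducible
  s1 <- simulate(fit, seed = 42)
  s2 <- simulate(fit, seed = 42)
  all.equal(s1, s2) # TRUE
}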
forecast/R/spline.R0000644000176200001440000002010314323125536013702 0ustar liggesusers############################################### ##### Forecasting Using Smoothing Splines ##### ############################################### # Optimal smoothing parameter denoted by beta # lambda is the Box-Cox parameter. ################# FUNCTIONS ################## ## Set up Sigma of order (n x n) make.Sigma <- function(n, n0=0) { nn <- n + n0 Sigma <- matrix(0, nrow = nn, ncol = nn) for (i in 1:nn) Sigma[i, i:nn] <- Sigma[i:nn, i] <- (i * i * (3 * (i:nn) - i)) / 6 return(Sigma / (n ^ 3)) } ## Compute spline matrices spline.matrices <- function(n, beta, cc=1e2, n0=0) { nn <- n + n0 Sigma <- make.Sigma(n, n0) s <- cbind(rep(1, nn), (1:nn) / n) Omega <- cc * s %*% t(s) + Sigma / beta + diag(nn) max.Omega <- max(Omega) inv.Omega <- solve(Omega / max.Omega, tol = 1e-10) / max.Omega P <- chol(inv.Omega) return(list(s = s, Sigma = Sigma, Omega = Omega, inv.Omega = inv.Omega, P = P)) } ## Compute smoothing splines ## Return -loglikelihood # beta multiplied by 1e6 to avoid numerical difficulties in optimization spline.loglik <- function(beta, y, cc=1e2) { n <- length(y) mat <- spline.matrices(n, beta / 1e6, cc = cc) y.star <- mat$P %*% matrix(y) return(-log(det(mat$P)) + 0.5 * n * log(sum(y.star ^ 2))) } # Spline forecasts #' Cubic Spline Forecast #' #' Returns local linear forecasts and prediction intervals using cubic #' smoothing splines. #' #' The cubic smoothing spline model is equivalent to an ARIMA(0,2,2) model but #' with a restricted parameter space. The advantage of the spline model over #' the full ARIMA model is that it provides a smooth historical trend as well #' as a linear forecast function. Hyndman, King, Pitrun, and Billah (2005) show #' that the forecast performance of the method is hardly affected by the #' restricted parameter space. #' #' @param y a numeric vector or time series of class \code{ts} #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param method Method for selecting the smoothing parameter. If #' \code{method="gcv"}, the generalized cross-validation method from #' \code{\link[stats]{smooth.spline}} is used. If \code{method="mle"}, the #' maximum likelihood method from Hyndman et al (2005) is used. #' @param x Deprecated. Included for backwards compatibility. #' @inheritParams forecast.ts #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{splinef}.
#' #' An object of class \code{"forecast"} containing the following elements: #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for #' prediction intervals} \item{upper}{Upper limits for prediction intervals} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The original time series (either \code{object} itself or the time #' series used to create the model stored as \code{object}).} #' \item{onestepf}{One-step forecasts from the fitted model.} #' \item{fitted}{Smooth estimates of the fitted trend using all data.} #' \item{residuals}{Residuals from the fitted model. That is x minus one-step #' forecasts.} #' @author Rob J Hyndman #' @seealso \code{\link[stats]{smooth.spline}}, \code{\link[stats]{arima}}, #' \code{\link{holt}}. #' @references Hyndman, King, Pitrun and Billah (2005) Local linear forecasts #' using cubic smoothing splines. \emph{Australian and New Zealand Journal of #' Statistics}, \bold{47}(1), 87-99. #' \url{https://robjhyndman.com/publications/splinefcast/}. #' @keywords ts #' @examples #' fcast <- splinef(uspop,h=5) #' plot(fcast) #' summary(fcast) #' #' @export splinef <- function(y, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, method=c("gcv", "mle"), x=y) { method <- match.arg(method) if (!is.ts(x)) { x <- ts(x) } n <- length(x) freq <- frequency(x) if (!is.null(lambda)) { origx <- x x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } # Find optimal beta using likelihood approach in Hyndman et al paper. if (method == "mle") { if (n > 100) { # Use only last 100 observations to get beta xx <- x[(n - 99):n] } else { xx <- x } beta.est <- optimize(spline.loglik, interval = c(1e-6, 1e7), y = xx)$minimum / 1e6 # Compute spar which is equivalent to beta r <- 256 * smooth.spline(1:n, x, spar = 0)$lambda lss <- beta.est * n ^ 3 / (n - 1) ^ 3 spar <- (log(lss / r) / log(256) + 1) / 3 splinefit <- smooth.spline(1:n, x, spar = spar) sfits <- splinefit$y } else # Use GCV { splinefit <- smooth.spline(1:n, x, cv = FALSE, spar = NULL) sfits <- splinefit$y beta.est <- pmax(1e-7, splinefit$lambda * (n - 1) ^ 3 / n ^ 3) } # Compute matrices for optimal beta mat <- spline.matrices(n, beta.est) newmat <- spline.matrices(n, beta.est, n0 = h) # Get one-step predictors yfit <- e <- rep(NA, n) if (n > 1000) { warning("Series too long to compute training set fits and residuals") } else # This is probably grossly inefficient but I can't think of a better way right now { for (i in 1:(n - 1)) { U <- mat$Omega[1:i, i + 1] Oinv <- solve(mat$Omega[1:i, 1:i] / 1e6) / 1e6 yfit[i + 1] <- t(U) %*% Oinv %*% x[1:i] sd <- sqrt(mat$Omega[i + 1, i + 1] - t(U) %*% Oinv %*% U) e[i + 1] <- (x[i + 1] - yfit[i + 1]) / sd } } # Compute sigma^2 sigma2 <- mean(e ^ 2, na.rm = TRUE) # Compute mean and var of forecasts U <- newmat$Omega[1:n, n + (1:h)] Omega0 <- newmat$Omega[n + (1:h), n + (1:h)] Yhat <- t(U) %*% mat$inv.Omega %*% x sd <- sqrt(sigma2 * diag(Omega0 - t(U) %*% mat$inv.Omega %*% U)) # Compute prediction intervals. 
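# (The code below rescales `level` to percentages if needed, then sets the
# limits to Yhat +/- qnorm(0.5 + level / 200) * sd for each requested level.)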
if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nconf <- length(level) lower <- upper <- matrix(NA, nrow = h, ncol = nconf) for (i in 1:nconf) { conf.factor <- qnorm(0.5 + 0.005 * level[i]) upper[, i] <- Yhat + conf.factor * sd lower[, i] <- Yhat - conf.factor * sd } lower <- ts(lower, start = tsp(x)[2] + 1 / freq, frequency = freq) upper <- ts(upper, start = tsp(x)[2] + 1 / freq, frequency = freq) res <- ts(x - yfit, start = start(x), frequency = freq) if (!is.null(lambda)) { Yhat <- InvBoxCox(Yhat, lambda, biasadj, list(level = level, upper = upper, lower = lower)) upper <- InvBoxCox(upper, lambda) lower <- InvBoxCox(lower, lambda) yfit <- InvBoxCox(yfit, lambda) sfits <- InvBoxCox(sfits, lambda) x <- origx } return(structure( list( method = "Cubic Smoothing Spline", level = level, x = x, series = deparse(substitute(y)), mean = ts(Yhat, frequency = freq, start = tsp(x)[2] + 1 / freq), upper = ts(upper, start = tsp(x)[2] + 1 / freq, frequency = freq), lower = ts(lower, start = tsp(x)[2] + 1 / freq, frequency = freq), model = list(beta = beta.est * n ^ 3, call = match.call()), fitted = ts(sfits, start = start(x), frequency = freq), residuals = res, standardizedresiduals = ts(e, start = start(x), frequency = freq), onestepf = ts(yfit, start = start(x), frequency = freq) ), lambda = lambda, class = c("splineforecast", "forecast") )) } #' @rdname plot.forecast #' #' @examples #' fcast <- splinef(airmiles,h=5) #' plot(fcast) #' autoplot(fcast) #' #' @export plot.splineforecast <- function(x, fitcol=2, type="o", pch=19, ...) { plot.forecast(x, type = type, pch = pch, ...) lines(x$fitted, col = fitcol) } #' @rdname is.forecast #' @export is.splineforecast <- function(x) { inherits(x, "splineforecast") } forecast/R/whichmodels.R0000644000176200001440000000103714323125536014717 0ustar liggesusersWhichModels <- function(max.p, max.q, max.P, max.Q, maxK) { total.models <- (max.p + 1) * (max.q + 1) * (max.P + 1) * (max.Q + 1) * length(0:maxK) x <- numeric(total.models) i <- 1 for (x1 in 0:max.p) for (x2 in 0:max.q) { for (x3 in 0:max.P) for (x4 in 0:max.Q) { for (K in 0:maxK) { x[i] <- paste(x1, "f", x2, "f", x3, "f", x4, "f", K, sep = "") i <- i + 1 } } } return(x) } UndoWhichModels <- function(n) { as.numeric(unlist(strsplit(n, split = "f"))) } forecast/R/calendar.R0000644000176200001440000001336114323125536014165 0ustar liggesusers## Add as.Date.timeDate to S3 method table #' @export as.Date.timeDate <- timeDate::as.Date.timeDate #' Number of trading days in each season #' #' Returns number of trading days in each month or quarter of the observed time #' period in a major financial center. #' #' Useful for trading days length adjustments. More on how to define "business #' days", please refer to \code{\link[timeDate]{isBizday}}. #' #' @param x Monthly or quarterly time series #' @param FinCenter Major financial center. 
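#' One of "New York", "London", "NERC", "Toronto" or "Zurich"; the matching
#' holiday calendar is taken from the \pkg{timeDate} package.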
#' @return Time series #' @author Earo Wang #' @seealso \code{\link[forecast]{monthdays}} #' @keywords ts #' @examples #' #' x <- ts(rnorm(30), start = c(2013, 2), frequency = 12) #' bizdays(x, FinCenter = "New York") #' @export bizdays <- function(x, FinCenter = c( "New York", "London", "NERC", "Toronto", "Zurich" )) { # Return the number of trading days corresponding to the input ts # # Args: # x: a ts object # FinCenter: inherits holiday calendar from "timeDate" package # # Returns: # A matrix contains the number of trading days if (is.null(tsp(x))) { stop("We cannot handle a time series without time attributes.") } # Convert tsp to date freq <- frequency(x) years <- start(x)[1L]:end(x)[1L] # Grab the holidays from years and financial center FinCenter <- match.arg(FinCenter) if (FinCenter == "New York") { holidays <- timeDate::holidayNYSE(years) } else if (FinCenter == "London") { holidays <- timeDate::holidayLONDON(years) } else if (FinCenter == "NERC") { holidays <- timeDate::holidayNERC(years) } else if (FinCenter == "Toronto") { holidays <- timeDate::holidayTSX(years) } else if (FinCenter == "Zurich") { holidays <- timeDate::holidayZURICH(years) } if (freq == 12L) { # monthly data date <- zoo::as.Date(time(x)) start <- date[1L] end <- seq(date[length(date)], length.out = 2L, by = "month")[2L] - 1L days.len <- timeDate::timeSequence(from = start, to = end) # Grab business days biz <- days.len[timeDate::isBizday(days.len, holidays = holidays)] bizdays <- format(biz, format = "%Y-%m") } else if (freq == 4L) { # Quarterly data date <- zoo::as.Date(time(x)) start <- date[1L] end <- seq(date[length(date)], length.out = 2L, by = "3 month")[2L] - 1L days.len <- timeDate::timeSequence(from = start, to = end) # Grab business days biz <- days.len[timeDate::isBizday(days.len, holidays = holidays)] bizdays <- format(zoo::as.yearqtr(biz), format = "%Y Qtr%q") } # else if (freq == 52L) { # Weekly data # start <- paste0(start(x)[1L], "-01-01") # start <- as.Date(start) + start(x)[2L] * 7L # end <- start + length(time(x)) * 7L # days.len <- as.timeDate(seq(start, end, by = "days"), FinCenter = FinCenter) # biz <- days.len[isBizday(days.len, # holidays = unique(format(days.len, "%Y")))] # bizdays <- format(biz, format = "%Y Wk%W") # } num.days <- table(bizdays) out <- ts(num.days, start = tsp(x)[1L], frequency = freq) return(out) } #' Easter holidays in each season #' #' Returns a vector of 0's and 1's or fractional results if Easter spans March #' and April in the observed time period. Easter is defined as the days from #' Good Friday to Easter Sunday inclusively, plus optionally Easter Monday if #' \code{easter.mon=TRUE}. #' #' Useful for adjusting calendar effects. #' #' @param x Monthly or quarterly time series #' @param easter.mon If TRUE, the length of Easter holidays includes Easter #' Monday. 
#' @return Time series #' @author Earo Wang #' @keywords ts #' @examples #' #' easter(wineind, easter.mon = TRUE) #' @export easter <- function(x, easter.mon = FALSE) { # Return a vector of 0's and 1's for easter holidays # # Args: # x: monthly, quarterly or weekly data # easter.mon: An option including easter.mon if (is.null(tsp(x))) { stop("We cannot handle a time series without time attributes.") } freq <- frequency(x) date <- zoo::as.Date(time(x)) start.yr <- start(x)[1L] end.yr <- end(x)[1L] yr.span <- seq(start.yr, end.yr) gd.fri0 <- Easter(yr.span, -2L) if (easter.mon) { easter0 <- Easter(yr.span, 1L) } else { easter0 <- Easter(yr.span) } if (freq == 12L) { fmat <- "%Y-%m" yr.mon <- format(date, format = fmat) gd.fri <- format(gd.fri0, format = fmat) # good fri easter <- format(easter0, format = fmat) # easter mon } else if (freq == 4L) { fmat <- "%Y-%q" yr.mon <- format(zoo::as.yearqtr(date), format = fmat) # yr.qtr gd.fri <- format(zoo::as.yearqtr(gd.fri0), format = fmat) easter <- format(zoo::as.yearqtr(easter0), format = fmat) } span <- cbind(gd.fri, easter) # the span of easter holidays hdays <- unlist(apply(span, 1, unique)) dummies <- ifelse(yr.mon %in% hdays, 1L, 0L) # Allow fractional results denominator <- (easter0 - gd.fri0 + 1L)[1L] last.mar <- as.timeDate(paste0(yr.span, "-03-31")) dif <- difftimeDate(last.mar, gd.fri0, units = "days") + 1L # Remove easter out of date range if (date[1L] > as.character(last.mar[1L])) { dif <- dif[-1L] } if (date[length(yr.mon)] < as.character(last.mar[length(last.mar)])) { dif <- dif[-length(dif)] } replace <- dif > denominator | dif <= 0L dif[replace] <- denominator # Easter in the same month # Start to insert the remaining part falling in Apr index <- which(dif != denominator) if (length(index) != 0L) { values <- denominator - dif[index] new.index <- index[1L] for (i in 1L:length(index)) { dif <- append(dif, values = values[i], new.index) new.index <- index[i + 1L] + i } dummies[dummies == 1L] <- round(dif / unclass(denominator), digits = 2) } out <- ts(dummies, start = tsp(x)[1L], frequency = freq) return(out) } forecast/R/graph.R0000644000176200001440000001715614323125536013523 0ustar liggesusers### Time series graphics and transformations #' Time series display #' #' Plots a time series along with its acf and either its pacf, lagged #' scatterplot or spectrum. #' #' \code{ggtsdisplay} will produce the equivalent plot using ggplot graphics. #' #' @param x a numeric vector or time series of class \code{ts}. #' @param plot.type type of plot to include in lower right corner. #' @param points logical flag indicating whether to show the individual points #' or not in the time plot. #' @param smooth logical flag indicating whether to show a smooth loess curve #' superimposed on the time plot. #' @param ci.type type of confidence limits for ACF that is passed to #' \code{\link[stats]{acf}}. Should the confidence limits assume a white noise #' input or for lag \eqn{k} an MA(\eqn{k-1}) input? #' @param lag.max the maximum lag to plot for the acf and pacf. A suitable #' value is selected by default if the argument is missing. #' @param na.action function to handle missing values in acf, pacf and spectrum #' calculations. The default is \code{\link[stats]{na.contiguous}}. Useful #' alternatives are \code{\link[stats]{na.pass}} and \code{\link{na.interp}}. #' @param theme Adds a ggplot element to each plot, typically a theme. #' @param main Main title. #' @param xlab X-axis label. #' @param ylab Y-axis label. #' @param pch Plotting character. 
#' @param cex Character size. #' @param \dots additional arguments to \code{\link[stats]{acf}}. #' @return None. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{plot.ts}}, \code{\link{Acf}}, #' \code{\link[stats]{spec.ar}} #' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles #' and practice}, 2nd edition, OTexts: Melbourne, Australia. #' \url{https://otexts.com/fpp2/} #' @keywords ts #' @examples #' tsdisplay(diff(WWWusage)) #' ggtsdisplay(USAccDeaths, plot.type="scatter") #' #' @export tsdisplay <- function(x, plot.type=c("partial", "histogram", "scatter", "spectrum"), points=TRUE, ci.type=c("white", "ma"), lag.max, na.action=na.contiguous, main=NULL, xlab="", ylab="", pch=1, cex=0.5, ...) { plot.type <- match.arg(plot.type) ci.type <- match.arg(ci.type) def.par <- par(no.readonly = TRUE) # save default, for resetting... nf <- layout(matrix(c(1, 1, 2, 3), 2, 2, byrow = TRUE)) if (is.null(main)) { main <- deparse(substitute(x)) } if (!is.ts(x)) { x <- ts(x) } if (missing(lag.max)) { lag.max <- round(min(max(10 * log10(length(x)), 3 * frequency(x)), length(x) / 3)) } plot.ts(x, main = main, ylab = ylab, xlab = xlab, ylim = range(x, na.rm = TRUE), ...) if (points) { points(x, pch = pch, cex = cex, ...) } ylim <- c(-1, 1) * 3 / sqrt(length(x)) junk1 <- stats::acf(c(x), lag.max = lag.max, plot = FALSE, na.action = na.action) junk1$acf[1, 1, 1] <- 0 if (ci.type == "ma") { ylim <- range(ylim, 0.66 * ylim * max(sqrt(cumsum(c(1, 2 * junk1$acf[-1, 1, 1] ^ 2))))) } ylim <- range(ylim, junk1$acf) if (plot.type == "partial") { junk2 <- stats::pacf(c(x), lag.max = lag.max, plot = FALSE, na.action = na.action) ylim <- range(ylim, junk2$acf) } oldpar <- par(mar = c(5, 4.1, 1.5, 2)) plot(junk1, ylim = ylim, xlim = c(1, lag.max), ylab = "ACF", main = "", ci.type = ci.type, ...) if (plot.type == "scatter") { n <- length(x) plot(x[1:(n - 1)], x[2:n], xlab = expression(Y[t - 1]), ylab = expression(Y[t]), ...) } else if (plot.type == "spectrum") { spec.ar(x, main = "", na.action = na.action) } else if (plot.type == "histogram") { graphics::hist(x, breaks = "FD", main = "", xlab = main) } else { plot(junk2, ylim = ylim, xlim = c(1, lag.max), ylab = "PACF", main = "", ...) } par(def.par) layout(1) invisible() } #' Seasonal plot #' #' Plots a seasonal plot as described in Hyndman and Athanasopoulos (2014, #' chapter 2). This is like a time plot except that the data are plotted #' against the seasons in separate years. #' #' @param x a numeric vector or time series of class \code{ts}. #' @param s seasonal frequency of x #' @param season.labels Labels for each season in the "year" #' @param year.labels Logical flag indicating whether labels for each year of #' data should be plotted on the right. #' @param year.labels.left Logical flag indicating whether labels for each year #' of data should be plotted on the left. #' @param type plot type (as for \code{\link[graphics]{plot}}). Not yet #' supported for ggseasonplot. #' @param main Main title. #' @param xlab X-axis label. #' @param ylab Y-axis label. #' @param col Colour #' @param labelgap Distance between year labels and plotted lines #' @param \dots additional arguments to \code{\link[graphics]{plot}}. #' @return None. #' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{monthplot}} #' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles #' and practice}, 2nd edition, OTexts: Melbourne, Australia. 
#' \url{https://otexts.com/fpp2/} #' @keywords ts #' @examples #' seasonplot(AirPassengers, col=rainbow(12), year.labels=TRUE) #' #' @export seasonplot <- function(x, s, season.labels=NULL, year.labels=FALSE, year.labels.left=FALSE, type="o", main, xlab=NULL, ylab="", col=1, labelgap=0.1, ...) { if (missing(main)) { main <- paste("Seasonal plot:", deparse(substitute(x))) } # Check data are seasonal and convert to integer seasonality if (missing(s)) { s <- round(frequency(x)) } if (s <= 1) { stop("Data are not seasonal") } tspx <- tsp(x) x <- ts(x, start = tspx[1], frequency = s) # Pad series tsx <- x startperiod <- round(cycle(x)[1]) if (startperiod > 1) { x <- c(rep(NA, startperiod - 1), x) } x <- c(x, rep(NA, s - length(x) %% s)) Season <- rep(c(1:s, NA), length(x) / s) xnew <- rep(NA, length(x)) xnew[!is.na(Season)] <- x if (s == 12) { labs <- month.abb xLab <- "Month" } else if (s == 4) { labs <- paste("Q", 1:4, sep = "") xLab <- "Quarter" } else if (s == 7) { labs <- c("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat") xLab <- "Day" } else if (s == 52) { labs <- 1:s xLab <- "Week" } else if (s == 24) { labs <- 0:(s - 1) xLab <- "Hour" } else if (s == 48) { labs <- seq(0, 23.5, by = 0.5) xLab <- "Half-hour" } else { if (s < 20) { labs <- 1:s } else { labs <- NULL } xLab <- "Season" } if (is.null(xlab)) { xlab <- xLab } if (is.null(season.labels)) { season.labels <- labs } if (year.labels) { xlim <- c(1 - labelgap, s + 0.4 + labelgap) } else { xlim <- c(1 - labelgap, s) } if (year.labels.left) { xlim[1] <- 0.4 - labelgap } plot(Season, xnew, xaxt = "n", xlab = xlab, type = type, ylab = ylab, main = main, xlim = xlim, col = 0, ...) nn <- length(Season) / s col <- rep(col, nn)[1:nn] for (i in 0:(nn - 1)) lines(Season[(i * (s + 1) + 1):((s + 1) * (i + 1))], xnew[(i * (s + 1) + 1):((s + 1) * (i + 1))], type = type, col = col[i + 1], ...) if (year.labels) { idx <- which(Season[!is.na(xnew)] == s) year <- round(time(tsx)[idx], nchar(s)) text(x = rep(s + labelgap, length(year)), y = tsx[idx], labels = paste(c(trunc(year))), adj = 0, ..., col = col[1:length(idx)]) } if (year.labels.left) { idx <- which(Season[!is.na(xnew)] == 1) year <- round(time(tsx)[idx], nchar(s)) if (min(idx) > 1) { # First year starts after season 1n col <- col[-1] } text(x = rep(1 - labelgap, length(year)), y = tsx[idx], labels = paste(c(trunc(year))), adj = 1, ..., col = col[1:length(idx)]) } if (is.null(labs)) { axis(1, ...) } else { axis(1, labels = season.labels, at = 1:s, ...) } } forecast/R/errors.R0000644000176200001440000002524314323125536013732 0ustar liggesusers## Measures of forecast accuracy ## Forecasts in f. This may be a numerical vector or the output from arima or ets or derivatives. ## Actual values in x # dx = response variable in historical data ## test enables a subset of x and f to be tested. 
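## e.g., accuracy(fcast, x, test = 1:12) would restrict every measure to the
## first twelve forecast horizons (an illustrative call, not executed here).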
# MASE: d is the # of differencing # MASE: D is the # of seasonal differencing testaccuracy <- function(f, x, test, d, D) { dx <- getResponse(f) if (is.data.frame(x)) { responsevar <- as.character(formula(f$model))[2] if (is.element(responsevar, colnames(x))) { x <- x[, responsevar] } else { stop("I can't figure out what data to use.") } } if (is.list(f)) { if (is.element("mean", names(f))) { f <- f$mean } else { stop("Unknown list structure") } } if (is.ts(x) && is.ts(f)) { tspf <- tsp(f) tspx <- tsp(x) start <- max(tspf[1], tspx[1]) end <- min(tspf[2], tspx[2]) # Adjustment to allow for floating point issues start <- min(start, end) end <- max(start, end) f <- window(f, start = start, end = end) x <- window(x, start = start, end = end) } n <- length(x) if (is.null(test)) { test <- 1:n } else if (min(test) < 1 || max(test) > n) { warning("test elements must be within sample") test <- test[test >= 1 & test <= n] } ff <- f xx <- x # Check length of f if (length(f) < n) { stop("Not enough forecasts. Check that forecasts and test data match.") } error <- (xx - ff[1:n])[test] pe <- error / xx[test] * 100 me <- mean(error, na.rm = TRUE) mse <- mean(error^2, na.rm = TRUE) mae <- mean(abs(error), na.rm = TRUE) mape <- mean(abs(pe), na.rm = TRUE) mpe <- mean(pe, na.rm = TRUE) out <- c(me, sqrt(mse), mae, mpe, mape) names(out) <- c("ME", "RMSE", "MAE", "MPE", "MAPE") # Compute MASE if historical data available if (!is.null(dx)) { tspdx <- tsp(dx) if (!is.null(tspdx)) { if (D > 0) { # seasonal differencing nsd <- diff(dx, lag = round(tspdx[3L]), differences = D) } else { # non seasonal differencing nsd <- dx } if (d > 0) { nd <- diff(nsd, differences = d) } else { nd <- nsd } scale <- mean(abs(nd), na.rm = TRUE) } else { # not time series scale <- mean(abs(dx - mean(dx, na.rm = TRUE)), na.rm = TRUE) } mase <- mean(abs(error / scale), na.rm = TRUE) out <- c(out, mase) names(out)[length(out)] <- "MASE" } # Additional time series measures if (!is.null(tsp(x)) && n > 1) { fpe <- (c(ff[2:n]) / c(xx[1:(n - 1)]) - 1)[test - 1] ape <- (c(xx[2:n]) / c(xx[1:(n - 1)]) - 1)[test - 1] theil <- sqrt(sum((fpe - ape)^2, na.rm = TRUE) / sum(ape^2, na.rm = TRUE)) if (length(error) > 1) { r1 <- acf(error, plot = FALSE, lag.max = 2, na.action = na.pass)$acf[2, 1, 1] } else { r1 <- NA } nj <- length(out) out <- c(out, r1, theil) names(out)[nj + (1:2)] <- c("ACF1", "Theil's U") } return(out) } trainingaccuracy <- function(f, test, d, D) { # Make sure x is an element of f when f is a fitted model rather than a forecast # if(!is.list(f)) # stop("f must be a forecast object or a time series model object.") dx <- getResponse(f) if (is.element("splineforecast", class(f))) { fits <- f$onestepf } else { fits <- fitted(f) } # Don't use f$resid as this may contain multiplicative errors. 
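# (for multiplicative-error models those are relative errors, so
# response-scale residuals are recomputed here as the response minus fits)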
res <- dx - fits n <- length(res) if (is.null(test)) { test <- 1:n } if (min(test) < 1 || max(test) > n) { warning("test elements must be within sample") test <- test[test >= 1 & test <= n] } tspdx <- tsp(dx) res <- res[test] dx <- dx[test] pe <- res / dx * 100 # Percentage error me <- mean(res, na.rm = TRUE) mse <- mean(res^2, na.rm = TRUE) mae <- mean(abs(res), na.rm = TRUE) mape <- mean(abs(pe), na.rm = TRUE) mpe <- mean(pe, na.rm = TRUE) out <- c(me, sqrt(mse), mae, mpe, mape) names(out) <- c("ME", "RMSE", "MAE", "MPE", "MAPE") # Compute MASE if historical data available if (!is.null(dx)) { if (!is.null(tspdx)) { if (D > 0) { # seasonal differencing nsd <- diff(dx, lag = round(tspdx[3L]), differences = D) } else { # non seasonal differencing nsd <- dx } if (d > 0) { nd <- diff(nsd, differences = d) } else { nd <- nsd } scale <- mean(abs(nd), na.rm = TRUE) } else { # not time series scale <- mean(abs(dx - mean(dx, na.rm = TRUE)), na.rm = TRUE) } mase <- mean(abs(res / scale), na.rm = TRUE) out <- c(out, mase) names(out)[length(out)] <- "MASE" } # Additional time series measures if (!is.null(tspdx)) { if (length(res) > 1) { r1 <- acf(res, plot = FALSE, lag.max = 2, na.action = na.pass)$acf[2, 1, 1] } else { r1 <- NA } nj <- length(out) out <- c(out, r1) names(out)[nj + 1] <- "ACF1" } return(out) } #' Accuracy measures for a forecast model #' #' Returns range of summary measures of the forecast accuracy. If \code{x} is #' provided, the function measures test set forecast accuracy #' based on \code{x-f}. If \code{x} is not provided, the function only produces #' training set accuracy measures of the forecasts based on #' \code{f["x"]-fitted(f)}. All measures are defined and discussed in Hyndman #' and Koehler (2006). #' #' The measures calculated are: #' \itemize{ #' \item ME: Mean Error #' \item RMSE: Root Mean Squared Error #' \item MAE: Mean Absolute Error #' \item MPE: Mean Percentage Error #' \item MAPE: Mean Absolute Percentage Error #' \item MASE: Mean Absolute Scaled Error #' \item ACF1: Autocorrelation of errors at lag 1. #' } #' By default, the MASE calculation is scaled using MAE of training set naive #' forecasts for non-seasonal time series, training set seasonal naive forecasts #' for seasonal time series and training set mean forecasts for non-time series data. #' If \code{f} is a numerical vector rather than a \code{forecast} object, the MASE #' will not be returned as the training data will not be available. #' #' See Hyndman and Koehler (2006) and Hyndman and Athanasopoulos (2014, Section #' 2.5) for further details. #' #' @param object An object of class \dQuote{\code{forecast}}, or a numerical vector #' containing forecasts. It will also work with \code{Arima}, \code{ets} and #' \code{lm} objects if \code{x} is omitted -- in which case training set accuracy #' measures are returned. #' @param x An optional numerical vector containing actual values of the same #' length as object, or a time series overlapping with the times of \code{f}. #' @param test Indicator of which elements of \code{x} and \code{f} to test. If #' \code{test} is \code{NULL}, all elements are used. Otherwise test is a #' numeric vector containing the indices of the elements to use in the test. #' @param d An integer indicating the number of lag-1 differences to be used #' for the denominator in MASE calculation. Default value is 1 for non-seasonal #' series and 0 for seasonal series. 
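#' For the non-seasonal default (\code{d = 1}), the MASE denominator reduces
#' to the in-sample mean absolute error of naive forecasts, i.e.
#' \code{mean(abs(diff(x)))} computed over the training data.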
#' @param D An integer indicating the number of seasonal differences to be used #' for the denominator in MASE calculation. Default value is 0 for non-seasonal #' series and 1 for seasonal series. #' @param ... Additional arguments depending on the specific method. #' @param f Deprecated. Please use `object` instead. #' @return Matrix giving forecast accuracy measures. #' @author Rob J Hyndman #' @references Hyndman, R.J. and Koehler, A.B. (2006) "Another look at measures #' of forecast accuracy". \emph{International Journal of Forecasting}, #' \bold{22}(4), 679-688. Hyndman, R.J. and Athanasopoulos, G. (2018) #' "Forecasting: principles and practice", 2nd ed., OTexts, Melbourne, Australia. #' Section 3.4 "Evaluating forecast accuracy". #' \url{https://otexts.com/fpp2/accuracy.html}. #' @keywords ts #' @examples #' #' fit1 <- rwf(EuStockMarkets[1:200, 1], h = 100) #' fit2 <- meanf(EuStockMarkets[1:200, 1], h = 100) #' accuracy(fit1) #' accuracy(fit2) #' accuracy(fit1, EuStockMarkets[201:300, 1]) #' accuracy(fit2, EuStockMarkets[201:300, 1]) #' plot(fit1) #' lines(EuStockMarkets[1:300, 1]) #' @export accuracy.default <- function(object, x, test = NULL, d = NULL, D = NULL, f = NULL, ...) { if (!is.null(f)) { warning("Using `f` as the argument for `accuracy()` is deprecated. Please use `object` instead.") object <- f } if (!any(is.element(class(object), c( "ARFIMA", "mforecast", "forecast", "ts", "integer", "numeric", "Arima", "ets", "lm", "bats", "tbats", "nnetar", "stlm", "baggedModel" )))) { stop(paste("No accuracy method found for an object of class",class(object))) } if (is.element("mforecast", class(object))) { return(accuracy.mforecast(object, x, test, d, D)) } trainset <- (is.list(object)) testset <- (!missing(x)) if (testset && !is.null(test)) { trainset <- FALSE } if (!trainset && !testset) { stop("Unable to compute forecast accuracy measures") } # Find d and D if (is.null(D) && is.null(d)) { if (testset) { d <- as.numeric(frequency(x) == 1) D <- as.numeric(frequency(x) > 1) } else if (trainset) { if (!is.null(object$mean)) { d <- as.numeric(frequency(object$mean) == 1) D <- as.numeric(frequency(object$mean) > 1) } else { d <- as.numeric(frequency(object[["x"]]) == 1) D <- as.numeric(frequency(object[["x"]]) > 1) } } else { d <- as.numeric(frequency(object) == 1) D <- as.numeric(frequency(object) > 1) } } if (trainset) { trainout <- trainingaccuracy(object, test, d, D) trainnames <- names(trainout) } else { trainnames <- NULL } if (testset) { testout <- testaccuracy(object, x, test, d, D) testnames <- names(testout) } else { testnames <- NULL } outnames <- unique(c(trainnames, testnames)) out <- matrix(NA, nrow = 2, ncol = length(outnames)) colnames(out) <- outnames rownames(out) <- c("Training set", "Test set") if (trainset) { out[1, names(trainout)] <- trainout } if (testset) { out[2, names(testout)] <- testout } if (!testset) { out <- out[1, , drop = FALSE] } if (!trainset) { out <- out[2, , drop = FALSE] } return(out) } # Compute accuracy for an mforecast object #' @export accuracy.mforecast <- function(object, x, test = NULL, d, D, f = NULL, ...) { if (!is.null(f)) { warning("Using `f` as the argument for `accuracy()` is deprecated. 
Please use `object` instead.") object <- f } out <- NULL nox <- missing(x) i <- 1 for (fcast in object$forecast) { if (nox) { out1 <- accuracy(fcast, test = test, d = d, D = D) } else { out1 <- accuracy(fcast, x[, i], test, d, D) } rownames(out1) <- paste(fcast$series, rownames(out1)) out <- rbind(out, out1) i <- i + 1 } return(out) } forecast/R/mforecast.R0000644000176200001440000002532714323125536014404 0ustar liggesusers#' @rdname is.forecast #' @export is.mforecast <- function(x) { inherits(x, "mforecast") } mlmsplit <- function(x, index=NULL) { if (is.null(index)) { stop("Must select lm using index=integer(1)") } mfit <- match(c("coefficients", "residuals", "effects", "fitted.values"), names(x), 0L) for (j in mfit) { x[[j]] <- x[[j]][, index] } class(x) <- "lm" y <- attr(x$terms, "response") yName <- make.names(colnames(x$model[[y]])[index]) x$model[[y]] <- x$model[[y]][, index] colnames(x$model)[y] <- yName attr(x$model, "terms") <- terms(reformulate(attr(x$terms, "term.labels"), response = yName), data = x$model) if (!is.null(tsp(x$data[, 1]))) { tspx <- tsp(x$data[, 1]) # Consolidate ts attributes for forecast.lm x$data <- lapply(x$model, function(x) ts(x, start = tspx[1], end = tspx[2], frequency = tspx[3])) class(x$data) <- "data.frame" row.names(x$data) <- 1:max(sapply(x$data, NROW)) } x$terms <- terms(x$model) return(x) } #' Forecast a multiple linear model with possible time series components #' #' \code{forecast.mlm} is used to predict multiple linear models, especially #' those involving trend and seasonality components. #' #' \code{forecast.mlm} is largely a wrapper for #' \code{\link[forecast]{forecast.lm}()} except that it allows forecasts to be #' generated on multiple series. Also, the output is reformatted into a #' \code{mforecast} object. #' #' @param object Object of class "mlm", usually the result of a call to #' \code{\link[stats]{lm}} or \code{\link{tslm}}. #' @param newdata An optional data frame in which to look for variables with #' which to predict. If omitted, it is assumed that the only variables are #' trend and season, and \code{h} forecasts are produced. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable #' for fan plots. #' @param h Number of periods for forecasting. Ignored if \code{newdata} #' present. #' @param ts If \code{TRUE}, the forecasts will be treated as time series #' provided the original data is a time series; the \code{newdata} will be #' interpreted as related to the subsequent time periods. If \code{FALSE}, any #' time series attributes of the original data will be ignored. #' @param ... Other arguments passed to \code{\link[forecast]{forecast.lm}()}. #' @inheritParams forecast.ts #' #' @return An object of class "\code{mforecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.lm}. 
#' #' An object of class \code{"mforecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a multivariate time series} #' \item{lower}{Lower limits for prediction intervals of each series} #' \item{upper}{Upper limits for prediction intervals of each series} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The historical data for the response variable.} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted #' values.} \item{fitted}{Fitted values} #' @author Mitchell O'Hara-Wild #' @seealso \code{\link{tslm}}, \code{\link{forecast.lm}}, #' \code{\link[stats]{lm}}. #' @examples #' #' lungDeaths <- cbind(mdeaths, fdeaths) #' fit <- tslm(lungDeaths ~ trend + season) #' fcast <- forecast(fit, h=10) #' #' carPower <- as.matrix(mtcars[,c("qsec","hp")]) #' carmpg <- mtcars[,"mpg"] #' fit <- lm(carPower ~ carmpg) #' fcast <- forecast(fit, newdata=data.frame(carmpg=30)) #' #' @export forecast.mlm <- function(object, newdata, h=10, level=c(80, 95), fan=FALSE, lambda=object$lambda, biasadj=NULL, ts=TRUE, ...) { out <- list(model = object, forecast = vector("list", NCOL(object$coefficients))) cl <- match.call() cl[[1]] <- quote(forecast.lm) cl$object <- quote(mlmsplit(object, index = i)) for (i in seq_along(out$forecast)) { out$forecast[[i]] <- eval(cl) out$forecast[[i]]$series <- colnames(object$coefficients)[i] } out$method <- rep("Multiple linear regression model", length(out$forecast)) names(out$forecast) <- names(out$method) <- colnames(object$coefficients) return(structure(out, class = "mforecast")) } #' Forecasting time series #' #' \code{mforecast} is a class of objects for forecasting from multivariate #' time series or multivariate time series models. The function invokes #' particular \emph{methods} which depend on the class of the first argument. #' #' For example, the function \code{\link{forecast.mlm}} makes multivariate #' forecasts based on the results produced by \code{\link{tslm}}. #' #' @aliases mforecast print.mforecast summary.mforecast as.data.frame.mforecast #' #' @param object a multivariate time series or multivariate time series model #' for which forecasts are required #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, \code{level} is set to \code{seq(51,99,by=3)}. This is #' suitable for fan plots. #' @param robust If TRUE, the function is robust to missing values and outliers #' in \code{object}. This argument is only valid when \code{object} is of class #' \code{mts}. #' @param find.frequency If TRUE, the function determines the appropriate #' period, if the data is of unknown period. #' @param allow.multiplicative.trend If TRUE, then ETS models with #' multiplicative trends are allowed. Otherwise, only additive or no trend ETS #' models are permitted. #' @param ... Additional arguments affecting the forecasts produced. #' @inheritParams forecast.ts #' @return An object of class "\code{mforecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the multivariate #' forecasts and prediction intervals. 
#'
#' The generic accessor functions \code{fitted.values} and \code{residuals}
#' extract various useful features of the value returned by
#' \code{forecast$model}.
#'
#' An object of class \code{"mforecast"} is a list usually containing at least
#' the following elements: \item{model}{A list containing information about the
#' fitted model} \item{method}{The name of the forecasting method as a
#' character string} \item{mean}{Point forecasts as a time series}
#' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper
#' limits for prediction intervals} \item{level}{The confidence values
#' associated with the prediction intervals} \item{x}{The original time series
#' (either \code{object} itself or the time series used to create the model
#' stored as \code{object}).} \item{residuals}{Residuals from the fitted model.
#' For models with additive errors, the residuals will be x minus the fitted
#' values.} \item{fitted}{Fitted values (one-step forecasts)}
#' @author Rob J Hyndman & Mitchell O'Hara-Wild
#' @seealso Other functions which return objects of class \code{"mforecast"}
#' are \code{\link{forecast.mlm}}, \code{forecast.varest}.
#'
#' @export
forecast.mts <- function(object, h=ifelse(frequency(object) > 1, 2 * frequency(object), 10),
                         level=c(80, 95), fan=FALSE, robust=FALSE, lambda = NULL, biasadj = FALSE,
                         find.frequency = FALSE, allow.multiplicative.trend=FALSE, ...) {
  out <- list(forecast = vector("list", NCOL(object)))
  cl <- match.call()
  cl[[1]] <- quote(forecast.ts)
  cl$object <- quote(object[, i])
  for (i in 1:NCOL(object)) {
    out$forecast[[i]] <- eval(cl)
    out$forecast[[i]]$series <- colnames(object)[i]
  }
  out$method <- vapply(out$forecast, function(x) x$method, character(1))
  names(out$forecast) <- names(out$method) <- colnames(object)
  return(structure(out, class = "mforecast"))
}

#' @export
print.mforecast <- function(x, ...) {
  lapply(x$forecast, function(x) {
    cat(paste0(x$series, "\n"))
    print(x)
    cat("\n")
  })
  return(invisible())
}

#' Multivariate forecast plot
#'
#' Plots historical data with multivariate forecasts and prediction intervals.
#'
#' \code{autoplot} will produce an equivalent plot as a ggplot object.
#'
#' @param x Multivariate forecast object of class \code{mforecast}.
#' @param object Multivariate forecast object of class \code{mforecast}. Used
#' for ggplot graphics (S3 method consistency).
#' @param main Main title. Default is the forecast method. For autoplot,
#' specify a vector of titles for each plot.
#' @param xlab X-axis label. For autoplot, specify a vector of labels for each
#' plot.
#' @param PI If \code{FALSE}, confidence intervals will not be plotted, giving
#' only the forecast line.
#' @param facets If TRUE, multiple time series will be faceted. If FALSE, each
#' series will be assigned a colour.
#' @param colour If TRUE, the time series will be assigned a colour aesthetic.
#' @param series Matches an unidentified forecast layer with a coloured object
#' on the plot.
#' @param \dots additional arguments to each individual \code{plot}.
#' @author Mitchell O'Hara-Wild
#' @seealso \code{\link[forecast]{plot.forecast}}, \code{\link[stats]{plot.ts}}
#' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles
#' and practice}, 2nd edition, OTexts: Melbourne, Australia.
#' \url{https://otexts.com/fpp2/} #' @keywords ts #' @examples #' library(ggplot2) #' #' lungDeaths <- cbind(mdeaths, fdeaths) #' fit <- tslm(lungDeaths ~ trend + season) #' fcast <- forecast(fit, h=10) #' plot(fcast) #' autoplot(fcast) #' #' carPower <- as.matrix(mtcars[,c("qsec","hp")]) #' carmpg <- mtcars[,"mpg"] #' fit <- lm(carPower ~ carmpg) #' fcast <- forecast(fit, newdata=data.frame(carmpg=30)) #' plot(fcast, xlab="Year") #' autoplot(fcast, xlab=rep("Year",2)) #' #' @export plot.mforecast <- function(x, main=paste("Forecasts from", unique(x$method)), xlab="time", ...) { oldpar <- par(mfrow = c(length(x$forecast), 1), mar = c(0, 5.1, 0, 2.1), oma = c(6, 0, 5, 0)) on.exit(par(oldpar)) for (fcast in x$forecast) { plot(fcast, main = "", xaxt = "n", ylab = fcast$series, ...) } axis(1) mtext(xlab, outer = TRUE, side = 1, line = 3) title(main = main, outer = TRUE) } #' @export summary.mforecast <- function(object, ...) { class(object) <- c("summary.mforecast", class(object)) object } #' @export print.summary.mforecast <- function(x, ...) { cat(paste("\nForecast method:", unique(x$method))) cat(paste("\n\nModel Information:\n")) print(x$model) cat("\nError measures:\n") print(accuracy(x)) if (is.null(x$forecast)) { cat("\n No forecasts\n") } else { cat("\nForecasts:\n") NextMethod() } } forecast/R/findfrequency.R0000644000176200001440000000376614323125536015266 0ustar liggesusers## A function determining the appropriate period, if the data is of unknown period ## Written by Rob Hyndman #' Find dominant frequency of a time series #' #' \code{findfrequency} returns the period of the dominant frequency of a time #' series. For seasonal data, it will return the seasonal period. For cyclic #' data, it will return the average cycle length. #' #' The dominant frequency is determined from a spectral analysis of the time #' series. First, a linear trend is removed, then the spectral density function #' is estimated from the best fitting autoregressive model (based on the AIC). #' If there is a large (possibly local) maximum in the spectral density #' function at frequency \eqn{f}, then the function will return the period #' \eqn{1/f} (rounded to the nearest integer). If no such dominant frequency #' can be found, the function will return 1. #' #' @param x a numeric vector or time series of class \code{ts} #' @return an integer value #' @author Rob J Hyndman #' @keywords ts #' @examples #' #' findfrequency(USAccDeaths) # Monthly data #' findfrequency(taylor) # Half-hourly data #' findfrequency(lynx) # Annual data #' #' @export findfrequency <- function(x) { n <- length(x) x <- as.ts(x) # Remove trend from data x <- residuals(tslm(x ~ trend)) # Compute spectrum by fitting ar model to largest section of x n.freq <- 500 spec <- spec.ar(c(na.contiguous(x)), plot = FALSE, n.freq = n.freq) if (max(spec$spec) > 10) # Arbitrary threshold chosen by trial and error. 
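  # The threshold of 10 is the heuristic noted above: it guards against
  # treating spurious peaks in near-white-noise series as seasonality.
  # Commented illustration of the idea (not package code; dataset is just
  # an example, mirroring the roxygen examples above):
  #   spc <- spec.ar(c(na.contiguous(residuals(tslm(USAccDeaths ~ trend)))),
  #                  plot = FALSE, n.freq = 500)
  #   floor(1 / spc$freq[which.max(spc$spec)] + 0.5)  # should recover 12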
{ period <- floor(1 / spec$freq[which.max(spec$spec)] + 0.5) if (period == Inf) # Find next local maximum { j <- which(diff(spec$spec) > 0) if (length(j) > 0) { nextmax <- j[1] + which.max(spec$spec[(j[1] + 1):n.freq]) if (nextmax < length(spec$freq)) { period <- floor(1 / spec$freq[nextmax] + 0.5) } else { period <- 1L } } else { period <- 1L } } } else { period <- 1L } return(as.integer(period)) } forecast/R/arfima.R0000644000176200001440000002525414323125536013657 0ustar liggesusers# Remove missing values from end points na.ends <- function(x) { tspx <- tsp(x) # Strip initial and final missing values nonmiss <- (1:length(x))[!is.na(x)] if (length(nonmiss) == 0) { stop("No non-missing data") } j <- nonmiss[1] k <- nonmiss[length(nonmiss)] x <- x[j:k] if (!is.null(tspx)) { x <- ts(x, start = tspx[1] + (j - 1) / tspx[3], frequency = tspx[3]) } return(x) } # Add back missing values at ends # x is original series. y is the series with NAs removed at ends. # returns y with the nas put back at beginning but not end. undo.na.ends <- function(x, y) { n <- length(x) nonmiss <- (1:length(x))[!is.na(x)] j <- nonmiss[1] k <- nonmiss[length(nonmiss)] if (j > 1) { y <- c(rep(NA, j - 1), y) } if (k < n) { y <- c(y, rep(NA, n - k)) } tspx <- tsp(x) if (!is.null(tspx)) { tsp(y) <- tsp(x) } return(y) } ## Undifference unfracdiff <- function(x, y, n, h, d) { bin.c <- (-1) ^ (0:(n + h)) * choose(d, (0:(n + h))) b <- numeric(n) xnew <- LHS <- numeric(h) RHS <- cumsum(y) bs <- cumsum(bin.c[1:h]) b <- bin.c[(1:n) + 1] xnew[1] <- RHS[1] <- y[1] - sum(b * rev(x)) if (h > 1) { for (k in 2:h) { b <- b + bin.c[(1:n) + k] RHS[k] <- RHS[k] - sum(b * rev(x)) LHS[k] <- sum(rev(xnew[1:(k - 1)]) * bs[2:k]) xnew[k] <- RHS[k] - LHS[k] } } tspx <- tsp(x) if (is.null(tspx)) { tspx <- c(1, length(x), 1) } return(ts(xnew, frequency = tspx[3], start = tspx[2] + 1 / tspx[3])) } ## Automatic ARFIMA modelling ## Will return Arima object if d < 0.01 to prevent estimation problems #' Fit a fractionally differenced ARFIMA model #' #' An ARFIMA(p,d,q) model is selected and estimated automatically using the #' Hyndman-Khandakar (2008) algorithm to select p and q and the Haslett and #' Raftery (1989) algorithm to estimate the parameters including d. #' #' This function combines \code{\link[fracdiff]{fracdiff}} and #' \code{\link{auto.arima}} to automatically select and estimate an ARFIMA #' model. The fractional differencing parameter is chosen first assuming an #' ARFIMA(2,d,0) model. Then the data are fractionally differenced using the #' estimated d and an ARMA model is selected for the resulting time series #' using \code{\link{auto.arima}}. Finally, the full ARFIMA(p,d,q) model is #' re-estimated using \code{\link[fracdiff]{fracdiff}}. If \code{estim=="mle"}, #' the ARMA coefficients are refined using \code{\link[stats]{arima}}. #' #' @param y a univariate time series (numeric vector). #' @param drange Allowable values of d to be considered. Default of #' \code{c(0,0.5)} ensures a stationary model is returned. #' @param estim If \code{estim=="ls"}, then the ARMA parameters are calculated #' using the Haslett-Raftery algorithm. If \code{estim=="mle"}, then the ARMA #' parameters are calculated using full MLE via the \code{\link[stats]{arima}} #' function. #' @param model Output from a previous call to \code{arfima}. If model is #' passed, this same model is fitted to y without re-estimating any parameters. #' @param x Deprecated. Included for backwards compatibility. 
#' @param \dots Other arguments passed to \code{\link{auto.arima}} when #' selecting p and q. #' @inheritParams forecast.ts #' #' @return A list object of S3 class \code{"fracdiff"}, which is described in #' the \code{\link[fracdiff]{fracdiff}} documentation. A few additional objects #' are added to the list including \code{x} (the original time series), and the #' \code{residuals} and \code{fitted} values. #' #' @export #' #' @author Rob J Hyndman and Farah Yasmeen #' @seealso \code{\link[fracdiff]{fracdiff}}, \code{\link{auto.arima}}, #' \code{\link{forecast.fracdiff}}. #' @references J. Haslett and A. E. Raftery (1989) Space-time Modelling with #' Long-memory Dependence: Assessing Ireland's Wind Power Resource (with #' discussion); \emph{Applied Statistics} \bold{38}, 1-50. #' #' Hyndman, R.J. and Khandakar, Y. (2008) "Automatic time series forecasting: #' The forecast package for R", \emph{Journal of Statistical Software}, #' \bold{26}(3). #' @keywords ts #' @examples #' #' library(fracdiff) #' x <- fracdiff.sim( 100, ma=-.4, d=.3)$series #' fit <- arfima(x) #' tsdisplay(residuals(fit)) #' arfima <- function(y, drange = c(0, 0.5), estim = c("mle", "ls"), model = NULL, lambda = NULL, biasadj = FALSE, x=y, ...) { estim <- match.arg(estim) # require(fracdiff) seriesname <- deparse(substitute(y)) orig.x <- x if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } # Re-fit arfima model if (!is.null(model)) { fit <- model fit$residuals <- fit$fitted <- fit$lambda <- NULL if (!is.null(lambda)) { fit$lambda <- lambda # Required for residuals.fracdiff() } } # Estimate model else { # Strip initial and final missing values xx <- na.ends(x) # Remove mean meanx <- mean(xx) xx <- xx - meanx # Choose differencing parameter with AR(2) proxy to handle correlations suppressWarnings(fit <- fracdiff::fracdiff(xx, nar = 2, drange = drange)) # Choose p and q d <- fit$d y <- fracdiff::diffseries(xx, d = d) fit <- auto.arima(y, max.P = 0, max.Q = 0, stationary = TRUE, ...) # Refit model using fracdiff suppressWarnings(fit <- fracdiff::fracdiff(xx, nar = fit$arma[1], nma = fit$arma[2], drange = drange)) # Refine parameters with MLE if (estim == "mle") { y <- fracdiff::diffseries(xx, d = fit$d) p <- length(fit$ar) q <- length(fit$ma) fit2 <- try(Arima(y, order = c(p, 0, q), include.mean = FALSE)) if (is.element("try-error", class(fit2))) { fit2 <- try(Arima(y, order = c(p, 0, q), include.mean = FALSE, method = "ML")) } if (!is.element("try-error", class(fit2))) { if (p > 0) { fit$ar <- fit2$coef[1:p] } if (q > 0) { fit$ma <- -fit2$coef[p + (1:q)] } fit$residuals <- fit2$residuals } else { warning("MLE estimation failed. Returning LS estimates") } } } # Add things to model that will be needed by forecast.fracdiff fit$x <- orig.x fit$residuals <- undo.na.ends(x, residuals(fit)) fit$fitted <- x - fit$residuals if (!is.null(lambda)) { fit$fitted <- InvBoxCox(fit$fitted, lambda, biasadj, var(fit$residuals)) attr(lambda, "biasadj") <- biasadj } fit$lambda <- lambda fit$call <- match.call() fit$series <- seriesname fit <- structure(fit, class = c("ARFIMA","fracdiff")) # fit$call$data <- data.frame(x=x) #Consider replacing fit$call with match.call for consistency and tidyness return(fit) } # Forecast the output of fracdiff() or arfima() #' @rdname forecast.Arima #' @export forecast.fracdiff <- function(object, h=10, level=c(80, 95), fan=FALSE, lambda=object$lambda, biasadj=NULL, ...) 
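# Commented usage sketch for the method below (mirrors the arfima() roxygen
# example above; not run here):
#   library(fracdiff)
#   x <- fracdiff.sim(100, ma = -0.4, d = 0.3)$series
#   fit <- arfima(x)
#   fc <- forecast(fit, h = 20)  # dispatches to forecast.fracdiff()
#   plot(fc)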
{ # Extract data x <- object$x <- getResponse(object) if (is.null(x)) { stop("Unable to find original time series") } if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } xx <- na.ends(x) n <- length(xx) meanx <- mean(xx) xx <- xx - meanx # Construct ARMA part of model and forecast with it y <- fracdiff::diffseries(xx, d = object$d) fit <- Arima(y, order = c(length(object$ar), 0, length(object$ma)), include.mean = FALSE, fixed = c(object$ar, -object$ma)) fcast.y <- forecast(fit, h = h, level = level) # Undifference fcast.x <- unfracdiff(xx, fcast.y$mean, n, h, object$d) # Binomial coefficient for expansion of d bin.c <- (-1) ^ (0:(n + h)) * choose(object$d, (0:(n + h))) # Cumulative forecasts of y and forecast of y # b <- numeric(n) # fcast.x <- LHS <- numeric(h) # RHS <- cumsum(fcast.y$mean) # bs <- cumsum(bin.c[1:h]) # b <- bin.c[(1:n)+1] # fcast.x[1] <- RHS[1] <- fcast.y$mean[1] - sum(b*rev(xx)) # if(h>1) # { # for (k in 2:h) # { # b <- b + bin.c[(1:n)+k] # RHS[k] <- RHS[k] - sum(b*rev(xx)) # LHS[k] <- sum(rev(fcast.x[1:(k-1)]) * bs[2:k]) # fcast.x[k] <- RHS[k] - LHS[k] # } # } # Extract stuff from ARMA model p <- fit$arma[1] q <- fit$arma[2] phi <- theta <- numeric(h) if (p > 0) { phi[1:p] <- fit$coef[1:p] } if (q > 0) { theta[1:q] <- fit$coef[p + (1:q)] } # Calculate psi weights new.phi <- psi <- numeric(h) psi[1] <- new.phi[1] <- 1 if (h > 1) { new.phi[2:h] <- -bin.c[2:h] for (i in 2:h) { if (p > 0) { new.phi[i] <- sum(phi[1:(i - 1)] * bin.c[(i - 1):1]) - bin.c[i] } psi[i] <- sum(new.phi[2:i] * rev(psi[1:(i - 1)])) + theta[i - 1] } } # Compute forecast variances fse <- sqrt(cumsum(psi ^ 2) * fit$sigma2) # Compute prediction intervals if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nint <- length(level) upper <- lower <- matrix(NA, ncol = nint, nrow = h) for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- fcast.x - qq * fse upper[, i] <- fcast.x + qq * fse } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") res <- undo.na.ends(x, residuals(fit)) fits <- x - res data.tsp <- tsp(x) if (is.null(data.tsp)) { data.tsp <- c(1, length(x), 1) } mean.fcast <- ts(fcast.x + meanx, frequency = data.tsp[3], start = data.tsp[2] + 1 / data.tsp[3]) lower <- ts(lower + meanx, frequency = data.tsp[3], start = data.tsp[2] + 1 / data.tsp[3]) upper <- ts(upper + meanx, frequency = data.tsp[3], start = data.tsp[2] + 1 / data.tsp[3]) method <- paste("ARFIMA(", p, ",", round(object$d, 2), ",", q, ")", sep = "") if (!is.null(lambda)) { x <- InvBoxCox(x, lambda) fits <- InvBoxCox(fits, lambda) mean.fcast <- InvBoxCox(mean.fcast, lambda, biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } seriesname <- if (!is.null(object$series)) { object$series } else { deparse(object$call$x) } return(structure(list( x = x, mean = mean.fcast, upper = upper, lower = lower, level = level, method = method, model = object, series = seriesname, residuals = res, fitted = fits ), class = "forecast")) } # Fitted values from arfima() #' @rdname fitted.Arima #' @export fitted.ARFIMA <- function(object, h = 1, ...) 
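# The body below returns the stored one-step fits when h = 1; for h > 1 it
# delegates to hfitted() (defined in forecast.R), which refits the model on
# expanding windows. Objects fitted by fracdiff() directly carry no stored
# fits, so only one-step fits are available for them.
# Commented illustration (dataset is just an example):
#   fit <- arfima(WWWusage)
#   f2 <- fitted(fit, h = 2)  # two-step-ahead in-sample fits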
{
  if (!is.null(object$fitted)) { # Object produced by arfima()
    if (h == 1) {
      return(object$fitted)
    }
    else {
      return(hfitted(object = object, h = h, FUN = "arfima", ...))
    }
  }
  else {
    if (h != 1) {
      warning("h-step fits are not supported for models produced by fracdiff(), returning one-step fits (h=1)")
    }
    x <- getResponse(object)
    return(x - residuals(object))
  }
}
forecast/R/forecast.R0000644000176200001440000005646014633662406014233 0ustar liggesusers#' Forecasting time series
#'
#' \code{forecast} is a generic function for forecasting from time series or
#' time series models. The function invokes particular \emph{methods} which
#' depend on the class of the first argument.
#'
#' For example, the function \code{\link{forecast.Arima}} makes forecasts based
#' on the results produced by \code{\link[stats]{arima}}.
#'
#' If \code{model=NULL}, the function \code{\link{forecast.ts}} makes forecasts
#' using \code{\link{ets}} models (if the data are non-seasonal or the seasonal
#' period is 12 or less) or \code{\link{stlf}} (if the seasonal period is 13 or
#' more).
#'
#' If \code{model} is not \code{NULL}, \code{forecast.ts} will apply the
#' \code{model} to the \code{object} time series, and then generate forecasts
#' accordingly.
#'
#' @aliases print.forecast summary.forecast as.data.frame.forecast as.ts.forecast
#'
#' @param object a time series or time series model for which forecasts are
#' required
#' @param h Number of periods for forecasting
#' @param level Confidence level for prediction intervals.
#' @param fan If TRUE, \code{level} is set to \code{seq(51,99,by=3)}. This is
#' suitable for fan plots.
#' @param robust If TRUE, the function is robust to missing values and outliers
#' in \code{object}. This argument is only valid when \code{object} is of class
#' \code{ts}.
#' @param lambda Box-Cox transformation parameter. If \code{lambda="auto"},
#' then a transformation is automatically selected using \code{BoxCox.lambda}.
#' The transformation is ignored if NULL. Otherwise, the data are transformed
#' before the model is estimated.
#' @param find.frequency If TRUE, the function determines the appropriate
#' period, if the data is of unknown period.
#' @param allow.multiplicative.trend If TRUE, then ETS models with
#' multiplicative trends are allowed. Otherwise, only additive or no trend ETS
#' models are permitted.
#' @param model An object describing a time series model; e.g., one of class
#' \code{ets}, \code{Arima}, \code{bats}, \code{tbats}, or \code{nnetar}.
#' @param ... Additional arguments affecting the forecasts produced. If
#' \code{model=NULL}, \code{forecast.ts} passes these to \code{\link{ets}} or
#' \code{\link{stlf}} depending on the frequency of the time series. If
#' \code{model} is not \code{NULL}, the arguments are passed to the relevant
#' modelling function.
#' @inheritParams BoxCox
#'
#' @return An object of class "\code{forecast}".
#'
#' The function \code{summary} is used to obtain and print a summary of the
#' results, while the function \code{plot} produces a plot of the forecasts and
#' prediction intervals.
#'
#' The generic accessor functions \code{fitted.values} and \code{residuals}
#' extract various useful features of the value returned by
#' \code{forecast$model}.
#' #' An object of class \code{"forecast"} is a list usually containing at least #' the following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' For models with additive errors, the residuals will be x minus the fitted #' values.} \item{fitted}{Fitted values (one-step forecasts)} #' @author Rob J Hyndman #' @seealso Other functions which return objects of class \code{"forecast"} are #' \code{\link{forecast.ets}}, \code{\link{forecast.Arima}}, #' \code{\link{forecast.HoltWinters}}, \code{\link{forecast.StructTS}}, #' \code{\link{meanf}}, \code{\link{rwf}}, \code{\link{splinef}}, #' \code{\link{thetaf}}, \code{\link{croston}}, \code{\link{ses}}, #' \code{\link{holt}}, \code{\link{hw}}. #' @keywords ts #' @examples #' #' WWWusage %>% forecast %>% plot #' fit <- ets(window(WWWusage, end=60)) #' fc <- forecast(WWWusage, model=fit) #' @export forecast.ts <- function(object, h=ifelse(frequency(object) > 1, 2 * frequency(object), 10), level=c(80, 95), fan=FALSE, robust=FALSE, lambda = NULL, biasadj = FALSE, find.frequency = FALSE, allow.multiplicative.trend=FALSE, model=NULL, ...) { n <- length(object) if (find.frequency) { object <- ts(object, frequency = findfrequency(object)) obj.freq <- frequency(object) } else { obj.freq <- frequency(object) } if (robust) { object <- tsclean(object, replace.missing = TRUE, lambda = lambda) } if (!is.null(model)) { if (inherits(model, "forecast")) { model <- model$model } if (inherits(model, "ets")) { fit <- ets(object, model = model, ...) } else if (inherits(model, "Arima")) { fit <- Arima(object, model = model, ...) } else if (inherits(model, "tbats")) { fit <- tbats(object, model = model, ...) } else if (inherits(model, "bats")) { fit <- bats(object, model = model, ...) } else if (inherits(model, "nnetar")) { fit <- nnetar(object, model = model, ...) } else { stop("Unknown model class") } return(forecast(fit, h = h, level = level, fan = fan)) } if (n > 3) { if (obj.freq < 13) { out <- forecast( ets(object, lambda = lambda, biasadj = biasadj, allow.multiplicative.trend = allow.multiplicative.trend, ...), h = h, level = level, fan = fan ) } else if (n > 2 * obj.freq) { out <- stlf( object, h = h, level = level, fan = fan, lambda = lambda, biasadj = biasadj, allow.multiplicative.trend = allow.multiplicative.trend, ... ) } else { out <- forecast( ets(object, model = "ZZN", lambda = lambda, biasadj = biasadj, allow.multiplicative.trend = allow.multiplicative.trend, ...), h = h, level = level, fan = fan ) } } else { out <- meanf(object, h = h, level = level, fan = fan, lambda = lambda, biasadj = biasadj, ...) } out$series <- deparse(substitute(object)) return(out) } #' @rdname forecast.ts #' @method forecast default #' @export forecast.default <- function(object, ...) forecast.ts(object, ...) #' @rdname forecast.ts #' @export print.forecast <- function(x, ...) { print(as.data.frame(x)) } #' @export summary.forecast <- function(object, ...) 
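# A quick illustration of the dispatch in forecast.ts() above (commented;
# the dataset choices are examples only):
#   forecast(USAccDeaths)  # frequency 12 -> ets()
#   forecast(taylor)       # frequency 336 and n > 2*frequency -> stlf()
# summary.forecast() below simply tags the object so that
# print.summary.forecast() adds model information and accuracy() measures
# to the printout.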
{
  class(object) <- c("summary.forecast", class(object))
  object
}

#' @export
print.summary.forecast <- function(x, ...) {
  cat(paste("\nForecast method:", x$method))
  # cat(paste("\n\nCall:\n",deparse(x$call)))
  cat(paste("\n\nModel Information:\n"))
  print(x$model)
  cat("\nError measures:\n")
  print(accuracy(x))
  if (is.null(x$mean)) {
    cat("\n No forecasts\n")
  } else {
    cat("\nForecasts:\n")
    NextMethod()
  }
}

plotlmforecast <- function(object, PI, shaded, shadecols, col, fcol, pi.col, pi.lty,
                           xlim=NULL, ylim, main, ylab, xlab, ...) {
  xvar <- attributes(terms(object$model))$term.labels
  if (length(xvar) > 1) {
    stop("Forecast plot for regression models only available for a single predictor")
  } else if (ncol(object$newdata) == 1) { # Make sure column has correct name
    colnames(object$newdata) <- xvar
  }
  if (is.null(xlim)) {
    xlim <- range(object$newdata[, xvar], model.frame(object$model)[, xvar])
  }
  if (is.null(ylim)) {
    ylim <- range(object$upper, object$lower, fitted(object$model) + residuals(object$model))
  }
  plot(
    formula(object$model), data = model.frame(object$model),
    xlim = xlim, ylim = ylim, xlab = xlab, ylab = ylab, main = main, col = col, ...
  )
  abline(object$model)
  nf <- length(object$mean)
  if (PI) {
    nint <- length(object$level)
    idx <- rev(order(object$level))
    if (is.null(shadecols)) {
      # require(colorspace)
      if (min(object$level) < 50) { # Using very small confidence levels.
        shadecols <- rev(colorspace::sequential_hcl(100)[object$level])
      } else { # This should happen almost all the time. Colors mapped to levels.
        shadecols <- rev(colorspace::sequential_hcl(52)[object$level - 49])
      }
    }
    if (length(shadecols) == 1) {
      if (shadecols == "oldstyle") { # Default behaviour up to v3.25.
        shadecols <- heat.colors(nint + 2)[switch(1 + (nint > 1), 2, nint:1) + 1]
      }
    }
    for (i in 1:nf) {
      for (j in 1:nint) {
        if (shaded) {
          lines(rep(object$newdata[i, xvar], 2), c(object$lower[i, idx[j]], object$upper[i, idx[j]]), col = shadecols[j], lwd = 6)
        } else {
          lines(rep(object$newdata[i, xvar], 2), c(object$lower[i, idx[j]], object$upper[i, idx[j]]), col = pi.col, lty = pi.lty)
        }
      }
    }
  }
  points(object$newdata[, xvar], object$mean, pch = 19, col = fcol)
}

#' Forecast plot
#'
#' Plots historical data with forecasts and prediction intervals.
#'
#' \code{autoplot} will produce a ggplot object.
#'
#' @aliases plot.splineforecast autoplot.splineforecast
#' @param x Forecast object produced by \code{\link{forecast}}.
#' @param object Forecast object produced by \code{\link{forecast}}. Used for
#' ggplot graphics (S3 method consistency).
#' @param include number of values from time series to include in plot. Default
#' is all values.
#' @param PI Logical flag indicating whether to plot prediction intervals.
#' @param showgap If \code{showgap=FALSE}, the gap between the historical
#' observations and the forecasts is removed.
#' @param shaded Logical flag indicating whether prediction intervals should be
#' shaded (\code{TRUE}) or lines (\code{FALSE}).
#' @param shadebars Logical flag indicating if prediction intervals should be
#' plotted as shaded bars (if \code{TRUE}) or a shaded polygon (if
#' \code{FALSE}). Ignored if \code{shaded=FALSE}. Bars are plotted by default
#' if there are fewer than five forecast horizons.
#' @param shadecols Colors for shaded prediction intervals. To get default
#' colors used prior to v3.26, set \code{shadecols="oldstyle"}.
#' @param col Colour for the data line.
#' @param fcol Colour for the forecast line.
#' @param flty Line type for the forecast line.
#' @param flwd Line width for the forecast line.
#' @param pi.col If \code{shaded=FALSE} and \code{PI=TRUE}, the prediction #' intervals are plotted in this colour. #' @param pi.lty If \code{shaded=FALSE} and \code{PI=TRUE}, the prediction #' intervals are plotted using this line type. #' @param ylim Limits on y-axis. #' @param main Main title. #' @param xlab X-axis label. #' @param ylab Y-axis label. #' @param series Matches an unidentified forecast layer with a coloured object #' on the plot. #' @param fitcol Line colour for fitted values. #' @param type 1-character string giving the type of plot desired. As for #' \code{\link[graphics]{plot.default}}. #' @param pch Plotting character (if \code{type=="p"} or \code{type=="o"}). #' @param ... Other plotting parameters to affect the plot. #' @return None. #' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{plot.ts}} #' @references Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles #' and practice}, 2nd edition, OTexts: Melbourne, Australia. #' \url{https://otexts.com/fpp2/} #' @keywords ts #' @examples #' library(ggplot2) #' #' wine.fit <- hw(wineind,h=48) #' plot(wine.fit) #' autoplot(wine.fit) #' #' fit <- tslm(wineind ~ fourier(wineind,4)) #' fcast <- forecast(fit, newdata=data.frame(fourier(wineind,4,20))) #' autoplot(fcast) #' #' @export plot.forecast <- function(x, include, PI=TRUE, showgap = TRUE, shaded=TRUE, shadebars=(length(x$mean) < 5), shadecols=NULL, col=1, fcol=4, pi.col=1, pi.lty=2, ylim=NULL, main=NULL, xlab="", ylab="", type="l", flty = 1, flwd = 2, ...) { if (is.element("x", names(x))) { # Assume stored as x xx <- x$x } else { xx <- NULL } if (is.null(x$lower) || is.null(x$upper) || is.null(x$level)) { PI <- FALSE } else if (!is.finite(max(x$upper))) { PI <- FALSE } if (!shaded) { shadebars <- FALSE } if (is.null(main)) { main <- paste("Forecasts from ", x$method, sep = "") } if (PI) { x$upper <- as.matrix(x$upper) x$lower <- as.matrix(x$lower) } if (is.element("lm", class(x$model)) && !is.element("ts", class(x$mean))) # Non time series linear model { plotlmforecast( x, PI = PI, shaded = shaded, shadecols = shadecols, col = col, fcol = fcol, pi.col = pi.col, pi.lty = pi.lty, ylim = ylim, main = main, xlab = xlab, ylab = ylab, ... ) if (PI) { return(invisible(list(mean = x$mean, lower = as.matrix(x$lower), upper = as.matrix(x$upper)))) } else { return(invisible(list(mean = x$mean))) } } # Otherwise assume x is from a time series forecast n <- length(xx) if (n == 0) { include <- 0 } else if (missing(include)) { include <- length(xx) } # Check if all historical values are missing if (n > 0) { if (sum(is.na(xx)) == length(xx)) { n <- 0 } } if (n > 0) { xx <- as.ts(xx) freq <- frequency(xx) strt <- start(xx) nx <- max(which(!is.na(xx))) xxx <- xx[1:nx] include <- min(include, nx) if (!showgap) { lastObs <- x$x[length(x$x)] lastTime <- time(x$x)[length(x$x)] x$mean <- ts(c(lastObs, x$mean), start = lastTime, frequency = freq) x$upper <- ts(rbind(lastObs, x$upper), start = lastTime, frequency = freq) x$lower <- ts(rbind(lastObs, x$lower), start = lastTime, frequency = freq) } } else { freq <- frequency(x$mean) strt <- start(x$mean) nx <- include <- 1 xx <- xxx <- ts(NA, frequency = freq, end = tsp(x$mean)[1] - 1 / freq) if (!showgap) { warning("Removing the gap requires historical data, provide this via model$x. 
Defaulting showgap to TRUE.") } } pred.mean <- x$mean if (is.null(ylim)) { ylim <- range(c(xx[(n - include + 1):n], pred.mean), na.rm = TRUE) if (PI) { ylim <- range(ylim, x$lower, x$upper, na.rm = TRUE) } } npred <- length(pred.mean) tsx <- is.ts(pred.mean) if (!tsx) { pred.mean <- ts(pred.mean, start = nx + 1, frequency = 1) type <- "p" } plot( ts(c(xxx[(nx - include + 1):nx], rep(NA, npred)), end = tsp(xx)[2] + (nx - n) / freq + npred / freq, frequency = freq), xlab = xlab, ylim = ylim, ylab = ylab, main = main, col = col, type = type, ... ) if (PI) { if (is.ts(x$upper)) { xxx <- time(x$upper) } else { xxx <- tsp(pred.mean)[1] - 1 / freq + (1:npred) / freq } idx <- rev(order(x$level)) nint <- length(x$level) if (is.null(shadecols)) { # require(colorspace) if (min(x$level) < 50) { # Using very small confidence levels. shadecols <- rev(colorspace::sequential_hcl(100)[x$level]) } else { # This should happen almost all the time. Colors mapped to levels. shadecols <- rev(colorspace::sequential_hcl(52)[x$level - 49]) } } if (length(shadecols) == 1) { if (shadecols == "oldstyle") { # Default behaviour up to v3.25. shadecols <- heat.colors(nint + 2)[switch(1 + (nint > 1), 2, nint:1) + 1] } } for (i in 1:nint) { if (shadebars) { for (j in 1:npred) { polygon( xxx[j] + c(-0.5, 0.5, 0.5, -0.5) / freq, c(rep(x$lower[j, idx[i]], 2), rep(x$upper[j, idx[i]], 2)), col = shadecols[i], border = FALSE ) } } else if (shaded) { polygon( c(xxx, rev(xxx)), c(x$lower[, idx[i]], rev(x$upper[, idx[i]])), col = shadecols[i], border = FALSE ) } else if (npred == 1) { lines(c(xxx) + c(-0.5, 0.5) / freq, rep(x$lower[, idx[i]], 2), col = pi.col, lty = pi.lty) lines(c(xxx) + c(-0.5, 0.5) / freq, rep(x$upper[, idx[i]], 2), col = pi.col, lty = pi.lty) } else { lines(x$lower[, idx[i]], col = pi.col, lty = pi.lty) lines(x$upper[, idx[i]], col = pi.col, lty = pi.lty) } } } if (npred > 1 && !shadebars && tsx) { lines(pred.mean, lty = flty, lwd = flwd, col = fcol) } else { points(pred.mean, col = fcol, pch = 19) } if (PI) { invisible(list(mean = pred.mean, lower = x$lower, upper = x$upper)) } else { invisible(list(mean = pred.mean)) } } #' @export predict.default <- function(object, ...) { forecast(object, ...) } hfitted <- function(object, h=1, FUN=NULL, ...) { UseMethod("hfitted") } #' @export hfitted.default <- function(object, h=1, FUN=NULL, ...) 
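# h-step in-sample fits are built by refitting the model to x[1:i] (keeping
# the original parameters via model = object) and storing the h-step-ahead
# forecast as the fitted value at time i + h. A simplified sketch of the loop
# below, ignoring the xreg handling and bias adjustment (assumes an ets fit):
#   for (i in 1:(n - h)) {
#     refit <- ets(x[1:i], model = object, use.initial.values = TRUE)
#     fits[i + h] <- forecast(refit, h = h)$mean[h]
#   }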
{ if (h == 1) { return(fitted(object)) } # Attempt to get model function if (is.null(FUN)) { FUN <- class(object) for (i in FUN) { if (exists(i)) { if (typeof(eval(parse(text = i)[[1]])) == "closure") { FUN <- i i <- "Y" break } } } if (i != "Y") { stop("Could not find appropriate function to refit, specify FUN=function") } } x <- getResponse(object) tspx <- tsp(x) fits <- fitted(object) * NA n <- length(fits) refitarg <- list(x = NULL, model = object) names(refitarg)[1] <- names(formals(FUN))[1] fcarg <- list(h = h, biasadj=TRUE, lambda=object$lambda) if (FUN == "ets") { refitarg$use.initial.values <- TRUE } for (i in 1:(n - h)) { refitarg[[1]] <- ts(x[1:i], start = tspx[1], frequency = tspx[3]) if(!is.null(object$xreg) & any(colnames(object$xreg)!="drift")){ if(any(colnames(object$xreg)=="drift")){ idx <- which(colnames(object$xreg)=="drift") refitarg$xreg <- ts(object$xreg[1:i, -idx], start = tspx[1], frequency = tspx[3]) fcarg$xreg <- ts(object$xreg[(i + 1):(i + h), -idx], start = tspx[1] + i / tspx[3], frequency = tspx[3]) }else{ refitarg$xreg <- ts(object$xreg[1:i, ], start = tspx[1], frequency = tspx[3]) fcarg$xreg <- ts(object$xreg[(i + 1):(i + h), ], start = tspx[1] + i / tspx[3], frequency = tspx[3]) } } fcarg$object <- try(suppressWarnings(do.call(FUN, refitarg)), silent = TRUE) if (!is.element("try-error", class(fcarg$object))) { # Keep original variance estimate (for consistent bias adjustment) if(!is.null(object$sigma2)) fcarg$object$sigma2 <- object$sigma2 fits[i + h] <- suppressWarnings(do.call("forecast", fcarg)$mean[h]) } } return(fits) } # The following function is for when users don't realise they already have the forecasts. # e.g., with the dshw(), meanf() or rwf() functions. #' @export forecast.forecast <- function(object, ...) { input_names <- as.list(substitute(list(...))) # Read level argument if (is.element("level", names(input_names))) { level <- list(...)[["level"]] if (!identical(level, object$level)) { stop("Please set the level argument when the forecasts are first computed") } } # Read h argument if (is.element("h", names(input_names))) { h <- list(...)[["h"]] if (h > length(object$mean)) { stop("Please select a longer horizon when the forecasts are first computed") } tspf <- tsp(object$mean) object$mean <- ts(object$mean[1:h], start = tspf[1], frequency = tspf[3]) if (!is.null(object$upper)) { object$upper <- ts(object$upper[1:h, , drop = FALSE], start = tspf[1], frequency = tspf[3]) object$lower <- ts(object$lower[1:h, , drop = FALSE], start = tspf[1], frequency = tspf[3]) } } return(object) } #' @export subset.forecast <- function(x, ...) { tspx <- tsp(x$mean) x$mean <- subset(x$mean, ...) x$lower <- subset(ts(x$lower, start = tspx[1], frequency = tspx[3]), ...) x$upper <- subset(ts(x$upper, start = tspx[1], frequency = tspx[3]), ...) return(x) } #' Is an object a particular forecast type? #' #' Returns true if the forecast object is of a particular type #' #' @param x object to be tested #' @export is.forecast <- function(x) { inherits(x, "forecast") } #' @export as.ts.forecast <- function(x, ...) { df <- ts(as.matrix(as.data.frame.forecast(x))) tsp(df) <- tsp(x$mean) return(df) } #' @export as.data.frame.mforecast <- function(x, ...) 
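# Result shape (illustrative): one row per horizon per series, with columns
# Time, Series, Point Forecast, and the Lo/Hi interval columns. Commented
# example, mirroring the lungDeaths example used elsewhere in this package:
#   lungDeaths <- cbind(mdeaths, fdeaths)
#   fc <- forecast(lungDeaths, h = 4)
#   head(as.data.frame(fc))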
{ tmp <- lapply(x$forecast, as.data.frame) series <- names(tmp) times <- rownames(tmp[[1]]) h <- NROW(tmp[[1]]) output <- cbind(Time = times, Series = rep(series[1], h), tmp[[1]]) if (length(tmp) > 1) { for (i in 2:length(tmp)) output <- rbind( output, cbind(Time = times, Series = rep(series[i], h), tmp[[i]]) ) } rownames(output) <- NULL return(output) } #' @export as.data.frame.forecast <- function(x, ...) { nconf <- length(x$level) out <- matrix(x$mean, ncol = 1) ists <- is.ts(x$mean) fr.x <- frequency(x$mean) if (ists) { out <- ts(out) attributes(out)$tsp <- attributes(x$mean)$tsp } names <- c("Point Forecast") if (!is.null(x$lower) && !is.null(x$upper) && !is.null(x$level)) { x$upper <- as.matrix(x$upper) x$lower <- as.matrix(x$lower) for (i in 1:nconf) { out <- cbind(out, x$lower[, i, drop = FALSE], x$upper[, i, drop = FALSE]) names <- c(names, paste("Lo", x$level[i]), paste("Hi", x$level[i])) } } colnames(out) <- names tx <- time(x$mean) if (max(abs(tx - round(tx))) < 1e-11) { nd <- 0L } else { nd <- max(round(log10(fr.x) + 1), 2L) } if(nd == 0L) rownames(out) <- round(tx) else rownames(out) <- format(tx, nsmall = nd, digits = nd) # Rest of function borrowed from print.ts(), but with header() omitted if (!ists) { return(as.data.frame(out)) } x <- as.ts(out) calendar <- any(fr.x == c(4, 12)) && length(start(x)) == 2L Tsp <- tsp(x) if (is.null(Tsp)) { warning("series is corrupt, with no 'tsp' attribute") print(unclass(x)) return(invisible(x)) } nn <- 1 + round((Tsp[2L] - Tsp[1L]) * Tsp[3L]) if (NROW(x) != nn) { warning(gettextf("series is corrupt: length %d with 'tsp' implying %d", NROW(x), nn), domain = NA, call. = FALSE) calendar <- FALSE } if (NCOL(x) == 1) { if (calendar) { if (fr.x > 1) { dn2 <- if (fr.x == 12) { month.abb } else if (fr.x == 4) { c("Qtr1", "Qtr2", "Qtr3", "Qtr4") } else { paste("p", 1L:fr.x, sep = "") } if (NROW(x) <= fr.x && start(x)[1L] == end(x)[1L]) { dn1 <- start(x)[1L] dn2 <- dn2[1 + (start(x)[2L] - 2 + seq_along(x)) %% fr.x] x <- matrix( format(x, ...), nrow = 1L, byrow = TRUE, dimnames = list(dn1, dn2) ) } else { start.pad <- start(x)[2L] - 1 end.pad <- fr.x - end(x)[2L] dn1 <- start(x)[1L]:end(x)[1L] x <- matrix( c(rep.int("", start.pad), format(x, ...), rep.int("", end.pad)), ncol = fr.x, byrow = TRUE, dimnames = list(dn1, dn2) ) } } else { attributes(x) <- NULL names(x) <- tx } } else { attr(x, "class") <- attr(x, "tsp") <- attr(x, "na.action") <- NULL } } else { if (calendar && fr.x > 1) { tm <- time(x) t2 <- cycle(x) p1 <- format(floor(tm + 1e-8)) rownames(x) <- if (fr.x == 12) { paste(month.abb[t2], p1, sep = " ") } else { paste( p1, if (fr.x == 4) { c("Q1", "Q2", "Q3", "Q4")[t2] } else { format(t2) }, sep = " " ) } } else { rownames(x) <- format(time(x), nsmall = nd) } attr(x, "class") <- attr(x, "tsp") <- attr(x, "na.action") <- NULL } return(as.data.frame(x)) } forecast/R/etsforecast.R0000644000176200001440000002635414272665773014763 0ustar liggesusers#' Forecasting using ETS models #' #' Returns forecasts and other information for univariate ETS models. #' #' #' @param object An object of class "\code{ets}". Usually the result of a call #' to \code{\link{ets}}. #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param simulate If TRUE, prediction intervals are produced by simulation rather #' than using analytic formulae. Errors are assumed to be normally distributed. 
#' @param bootstrap If TRUE, then prediction intervals are produced by simulation using #' resampled errors (rather than normally distributed errors). #' @param npaths Number of sample paths used in computing simulated prediction #' intervals. #' @param PI If TRUE, prediction intervals are produced, otherwise only point #' forecasts are calculated. If \code{PI} is FALSE, then \code{level}, #' \code{fan}, \code{simulate}, \code{bootstrap} and \code{npaths} are all #' ignored. #' @param ... Other arguments. #' @inheritParams forecast.ts #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.ets}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' For models with additive errors, the residuals are x - fitted values. For #' models with multiplicative errors, the residuals are equal to x /(fitted #' values) - 1.} \item{fitted}{Fitted values (one-step forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link{ets}}, \code{\link{ses}}, \code{\link{holt}}, #' \code{\link{hw}}. #' @keywords ts #' @examples #' fit <- ets(USAccDeaths) #' plot(forecast(fit,h=48)) #' #' @export forecast.ets #' @export forecast.ets <- function(object, h=ifelse(object$m > 1, 2 * object$m, 10), level=c(80, 95), fan=FALSE, simulate=FALSE, bootstrap=FALSE, npaths=5000, PI=TRUE, lambda=object$lambda, biasadj=NULL, ...) 
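# The body below uses exact analytic mean/variance expressions where they
# exist (the class1/class2/class3 helpers further down) and falls back to
# simulation via pegelsfcast.C() otherwise. Commented usage, as in the
# roxygen example above:
#   fit <- ets(USAccDeaths)
#   fc <- forecast(fit, h = 48, level = c(80, 95))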
{ # Check inputs # if(h>2000 | h<=0) if (h <= 0) { stop("Forecast horizon out of bounds") } if (is.null(lambda)) { biasadj <- FALSE } else { if (is.null(biasadj)) { biasadj <- attr(lambda, "biasadj") } if (!is.logical(biasadj)) { warning("biasadj information not found, defaulting to FALSE.") biasadj <- FALSE } } if (!PI && !biasadj) { simulate <- bootstrap <- fan <- FALSE if (!biasadj) { npaths <- 2 } # Just to avoid errors level <- 90 } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } # Order levels level <- sort(level) n <- length(object$x) damped <- as.logical(object$components[4]) if (bootstrap) { simulate <- TRUE } if (simulate) { f <- pegelsfcast.C(h, object, level = level, bootstrap = bootstrap, npaths = npaths) } else if (object$components[1] == "A" && is.element(object$components[2], c("A", "N")) && is.element(object$components[3], c("N", "A"))) { f <- class1(h, object$states[n + 1, ], object$components[2], object$components[3], damped, object$m, object$sigma2, object$par) } else if (object$components[1] == "M" && is.element(object$components[2], c("A", "N")) && is.element(object$components[3], c("N", "A"))) { f <- class2(h, object$states[n + 1, ], object$components[2], object$components[3], damped, object$m, object$sigma2, object$par) } else if (object$components[1] == "M" && object$components[3] == "M" && object$components[2] != "M") { f <- class3(h, object$states[n + 1, ], object$components[2], object$components[3], damped, object$m, object$sigma2, object$par) } else { f <- pegelsfcast.C(h, object, level = level, bootstrap = bootstrap, npaths = npaths) } tsp.x <- tsp(object$x) if (!is.null(tsp.x)) { start.f <- tsp(object$x)[2] + 1 / object$m } else { start.f <- length(object$x) + 1 } out <- list( model = object, mean = future_msts(object$x, f$mu), level = level, x = object$x ) if (PI || biasadj) { if (!is.null(f$var)) { out$lower <- out$upper <- ts(matrix(NA, ncol = length(level), nrow = h)) colnames(out$lower) <- colnames(out$upper) <- paste(level, "%", sep = "") for (i in 1:length(level)) { marg.error <- sqrt(f$var) * abs(qnorm((100 - level[i]) / 200)) out$lower[, i] <- out$mean - marg.error out$upper[, i] <- out$mean + marg.error } out$lower <- copy_msts(out$mean, out$lower) out$upper <- copy_msts(out$mean, out$upper) } else if (!is.null(f$lower)) { out$lower <- copy_msts(out$mean, f$lower) out$upper <- copy_msts(out$mean, f$upper) } else if (PI) { warning("No prediction intervals for this model") } else if (any(biasadj)) { warning("No bias adjustment possible") } } out$fitted <- copy_msts(object$x, fitted(object)) out$method <- object$method if (!is.null(object$series)) { out$series <- object$series } else { out$series <- object$call$y } out$residuals <- copy_msts(object$x, residuals(object)) if (!is.null(lambda)) { # out$x <- InvBoxCox(object$x,lambda) # out$fitted <- InvBoxCox(out$fitted,lambda) out$mean <- InvBoxCox(out$mean, lambda, biasadj, out) if (PI) # PI = TRUE { out$lower <- InvBoxCox(out$lower, lambda) out$upper <- InvBoxCox(out$upper, lambda) } } if (!PI) { out$lower <- out$upper <- out$level <- NULL } return(structure(out, class = "forecast")) } pegelsfcast.C <- function(h, obj, npaths, level, bootstrap) { y.paths <- matrix(NA, nrow = npaths, ncol = h) obj$lambda <- NULL # No need to transform these here as we do it later. 
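  # Simulation-based intervals: draw npaths future sample paths from the
  # fitted model, then take empirical quantiles at each horizon (type = 8);
  # with bootstrap = TRUE the paths resample residuals rather than assuming
  # Gaussian errors.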
for (i in 1:npaths) y.paths[i, ] <- simulate.ets(obj, h, future = TRUE, bootstrap = bootstrap) y.f <- .C( "etsforecast", as.double(obj$states[length(obj$x) + 1, ]), as.integer(obj$m), as.integer(switch(obj$components[2], "N" = 0, "A" = 1, "M" = 2)), as.integer(switch(obj$components[3], "N" = 0, "A" = 1, "M" = 2)), as.double(ifelse(obj$components[4] == "FALSE", 1, obj$par["phi"])), as.integer(h), as.double(numeric(h)), PACKAGE = "forecast" )[[7]] if (abs(y.f[1] + 99999) < 1e-7) { stop("Problem with multiplicative damped trend") } lower <- apply(y.paths, 2, quantile, 0.5 - level / 200, type = 8, na.rm = TRUE) upper <- apply(y.paths, 2, quantile, 0.5 + level / 200, type = 8, na.rm = TRUE) if (length(level) > 1) { lower <- t(lower) upper <- t(upper) } return(list(mu = y.f, lower = lower, upper = upper)) } class1 <- function(h, last.state, trendtype, seasontype, damped, m, sigma2, par) { p <- length(last.state) H <- matrix(c(1, rep(0, p - 1)), nrow = 1) if (seasontype == "A") { H[1, p] <- 1 } if (trendtype == "A") { if (damped) { H[1, 2] <- par["phi"] } else { H[1, 2] <- 1 } } F <- matrix(0, p, p) F[1, 1] <- 1 if (trendtype == "A") { if (damped) { F[1, 2] <- F[2, 2] <- par["phi"] } else { F[1, 2] <- F[2, 2] <- 1 } } if (seasontype == "A") { F[p - m + 1, p] <- 1 F[(p - m + 2):p, (p - m + 1):(p - 1)] <- diag(m - 1) } G <- matrix(0, nrow = p, ncol = 1) G[1, 1] <- par["alpha"] if (trendtype == "A") { G[2, 1] <- par["beta"] } if (seasontype == "A") { G[3, 1] <- par["gamma"] } mu <- numeric(h) Fj <- diag(p) cj <- numeric(h - 1) if (h > 1) { for (i in 1:(h - 1)) { mu[i] <- H %*% Fj %*% last.state cj[i] <- H %*% Fj %*% G Fj <- Fj %*% F } cj2 <- cumsum(cj ^ 2) var <- sigma2 * c(1, 1 + cj2) } else { var <- sigma2 } mu[h] <- H %*% Fj %*% last.state return(list(mu = mu, var = var, cj = cj)) } class2 <- function(h, last.state, trendtype, seasontype, damped, m, sigma2, par) { tmp <- class1(h, last.state, trendtype, seasontype, damped, m, sigma2, par) theta <- numeric(h) theta[1] <- tmp$mu[1] ^ 2 if (h > 1) { for (j in 2:h) theta[j] <- tmp$mu[j] ^ 2 + sigma2 * sum(tmp$cj[1:(j - 1)] ^ 2 * theta[(j - 1):1]) } var <- (1 + sigma2) * theta - tmp$mu ^ 2 return(list(mu = tmp$mu, var = var)) } class3 <- function(h, last.state, trendtype, seasontype, damped, m, sigma2, par) { p <- length(last.state) H1 <- matrix(rep(1, 1 + (trendtype != "N")), nrow = 1) H2 <- matrix(c(rep(0, m - 1), 1), nrow = 1) if (trendtype == "N") { F1 <- 1 G1 <- par["alpha"] } else { F1 <- rbind(c(1, 1), c(0, ifelse(damped, par["phi"], 1))) G1 <- rbind(c(par["alpha"], par["alpha"]), c(par["beta"], par["beta"])) } F2 <- rbind(c(rep(0, m - 1), 1), cbind(diag(m - 1), rep(0, m - 1))) G2 <- matrix(0, m, m) G2[1, m] <- par["gamma"] Mh <- matrix(last.state[1:(p - m)]) %*% matrix(last.state[(p - m + 1):p], nrow = 1) Vh <- matrix(0, length(Mh), length(Mh)) H21 <- H2 %x% H1 F21 <- F2 %x% F1 G21 <- G2 %x% G1 K <- (G2 %x% F1) + (F2 %x% G1) mu <- var <- numeric(h) for (i in 1:h) { mu[i] <- H1 %*% Mh %*% t(H2) var[i] <- (1 + sigma2) * H21 %*% Vh %*% t(H21) + sigma2 * mu[i] ^ 2 vecMh <- c(Mh) Vh <- F21 %*% Vh %*% t(F21) + sigma2 * (F21 %*% Vh %*% t(G21) + G21 %*% Vh %*% t(F21) + K %*% (Vh + vecMh %*% t(vecMh)) %*% t(K) + sigma2 * G21 %*% (3 * Vh + 2 * vecMh %*% t(vecMh)) %*% t(G21)) Mh <- F1 %*% Mh %*% t(F2) + G1 %*% Mh %*% t(G2) * sigma2 } return(list(mu = mu, var = var)) } # ses <- function(x,h=10,level=c(80,95),fan=FALSE,...) # { # fcast <- forecast(ets(x,"ANN"),h,level=level,fan=fan,...) 
# fcast$method <- "Simple exponential smoothing" # fcast$model$call <- match.call() # return(fcast) # } # holt <- function(x,h=10, damped=FALSE, level=c(80,95), fan=FALSE, ...) # { # junk <- forecast(ets(x,"AAN",damped=damped),h,level=level,fan=fan,...) # if(damped) # junk$method <- "Damped Holt's method" # else # junk$method <- "Holt's method" # junk$model$call <- match.call() # return(junk) # } # hw <- function(x,h=2*frequency(x),seasonal="additive",damped=FALSE,level=c(80,95), fan=FALSE, ...) # { # if(seasonal=="additive") # { # junk <- forecast(ets(x,"AAA",damped=damped),h,level=level,fan=fan,...) # junk$method <- "Holt-Winters' additive method" # } # else # { # junk <- forecast(ets(x,"MAM",damped=damped),h,level=level,fan=fan,...) # junk$method <- "Holt-Winters' multiplicative method" # } # junk$model$call <- match.call() # return(junk) # } forecast/R/guerrero.R0000644000176200001440000000765014323125536014252 0ustar liggesusers# This R script contains code for extracting the Box-Cox # parameter, lambda, using Guerrero's method (1993). # Written by Leanne Chhay # guer.cv computes the coefficient of variation # Input: # lam = lambda # x = original time series as a time series object # Output: coefficient of variation guer.cv <- function(lam, x, nonseasonal.length=2) { period <- round(max(nonseasonal.length, frequency(x))) nobsf <- length(x) nyr <- floor(nobsf / period) nobst <- floor(nyr * period) x.mat <- matrix(x[(nobsf - nobst + 1):nobsf], period, nyr) x.mean <- apply(x.mat, 2, mean, na.rm = TRUE) x.sd <- apply(x.mat, 2, sd, na.rm = TRUE) x.rat <- x.sd / x.mean ^ (1 - lam) return(sd(x.rat, na.rm = TRUE) / mean(x.rat, na.rm = TRUE)) } # guerrero extracts the required lambda # Input: x = original time series as a time series object # Output: lambda that minimises the coefficient of variation guerrero <- function(x, lower=-1, upper=2, nonseasonal.length=2) { if(any(x <= 0, na.rm = TRUE)) warning("Guerrero's method for selecting a Box-Cox parameter (lambda) is given for strictly positive data.") return(optimize( guer.cv, c(lower, upper), x = x, nonseasonal.length = nonseasonal.length )$minimum) } # Modified version of boxcox from MASS package bcloglik <- function(x, lower=-1, upper=2) { n <- length(x) if (any(x <= 0, na.rm = TRUE)) { stop("x must be positive") } logx <- log(na.omit(c(x))) xdot <- exp(mean(logx)) if (all(class(x) != "ts")) { fit <- lm(x ~ 1, data = data.frame(x = x), na.action = na.exclude) } else if (frequency(x) > 1) { fit <- tslm(x ~ trend + season, data = data.frame(x = x)) } else { fit <- tslm(x ~ trend, data = data.frame(x = x)) } xqr <- fit$qr lambda <- seq(lower, upper, by = .05) xl <- loglik <- as.vector(lambda) m <- length(xl) x <- na.omit(c(x)) for (i in 1L:m) { if (abs(la <- xl[i]) > 0.02) { xt <- (x ^ la - 1) / la } else { xt <- logx * (1 + (la * logx) / 2 * (1 + (la * logx) / 3 * (1 + (la * logx) / 4))) } loglik[i] <- -n / 2 * log(sum(qr.resid(xqr, xt / xdot ^ (la - 1)) ^ 2)) } return(xl[which.max(loglik)]) } #' Automatic selection of Box Cox transformation parameter #' #' If \code{method=="guerrero"}, Guerrero's (1993) method is used, where lambda #' minimizes the coefficient of variation for subseries of \code{x}. #' #' If \code{method=="loglik"}, the value of lambda is chosen to maximize the #' profile log likelihood of a linear model fitted to \code{x}. For #' non-seasonal data, a linear time trend is fitted while for seasonal data, a #' linear time trend with seasonal dummy variables is used. 
#' #' #' @param x a numeric vector or time series of class \code{ts} #' @param method Choose method to be used in calculating lambda. #' @param lower Lower limit for possible lambda values. #' @param upper Upper limit for possible lambda values. #' @return a number indicating the Box-Cox transformation parameter. #' @author Leanne Chhay and Rob J Hyndman #' @seealso \code{\link{BoxCox}} #' @references Box, G. E. P. and Cox, D. R. (1964) An analysis of #' transformations. \emph{JRSS B} \bold{26} 211--246. #' #' Guerrero, V.M. (1993) Time-series analysis supported by power #' transformations. \emph{Journal of Forecasting}, \bold{12}, 37--48. #' @keywords ts #' @examples #' #' lambda <- BoxCox.lambda(AirPassengers,lower=0) #' air.fit <- Arima(AirPassengers, order=c(0,1,1), #' seasonal=list(order=c(0,1,1),period=12), lambda=lambda) #' plot(forecast(air.fit)) #' #' @export BoxCox.lambda <- function(x, method=c("guerrero", "loglik"), lower=-1, upper=2) { if (any(x <= 0, na.rm = TRUE)) { lower <- max(lower, 0) } if (length(x) <= 2 * frequency(x)) { return(1) } # Not enough data to do much more than this # stop("All values must be positive") method <- match.arg(method) if (method == "loglik") { return(bcloglik(x, lower, upper)) } else { return(guerrero(x, lower, upper)) } } forecast/R/lm.R0000644000176200001440000004716214323125536013032 0ustar liggesusers#' Fit a linear model with time series components #' #' \code{tslm} is used to fit linear models to time series including trend and #' seasonality components. #' #' \code{tslm} is largely a wrapper for \code{\link[stats]{lm}()} except that #' it allows variables "trend" and "season" which are created on the fly from #' the time series characteristics of the data. The variable "trend" is a #' simple time trend and "season" is a factor indicating the season (e.g., the #' month or the quarter depending on the frequency of the data). #' #' @param formula an object of class "formula" (or one that can be coerced to #' that class): a symbolic description of the model to be fitted. #' @param data an optional data frame, list or environment (or object coercible #' by as.data.frame to a data frame) containing the variables in the model. If #' not found in data, the variables are taken from environment(formula), #' typically the environment from which lm is called. #' @param subset an optional subset containing rows of data to keep. For best #' results, pass a logical vector of rows to keep. Also supports #' \code{\link[base]{subset}()} functions. #' @inheritParams forecast.ts #' #' @param ... Other arguments passed to \code{\link[stats]{lm}()} #' @return Returns an object of class "lm". #' @author Mitchell O'Hara-Wild and Rob J Hyndman #' @seealso \code{\link{forecast.lm}}, \code{\link[stats]{lm}}. #' @keywords stats #' @examples #' #' y <- ts(rnorm(120,0,3) + 1:120 + 20*sin(2*pi*(1:120)/12), frequency=12) #' fit <- tslm(y ~ trend + season) #' plot(forecast(fit, h=20)) #' #' @export tslm <- function(formula, data, subset, lambda=NULL, biasadj=FALSE, ...) { cl <- match.call() if (!("formula" %in% class(formula))) { formula <- stats::as.formula(formula) } if (missing(data)) { mt <- try(terms(formula)) if (is.element("try-error", class(mt))) { stop("Cannot extract terms from formula, please provide data argument.") } } else { mt <- terms(formula, data = data) } ## Categorise formula variables into time-series, functions, and data. 
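  ## (Illustrative note: for a monthly series y of length n, the variables
  ## created on the fly further below are
  ##    trend  <- 1:n
  ##    season <- as.factor(cycle(y))  # factor with levels 1..12
  ## anything else must come from `data` or the calling environment.)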
  vars <- attr(mt, "variables")
  # Check for time series variables
  tsvar <- match(c("trend", "season"), as.character(vars), 0L)
  # Check for functions (which should be evaluated later, in lm)
  fnvar <- NULL
  for (i in 2:length(vars)) {
    term <- vars[[i]]
    if (!is.symbol(term)) {
      if (typeof(eval(term[[1]])) == "closure") { # If this term is a function (like fourier)
        fnvar <- c(fnvar, i)
      }
    }
  }
  ## Fix formula's environment for correct `...` scoping.
  attr(formula, ".Environment") <- environment()
  ## Ensure response variable is taken from dataset (including transformations)
  formula[[2]] <- as.symbol(deparse(formula[[2]]))
  if (sum(c(tsvar, fnvar)) > 0) {
    # Remove variables not needed in data (trend+season+functions)
    rmvar <- c(tsvar, fnvar)
    rmvar <- rmvar[rmvar != attr(mt, "response") + 1] # Never remove the response variable
    if (any(rmvar != 0)) {
      vars <- vars[-rmvar]
    }
  }
  ## Grab any variables missing from data
  if (!missing(data)) {
    # Check for any missing variables in data
    vars <- vars[c(TRUE, !as.character(vars[-1]) %in% colnames(data))]
    dataname <- substitute(data)
  }
  if (!missing(data)) {
    data <- datamat(do.call(datamat, as.list(vars[-1]), envir = parent.frame()), data)
  }
  else {
    data <- do.call(datamat, as.list(vars[-1]), envir = parent.frame())
  }
  ## Set column name of univariate dataset
  if (is.null(dim(data)) && length(data) != 0) {
    cn <- as.character(vars)[2]
  } else {
    cn <- colnames(data)
  }
  ## Get time series attributes from the data
  if (is.null(tsp(data))) {
    if ((attr(mt, "response") + 1) %in% fnvar) { # Check unevaluated response variable
      tspx <- tsp(eval(attr(mt, "variables")[[attr(mt, "response") + 1]]))
    }
    tspx <- tsp(data[, 1]) # Check for complex ts data.frame
  } else {
    tspx <- tsp(data)
  }
  if (is.null(tspx)) {
    stop("Not time series data, use lm()")
  }
  tsdat <- match(c("trend", "season"), cn, 0L)
  ## Create trend and season if missing from the data
  if (tsdat[1] == 0) { # &tsvar[1]!=0){#If "trend" is not in data, but is in formula
    trend <- 1:NROW(data)
    cn <- c(cn, "trend")
    data <- cbind(data, trend)
  }
  if (tsdat[2] == 0) { # &tsvar[2]!=0){#If "season" is not in data, but is in formula
    if (tsvar[2] != 0 && tspx[3] <= 1) { # Nonseasonal data, and season requested
      stop("Non-seasonal data cannot be modelled using a seasonal factor")
    }
    season <- as.factor(cycle(data[, 1]))
    cn <- c(cn, "season")
    data <- cbind(data, season)
  }
  colnames(data) <- cn
  ## Subset the data according to subset argument
  if (!missing(subset)) {
    if (!is.logical(subset)) {
      stop("subset must be logical")
    } else if (NCOL(subset) > 1) {
      stop("subset must be a logical vector")
    } else if (NROW(subset) != NROW(data)) {
      stop("Subset must be the same length as the number of rows in the dataset")
    }
    warning("Subset has been assumed contiguous")
    timesx <- time(data[, 1])[subset]
    tspx <- recoverTSP(timesx)
    if (tspx[3] == 1 && tsdat[2] == 0 && tsvar[2] != 0) {
      stop("Non-seasonal data cannot be modelled using a seasonal factor")
    }
    data <- data[subset, ] # model.frame(formula,as.data.frame(data[subsetTF,]))
  }
  if (!is.null(lambda)) {
    resp_var <- deparse(attr(mt, "variables")[[attr(mt, "response") + 1]])
    data[, resp_var] <- BoxCox(data[, resp_var], lambda)
    lambda <- attr(data[, resp_var], "lambda")
  }
  if (tsdat[2] == 0 && tsvar[2] != 0) {
    data$season <- factor(data$season) # fix for lost factor information, may not be needed?
  }
  ## Fit the model and prepare model structure
  fit <- lm(formula, data = data, na.action = na.exclude, ...)
fit$data <- data responsevar <- deparse(formula[[2]]) fit$residuals <- ts(residuals(fit)) fit$x <- fit$residuals fit$x[!is.na(fit$x)] <- model.frame(fit)[, responsevar] fit$fitted.values <- ts(fitted(fit)) tsp(fit$residuals) <- tsp(fit$x) <- tsp(fit$fitted.values) <- tsp(data[, 1]) <- tspx fit$call <- cl fit$method <- "Linear regression model" if (exists("dataname")) { fit$call$data <- dataname } if (!is.null(lambda)) { attr(lambda, "biasadj") <- biasadj fit$lambda <- lambda fit$fitted.values <- InvBoxCox(fit$fitted.values, lambda, biasadj, var(fit$residuals)) fit$x <- InvBoxCox(fit$x, lambda) } class(fit) <- c("tslm", class(fit)) return(fit) } #' @export fitted.tslm <- function(object, ...){ object$fitted.values } #' Forecast a linear model with possible time series components #' #' \code{forecast.lm} is used to predict linear models, especially those #' involving trend and seasonality components. #' #' \code{forecast.lm} is largely a wrapper for #' \code{\link[stats]{predict.lm}()} except that it allows variables "trend" #' and "season" which are created on the fly from the time series #' characteristics of the data. Also, the output is reformatted into a #' \code{forecast} object. #' #' @param object Object of class "lm", usually the result of a call to #' \code{\link[stats]{lm}} or \code{\link{tslm}}. #' @param newdata An optional data frame in which to look for variables with #' which to predict. If omitted, it is assumed that the only variables are #' trend and season, and \code{h} forecasts are produced. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable #' for fan plots. #' @param h Number of periods for forecasting. Ignored if \code{newdata} #' present. #' @param ts If \code{TRUE}, the forecasts will be treated as time series #' provided the original data is a time series; the \code{newdata} will be #' interpreted as related to the subsequent time periods. If \code{FALSE}, any #' time series attributes of the original data will be ignored. #' @param ... Other arguments passed to \code{\link[stats]{predict.lm}()}. #' @inheritParams forecast.ts #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.lm}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The historical data for #' the response variable.} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values} #' @author Rob J Hyndman #' @seealso \code{\link{tslm}}, \code{\link[stats]{lm}}. 
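#' @note If the fitted model contains regressors other than \code{trend} and
#' \code{season}, future values of those regressors must be supplied in
#' \code{newdata}.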
#' @keywords stats #' @examples #' #' y <- ts(rnorm(120,0,3) + 1:120 + 20*sin(2*pi*(1:120)/12), frequency=12) #' fit <- tslm(y ~ trend + season) #' plot(forecast(fit, h=20)) #' #' @export forecast.lm <- function(object, newdata, h=10, level=c(80, 95), fan=FALSE, lambda=object$lambda, biasadj=NULL, ts=TRUE, ...) { if(h < 1) { stop("The forecast horizon must be at least 1.") } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } if (!is.null(object$data)) { origdata <- object$data } # no longer exists else if (!is.null(object$model)) { origdata <- object$model } else if (!is.null(object$call$data)) { origdata <- try(object$data <- eval(object$call$data), silent = TRUE) if (is.element("try-error", class(origdata))) { stop("Could not find data. Try training your model using tslm() or attach data directly to the object via object$data<-modeldata for some object<-lm(formula,modeldata).") } } else { origdata <- as.data.frame(fitted(object) + residuals(object)) } if (!is.element("data.frame", class(origdata))) { origdata <- as.data.frame(origdata) if (!is.element("data.frame", class(origdata))) { stop("Could not find data. Try training your model using tslm() or attach data directly to the object via object$data<-modeldata for some object<-lm(formula,modeldata).") } } # Check if the forecasts will be time series if (ts && is.element("ts", class(origdata))) { tspx <- tsp(origdata) timesx <- time(origdata) } else if (ts && is.element("ts", class(origdata[, 1]))) { tspx <- tsp(origdata[, 1]) timesx <- time(origdata[, 1]) } else if (ts && is.element("ts", class(fitted(object)))) { tspx <- tsp(fitted(object)) timesx <- time(fitted(object)) } else { tspx <- NULL } # if(!is.null(object$call$subset)) # { # j <- eval(object$call$subset) # origdata <- origdata[j,] # if(!is.null(tspx)) # { # # Try to figure out times for subset. Assume they are contiguous. # timesx <- timesx[j] # tspx <- tsp(origdata) <- c(min(timesx),max(timesx),tspx[3]) # } # } # Add trend and seasonal to data frame oldterms <- terms(object) # Adjust terms for function variables and rename datamat colnames to match. 
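  # Roughly: match the variables required by the model against the columns of
  # newdata, re-evaluate any function terms (e.g. fourier()) on newdata, and
  # warn if a required variable cannot be found.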
if (!missing(newdata)) {
    reqvars <- as.character(attr(object$terms, "variables")[-1])[-attr(object$terms, "response")]
    # Search for time series variables
    tsvar <- match(c("trend", "season"), reqvars, 0L)
    # Check if required variables are functions
    fnvar <- sapply(reqvars, function(x) !(is.symbol(parse(text = x)[[1]]) || typeof(eval(parse(text = x)[[1]][[1]])) != "closure"))
    if (!is.data.frame(newdata)) {
      newdata <- datamat(newdata)
      colnames(newdata)[1] <- ifelse(sum(tsvar > 0), reqvars[-tsvar][1], reqvars[1])
      warning("newdata column names not specified, defaulting to first variable required.")
    }
    oldnewdata <- newdata
    newvars <- make.names(colnames(newdata))
    # Check if variables are missing
    misvar <- match(make.names(reqvars), newvars, 0L) == 0L
    if (any(!misvar & !fnvar)) { # If any variables are not missing/functions, add them to data
      tmpdata <- datamat(newdata[reqvars[!misvar]])
      rm1 <- FALSE
    } else { # Prefill the datamat
      tmpdata <- datamat(1:NROW(newdata))
      rm1 <- TRUE
    }
    # Remove trend and seasonality from required variables
    if (sum(tsvar) > 0) {
      reqvars <- reqvars[-tsvar]
      fnvar <- fnvar[-tsvar]
      misvar <- match(make.names(reqvars), newvars, 0L) == 0L
    }
    if (any(misvar | fnvar)) { # If any variables are missing/functions
      reqvars <- reqvars[misvar | fnvar] # They are required
      fnvar <- fnvar[misvar | fnvar] # Update required function variables
      for (i in reqvars) {
        found <- FALSE
        subvars <- NULL
        for (j in 1:length(object$coefficients)) {
          subvars[j] <- pmatch(i, names(object$coefficients)[j])
        }
        subvars <- !is.na(subvars)
        subvars <- names(object$coefficients)[subvars]
        # Detect if subvars is multivariate
        if (length(subvars) > 1) {
          # Extract prefix only
          subvars <- substr(subvars, nchar(i) + 1, 999L)
          fsub <- match(make.names(subvars), newvars, 0L)
          if (any(fsub == 0)) { # Check for misnamed columns
            fsub <- grep(paste(make.names(subvars), collapse = "|"), newvars)
          }
          if (all(fsub != 0) && length(fsub) == length(subvars)) {
            imat <- as.matrix(newdata[, fsub], ncol = length(fsub))
            colnames(imat) <- subvars
            tmpdata[[length(tmpdata) + 1]] <- imat
            found <- TRUE
          } else { # Attempt to evaluate it as a function
            subvars <- i
          }
        }
        if (length(subvars) == 1) { # Check if it is a function
          if (fnvar[match(i, reqvars)]) {
            # Pre-evaluate function from data
            tmpdata[[length(tmpdata) + 1]] <- eval(parse(text = subvars)[[1]], newdata)
            found <- TRUE
          }
        }
        if (found) {
          names(tmpdata)[length(tmpdata)] <- paste0("solvedFN___", match(i, reqvars))
          subvarloc <- match(i, lapply(attr(object$terms, "predvars"), deparse))
          attr(object$terms, "predvars")[[subvarloc]] <- attr(object$terms, "variables")[[subvarloc]] <- parse(text = paste0("solvedFN___", match(i, reqvars)))[[1]]
        } else {
          warning(paste0("Could not find required variable ", i, " in newdata. Specify newdata as a named data.frame"))
        }
      }
    }
    if (rm1) {
      tmpdata[[1]] <- NULL
    }
    newdata <- cbind(newdata, tmpdata)
    h <- nrow(newdata)
  }
  if (!is.null(tspx)) {
    # Always generate trend series, continuing on from the training data
    trend <- ifelse(is.null(origdata$trend), NROW(origdata), max(origdata$trend)) + seq_len(h)
    if (!missing(newdata)) {
      newdata <- cbind(newdata, trend)
    } else {
      newdata <- datamat(trend)
    }
    # Always generate season series
    x <- ts(seq_len(h), start = tspx[2] + 1 / tspx[3], frequency = tspx[3])
    season <- as.factor(cycle(x))
    newdata <- cbind(newdata, season)
  }
  newdata <- as.data.frame(newdata)
  if (!exists("oldnewdata")) {
    oldnewdata <- newdata
  }
  # If only one column, assume its name.
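  # (e.g. a bare vector supplied as newdata can arrive here as a single
  # column named "newdata"; it is renamed to the right-hand-side variable
  # of the model formula.)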
if (ncol(newdata) == 1 && colnames(newdata)[1] == "newdata") { colnames(newdata) <- as.character(formula(object$model))[3] } # Check regressors included in newdata. # Not working so removed for now. # xreg <- attributes(terms(object$model))$term.labels # if(any(!is.element(xreg,colnames(newdata)))) # stop("Predictor variables not included") object$x <- getResponse(object) # responsevar <- as.character(formula(object$model))[2] # responsevar <- gsub("`","",responsevar) # object$x <- model.frame(object$model)[,responsevar] # Remove missing values from residuals predict_object <- object predict_object$residuals <- na.omit(as.numeric(object$residuals)) out <- list() nl <- length(level) for (i in 1:nl) out[[i]] <- predict(predict_object, newdata = newdata, se.fit = TRUE, interval = "prediction", level = level[i] / 100, ...) if (nrow(newdata) != length(out[[1]]$fit[, 1])) { stop("Variables not found in newdata") } object$terms <- oldterms if (is.null(object$series)) { # Model produced via lm(), add series attribute object$series <- deparse(attr(oldterms, "variables")[[1 + attr(oldterms, "response")]]) } fcast <- list( model = object, mean = out[[1]]$fit[, 1], lower = out[[1]]$fit[, 2], upper = out[[1]]$fit[, 3], level = level, x = object$x, series = object$series ) fcast$method <- "Linear regression model" fcast$newdata <- oldnewdata fcast$residuals <- residuals(object) fcast$fitted <- fitted(object) if (NROW(origdata) != NROW(fcast$x)) { # Give up on ts attributes as some data are missing tspx <- NULL } if (NROW(fcast$x) != NROW(fcast$residuals)) { tspx <- NULL } if (!is.null(tspx)) { fcast$x <- ts(fcast$x) fcast$residuals <- ts(fcast$residuals) fcast$fitted <- ts(fcast$fitted) tsp(fcast$x) <- tsp(fcast$residuals) <- tsp(fcast$fitted) <- tspx } if (nl > 1) { for (i in 2:nl) { fcast$lower <- cbind(fcast$lower, out[[i]]$fit[, 2]) fcast$upper <- cbind(fcast$upper, out[[i]]$fit[, 3]) } } if (!is.null(tspx)) { fcast$mean <- ts(fcast$mean, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) fcast$upper <- ts(fcast$upper, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) fcast$lower <- ts(fcast$lower, start = tspx[2] + 1 / tspx[3], frequency = tspx[3]) } if (!is.null(lambda)) { #fcast$x <- InvBoxCox(fcast$x, lambda) fcast$mean <- InvBoxCox(fcast$mean, lambda, biasadj, fcast) fcast$lower <- InvBoxCox(fcast$lower, lambda) fcast$upper <- InvBoxCox(fcast$upper, lambda) } return(structure(fcast, class = "forecast")) } #' @export summary.tslm <- function(object, ...) { # Remove NA from object structure as summary.lm() expects (#836) object$residuals <- na.omit(as.numeric(object$residuals)) object$fitted.values <- na.omit(as.numeric(object$fitted.values)) if(!is.null(object$lambda)) { object$fitted.values <- BoxCox(object$fitted.values, object$lambda) } NextMethod() } # Compute cross-validation and information criteria from a linear model #' Cross-validation statistic #' #' Computes the leave-one-out cross-validation statistic (the mean of PRESS #' -- prediction residual sum of squares), AIC, corrected AIC, BIC and adjusted #' R^2 values for a linear model. #' #' #' @param obj output from \code{\link[stats]{lm}} or \code{\link{tslm}} #' @return Numerical vector containing CV, AIC, AICc, BIC and AdjR2 values. 
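#' @details The leave-one-out cross-validation statistic is computed without
#' refitting the model, using the identity CV = mean((e[i] / (1 - h[i]))^2),
#' where e[i] are the residuals and h[i] the hat values (leverages) of the
#' fitted model.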
#' @author Rob J Hyndman #' @seealso \code{\link[stats]{AIC}} #' @keywords models #' @examples #' #' y <- ts(rnorm(120,0,3) + 20*sin(2*pi*(1:120)/12), frequency=12) #' fit1 <- tslm(y ~ trend + season) #' fit2 <- tslm(y ~ season) #' CV(fit1) #' CV(fit2) #' #' @export CV <- function(obj) { if (!is.element("lm", class(obj))) { stop("This function is for objects of class lm") } n <- length(obj$residuals) k <- extractAIC(obj)[1] - 1 # number of predictors (constant removed) aic <- extractAIC(obj)[2] + 2 # add 2 for the variance estimate aicc <- aic + 2 * (k + 2) * (k + 3) / (n - k - 3) bic <- aic + (k + 2) * (log(n) - 2) cv <- mean((residuals(obj) / (1 - hatvalues(obj))) ^ 2, na.rm = TRUE) adjr2 <- summary(obj)$adj out <- c(cv, aic, aicc, bic, adjr2) names(out) <- c("CV", "AIC", "AICc", "BIC", "AdjR2") return(out) } forecast/R/seasadj.R0000644000176200001440000000311714323125536014024 0ustar liggesusers## Generic seasadj functions #' Seasonal adjustment #' #' Returns seasonally adjusted data constructed by removing the seasonal #' component. #' #' #' @param object Object created by \code{\link[stats]{decompose}}, #' \code{\link[stats]{stl}} or \code{\link{tbats}}. #' @param ... Other arguments not currently used. #' @return Univariate time series. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{stl}}, \code{\link[stats]{decompose}}, #' \code{\link{tbats}}. #' @keywords ts #' @examples #' plot(AirPassengers) #' lines(seasadj(decompose(AirPassengers,"multiplicative")),col=4) #' #' @export seasadj <- function(object, ...) UseMethod("seasadj") #' @rdname seasadj #' @export seasadj.stl <- function(object, ...) { return(trendcycle(object) + remainder(object)) } #' @rdname seasadj #' @export seasadj.mstl <- function(object, ...) { return(trendcycle(object) + remainder(object)) } #' @rdname seasadj #' @export seasadj.decomposed.ts <- function(object, ...) { if (object$type == "additive") { return(object$x - object$seasonal) } else { return(object$x / object$seasonal) } } #' @rdname seasadj #' @export seasadj.tbats <- function(object, ...) { return(object$y - seasonal(object)) # comp <- tbats.components(object) # scols <- grep("season",colnames(comp)) # sa <- comp[,"observed"] - rowSums(comp[,scols,drop=FALSE]) # # Back transform if necessary # if (!is.null(object$lambda)) # sa <- InvBoxCox(sa, object$lambda) # return(sa) } #' @rdname seasadj #' @export seasadj.seas <- function(object, ...) { return(seasextract_w_na_action(object, "final")) } forecast/R/forecastBATS.R0000644000176200001440000001707414323125536014701 0ustar liggesusers#' Forecasting using BATS and TBATS models #' #' Forecasts \code{h} steps ahead with a BATS model. Prediction intervals are #' also produced. #' #' @param object An object of class "\code{bats}". Usually the result of a call #' to \code{\link{bats}}. #' @param h Number of periods for forecasting. Default value is twice the #' largest seasonal period (for seasonal data) or ten (for non-seasonal data). #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to \code{seq(51,99,by=3)}. This is suitable #' for fan plots. #' @param biasadj Use adjusted back-transformed mean for Box-Cox #' transformations. If TRUE, point forecasts and fitted values are mean #' forecast. Otherwise, these points can be considered the median of the #' forecast densities. #' @param ... Other arguments, currently ignored. #' @return An object of class "\code{forecast}". 
#' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.bats}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A copy of the \code{bats} object} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for #' prediction intervals} \item{upper}{Upper limits for prediction intervals} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The original time series (either \code{object} itself or the time #' series used to create the model stored as \code{object}).} #' \item{residuals}{Residuals from the fitted model.} \item{fitted}{Fitted #' values (one-step forecasts)} #' @author Slava Razbash and Rob J Hyndman #' @seealso \code{\link{bats}}, \code{\link{tbats}},\code{\link{forecast.ets}}. #' @references De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), #' Forecasting time series with complex seasonal patterns using exponential #' smoothing, \emph{Journal of the American Statistical Association}, #' \bold{106}(496), 1513-1527. #' @keywords ts #' @examples #' #' \dontrun{ #' fit <- bats(USAccDeaths) #' plot(forecast(fit)) #' #' taylor.fit <- bats(taylor) #' plot(forecast(taylor.fit)) #' } #' #' @export forecast.bats <- function(object, h, level=c(80, 95), fan=FALSE, biasadj=NULL, ...) { # Set up the variables if (any(class(object$y) == "ts")) { ts.frequency <- frequency(object$y) } else { ts.frequency <- ifelse(!is.null(object$seasonal.periods), max(object$seasonal.periods), 1) } if (missing(h)) { if (is.null(object$seasonal.periods)) { h <- ifelse(ts.frequency == 1, 10, 2 * ts.frequency) } else { h <- 2 * max(object$seasonal.periods) } } else if (h <= 0) { stop("Forecast horizon out of bounds") } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } # Set up the matrices x <- matrix(0, nrow = nrow(object$x), ncol = h) y.forecast <- numeric(h) # w <- makeWMatrix(small.phi=object$damping.parameter, seasonal.periods=object$seasonal.periods, ar.coefs=object$ar.coefficients, ma.coefs=object$ma.coefficients) w <- .Call("makeBATSWMatrix", smallPhi_s = object$damping.parameter, sPeriods_s = object$seasonal.periods, arCoefs_s = object$ar.coefficients, maCoefs_s = object$ma.coefficients, PACKAGE = "forecast") # g <- makeGMatrix(alpha=object$alpha, beta=object$beta, gamma.vector=object$gamma.values, seasonal.periods=object$seasonal.periods, p=length(object$ar.coefficients), q=length(object$ma.coefficients)) g <- .Call("makeBATSGMatrix", object$alpha, object$beta, object$gamma.values, object$seasonal.periods, length(object$ar.coefficients), length(object$ma.coefficients), PACKAGE = "forecast") F <- makeFMatrix(alpha = object$alpha, beta = object$beta, small.phi = object$damping.parameter, seasonal.periods = object$seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = object$ar.coefficients, ma.coefs = object$ma.coefficients) # Do the forecast y.forecast[1] <- w$w.transpose %*% object$x[, ncol(object$x)] x[, 1] <- F %*% object$x[, ncol(object$x)] # + g$g %*% 
object$errors[length(object$errors)] if (h > 1) { for (t in 2:h) { x[, t] <- F %*% x[, (t - 1)] y.forecast[t] <- w$w.transpose %*% x[, (t - 1)] } } ## Make prediction intervals here lower.bounds <- upper.bounds <- matrix(NA, ncol = length(level), nrow = h) variance.multiplier <- numeric(h) variance.multiplier[1] <- 1 if (h > 1) { for (j in 1:(h - 1)) { if (j == 1) { f.running <- diag(ncol(F)) } else { f.running <- f.running %*% F } c.j <- w$w.transpose %*% f.running %*% g$g variance.multiplier[(j + 1)] <- variance.multiplier[j] + c.j ^ 2 } } variance <- object$variance * variance.multiplier # print(variance) st.dev <- sqrt(variance) for (i in 1:length(level)) { marg.error <- st.dev * abs(qnorm((100 - level[i]) / 200)) lower.bounds[, i] <- y.forecast - marg.error upper.bounds[, i] <- y.forecast + marg.error } # Inv Box Cox transform if required if (!is.null(object$lambda)) { y.forecast <- InvBoxCox(y.forecast, object$lambda, biasadj, list(level = level, upper = upper.bounds, lower = lower.bounds)) lower.bounds <- InvBoxCox(lower.bounds, object$lambda) if (object$lambda < 1) { lower.bounds <- pmax(lower.bounds, 0) } upper.bounds <- InvBoxCox(upper.bounds, object$lambda) } colnames(upper.bounds) <- colnames(lower.bounds) <- paste0(level, "%") forecast.object <- list( model = object, mean = future_msts(object$y, y.forecast), level = level, x = object$y, series = object$series, upper = future_msts(object$y, upper.bounds), lower = future_msts(object$y, lower.bounds), fitted = copy_msts(object$y, object$fitted.values), method = as.character(object), residuals = copy_msts(object$y, object$errors) ) if (is.null(object$series)) { forecast.object$series <- deparse(object$call$y) } class(forecast.object) <- "forecast" return(forecast.object) } #' @export as.character.bats <- function(x, ...) 
{ name <- "BATS(" if (!is.null(x$lambda)) { name <- paste(name, round(x$lambda, digits = 3), sep = "") } else { name <- paste(name, "1", sep = "") } name <- paste(name, ", {", sep = "") if (!is.null(x$ar.coefficients)) { name <- paste(name, length(x$ar.coefficients), sep = "") } else { name <- paste(name, "0", sep = "") } name <- paste(name, ",", sep = "") if (!is.null(x$ma.coefficients)) { name <- paste(name, length(x$ma.coefficients), sep = "") } else { name <- paste(name, "0", sep = "") } name <- paste(name, "}, ", sep = "") if (!is.null(x$damping.parameter)) { name <- paste(name, round(x$damping.parameter, digits = 3), sep = "") } else { name <- paste(name, "-", sep = "") } name <- paste(name, ", ", sep = "") if (!is.null(x$seasonal.periods)) { name <- paste(name, "{", sep = "") for (i in x$seasonal.periods) { name <- paste(name, i, sep = "") if (i != x$seasonal.periods[length(x$seasonal.periods)]) { name <- paste(name, ",", sep = "") } else { name <- paste(name, "})", sep = "") } } } else { name <- paste(name, "-)", sep = "") } return(name) } forecast/R/makeParamVector.R0000644000176200001440000001734514323125536015503 0ustar liggesusers# TODO: Add comment # # Author: srazbash ############################################################################### unParameteriseTBATS <- function(param.vector, control) { # print(control) if (control$use.box.cox) { lambda <- param.vector[1] alpha <- param.vector[2] if (control$use.beta) { if (control$use.damping) { small.phi <- param.vector[3] beta <- param.vector[4] gamma.start <- 5 } else { small.phi <- 1 beta <- param.vector[3] gamma.start <- 4 } } else { small.phi <- NULL beta <- NULL gamma.start <- 3 } if (control$length.gamma > 0) { gamma.one.vector <- param.vector[gamma.start:(gamma.start + (control$length.gamma / 2) - 1)] gamma.two.vector <- param.vector[(gamma.start + (control$length.gamma / 2)):(gamma.start + (control$length.gamma) - 1)] final.gamma.pos <- gamma.start + control$length.gamma - 1 } else { gamma.one.vector <- NULL gamma.two.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (control$p != 0) { ar.coefs <- param.vector[(final.gamma.pos + 1):(final.gamma.pos + control$p)] } else { ar.coefs <- NULL } if (control$q != 0) { ma.coefs <- param.vector[(final.gamma.pos + control$p + 1):length(param.vector)] } else { ma.coefs <- NULL } } else { lambda <- NULL alpha <- param.vector[1] if (control$use.beta) { if (control$use.damping) { small.phi <- param.vector[2] beta <- param.vector[3] gamma.start <- 4 } else { small.phi <- 1 beta <- param.vector[2] gamma.start <- 3 } } else { small.phi <- NULL beta <- NULL gamma.start <- 2 } if (control$length.gamma > 0) { gamma.one.vector <- param.vector[gamma.start:(gamma.start + (control$length.gamma / 2) - 1)] gamma.two.vector <- param.vector[(gamma.start + (control$length.gamma / 2)):(gamma.start + (control$length.gamma) - 1)] final.gamma.pos <- gamma.start + control$length.gamma - 1 } else { gamma.one.vector <- NULL gamma.two.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (control$p != 0) { ar.coefs <- param.vector[(final.gamma.pos + 1):(final.gamma.pos + control$p)] } else { ar.coefs <- NULL } if (control$q != 0) { ma.coefs <- param.vector[(final.gamma.pos + control$p + 1):length(param.vector)] } else { ma.coefs <- NULL } } return(list(lambda = lambda, alpha = alpha, beta = beta, small.phi = small.phi, gamma.one.v = gamma.one.vector, gamma.two.v = gamma.two.vector, ar.coefs = ar.coefs, ma.coefs = ma.coefs)) } makeParscale <- function(control) { # print(control) if 
(control$use.box.cox) { parscale <- c(.001, .01) } else { parscale <- .01 } if (control$use.beta) { if (control$use.damping) { parscale <- c(parscale, 1e-2, 1e-2) } else { parscale <- c(parscale, 1e-2) } } if (control$length.gamma > 0) { parscale <- c(parscale, rep(1e-5, control$length.gamma)) } if ((control$p != 0) | (control$q != 0)) { parscale <- c(parscale, rep(1e-1, (control$p + control$q))) } # print(parscale) return(parscale) } ############################################################################################################################################################################################## ## BATS related stuff below ######################################## makeParscaleBATS <- function(control) { # print(control) if (control$use.box.cox) { parscale <- c(.001, .1) } else { parscale <- .1 } if (control$use.beta) { if (control$use.damping) { parscale <- c(parscale, 1e-2, 1e-2) } else { parscale <- c(parscale, 1e-2) } } if (control$length.gamma > 0) { parscale <- c(parscale, rep(1e-2, control$length.gamma)) } if ((control$p != 0) | (control$q != 0)) { parscale <- c(parscale, rep(1e-1, (control$p + control$q))) } # print(parscale) return(parscale) } parameterise <- function(alpha, beta.v=NULL, small.phi=1, gamma.v=NULL, lambda=NULL, ar.coefs=NULL, ma.coefs=NULL) { # print("urg") # print(lambda) if (!is.null(lambda)) { param.vector <- cbind(lambda, alpha) use.box.cox <- TRUE } else { # print("hello") param.vector <- alpha use.box.cox <- FALSE # print(use.box.cox) } if (!is.null(beta.v)) { use.beta <- TRUE if (is.null(small.phi)) { use.damping <- FALSE } else if (small.phi != 1) { param.vector <- cbind(param.vector, small.phi) use.damping <- TRUE } else { use.damping <- FALSE } param.vector <- cbind(param.vector, beta.v) } else { use.beta <- FALSE use.damping <- FALSE } if (!is.null(gamma.v)) { gamma.v <- matrix(gamma.v, nrow = 1, ncol = length(gamma.v)) param.vector <- cbind(param.vector, gamma.v) length.gamma <- length(gamma.v) } else { length.gamma <- 0 } if (!is.null(ar.coefs)) { ar.coefs <- matrix(ar.coefs, nrow = 1, ncol = length(ar.coefs)) param.vector <- cbind(param.vector, ar.coefs) p <- length(ar.coefs) } else { p <- 0 } if (!is.null(ma.coefs)) { ma.coefs <- matrix(ma.coefs, nrow = 1, ncol = length(ma.coefs)) param.vector <- cbind(param.vector, ma.coefs) q <- length(ma.coefs) } else { q <- 0 } # print(use.box.cox) control <- list(use.beta = use.beta, use.box.cox = use.box.cox, use.damping = use.damping, length.gamma = length.gamma, p = p, q = q) return(list(vect = as.numeric(param.vector), control = control)) } unParameterise <- function(param.vector, control) { # print(control) if (control$use.box.cox) { lambda <- param.vector[1] alpha <- param.vector[2] if (control$use.beta) { if (control$use.damping) { small.phi <- param.vector[3] beta <- param.vector[4] gamma.start <- 5 } else { small.phi <- 1 beta <- param.vector[3] gamma.start <- 4 } } else { small.phi <- NULL beta <- NULL gamma.start <- 3 } if (control$length.gamma > 0) { gamma.vector <- param.vector[gamma.start:(gamma.start + control$length.gamma - 1)] final.gamma.pos <- gamma.start + control$length.gamma - 1 } else { gamma.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (control$p != 0) { ar.coefs <- param.vector[(final.gamma.pos + 1):(final.gamma.pos + control$p)] } else { ar.coefs <- NULL } if (control$q != 0) { ma.coefs <- param.vector[(final.gamma.pos + control$p + 1):length(param.vector)] } else { ma.coefs <- NULL } } else { lambda <- NULL alpha <- param.vector[1] if 
(control$use.beta) {
      if (control$use.damping) {
        small.phi <- param.vector[2]
        beta <- param.vector[3]
        gamma.start <- 4
      } else {
        small.phi <- 1
        beta <- param.vector[2]
        gamma.start <- 3
      }
    } else {
      small.phi <- NULL
      beta <- NULL
      gamma.start <- 2
    }
    if (control$length.gamma > 0) {
      gamma.vector <- param.vector[gamma.start:(gamma.start + control$length.gamma - 1)]
      final.gamma.pos <- gamma.start + control$length.gamma - 1
    } else {
      gamma.vector <- NULL
      final.gamma.pos <- gamma.start - 1
    }
    if (control$p != 0) {
      ar.coefs <- param.vector[(final.gamma.pos + 1):(final.gamma.pos + control$p)]
    } else {
      ar.coefs <- NULL
    }
    if (control$q != 0) {
      ma.coefs <- param.vector[(final.gamma.pos + control$p + 1):length(param.vector)]
    } else {
      ma.coefs <- NULL
    }
  }
  return(list(lambda = lambda, alpha = alpha, beta = beta, small.phi = small.phi, gamma.v = gamma.vector, ar.coefs = ar.coefs, ma.coefs = ma.coefs))
}
forecast/R/components.R0000644000176200001440000000712314150370574014602 0ustar liggesusers# Functions to extract components from time series decomposition
# These should match corresponding functions in the seasonal package
# providing similar functionality for stl, decomposed.ts and tbats objects

#' Extract components from a time series decomposition
#'
#' Returns a univariate time series equal to either a seasonal component,
#' trend-cycle component or remainder component from a time series
#' decomposition.
#'
#' @param object Object created by \code{\link[stats]{decompose}},
#' \code{\link[stats]{stl}} or \code{\link{tbats}}.
#' @return Univariate time series.
#' @author Rob J Hyndman
#' @seealso \code{\link[stats]{stl}}, \code{\link[stats]{decompose}},
#' \code{\link{tbats}}, \code{\link{seasadj}}.
#' @keywords ts
#' @examples
#' plot(USAccDeaths)
#' fit <- stl(USAccDeaths, s.window="periodic")
#' lines(trendcycle(fit),col="red")
#'
#' library(ggplot2)
#' autoplot(cbind(
#'   Data=USAccDeaths,
#'   Seasonal=seasonal(fit),
#'   Trend=trendcycle(fit),
#'   Remainder=remainder(fit)),
#'   facets=TRUE) +
#'   ylab("") + xlab("Year")
#'
#' @export
seasonal <- function(object) {
  if ("mstl" %in% class(object)) {
    cols <- grep("Season", colnames(object))
    return(object[, cols])
  } else if ("stl" %in% class(object)) {
    return(object$time.series[, "seasonal"])
  } else if ("decomposed.ts" %in% class(object)) {
    return(object$seasonal)
  } else if ("tbats" %in% class(object)) {
    comp <- tbats.components(object)
    scols <- grep("season", colnames(comp))
    season <- ts(rowSums(comp[, scols, drop = FALSE]))
    if (!is.null(object$lambda)) {
      season <- InvBoxCox(season, object$lambda)
    }
    tsp(season) <- tsp(comp)
    return(season)
  } else if ("seas" %in% class(object)) {
    return(object$data[, "seasonal"])
  } else {
    stop("Unknown object type")
  }
}

#' @rdname seasonal
#' @export
trendcycle <- function(object) {
  if ("mstl" %in% class(object)) {
    return(object[, "Trend"])
  } else if ("stl" %in% class(object)) {
    return(object$time.series[, "trend"])
  } else if ("decomposed.ts" %in% class(object)) {
    return(object$trend)
  }
  # else if("tbats" %in% class(object))
  # {
  #   trnd <- tbats.components(object)[,"level"]
  #   if (!is.null(object$lambda))
  #     trnd <- InvBoxCox(trnd, object$lambda)
  #   return(trnd)
  # }
  else if ("seas" %in% class(object)) {
    return(seasextract_w_na_action(object, "trend"))
  } else {
    stop("Unknown object type")
  }
}

#' @rdname seasonal
#' @export
remainder <- function(object) {
  if ("mstl" %in% class(object)) {
    return(object[, "Remainder"])
  } else if ("stl" %in% class(object)) {
    return(object$time.series[, "remainder"])
  } else if ("decomposed.ts" %in% class(object))
{
    return(object$random)
  }
  # else if("tbats" %in% class(object))
  # {
  #   comp <- tbats.components(object)
  #   trnd <- comp[,"level"]
  #   scols <- grep("season",colnames(comp))
  #   season <- rowSums(comp[,scols,drop=FALSE])
  #   irreg <- ts(comp[,'observed'] - trnd - season)
  #   tsp(irreg) <- tsp(comp)
  #   return(irreg)
  # }
  else if ("seas" %in% class(object)) {
    return(seasextract_w_na_action(object, "irregular"))
  } else {
    stop("Unknown object type")
  }
}

## Copied from seasonal:::extract_w_na_action
## Importing is problematic due to issues with ARM processors
seasextract_w_na_action <- function(x, name) {
  if (is.null(x$data)) {
    return(NULL)
  }
  z <- na.omit(x$data[, name])
  if (!is.null(x$na.action)) {
    if (attr(x$na.action, "class") == "exclude") {
      z <- ts(stats::napredict(x$na.action, z))
      tsp(z) <- tsp(x$x)
    }
  }
  z
}
forecast/R/newarima2.R0000644000176200001440000010012714323125536014276 0ustar liggesusers#' Fit best ARIMA model to univariate time series
#'
#' Returns best ARIMA model according to either AIC, AICc or BIC value. The
#' function conducts a search over possible models within the order constraints
#' provided.
#'
#' The default arguments are designed for rapid estimation of models for many time series.
#' If you are analysing just one time series, and can afford to take some more time, it
#' is recommended that you set \code{stepwise=FALSE} and \code{approximation=FALSE}.
#'
#' Non-stepwise selection can be slow, especially for seasonal data. The stepwise
#' algorithm outlined in Hyndman & Khandakar (2008) is used except that the default
#' method for selecting seasonal differences is now based on an estimate of seasonal
#' strength (Wang, Smith & Hyndman, 2006) rather than the Canova-Hansen test.
#' There are also some other minor variations to the algorithm described in
#' Hyndman and Khandakar (2008).
#'
#' @inheritParams stats::arima
#' @param y a univariate time series
#' @param d Order of first-differencing. If missing, will choose a value based
#' on \code{test}.
#' @param D Order of seasonal-differencing. If missing, will choose a value
#' based on \code{seasonal.test}.
#' @param max.p Maximum value of p
#' @param max.q Maximum value of q
#' @param max.P Maximum value of P
#' @param max.Q Maximum value of Q
#' @param max.order Maximum value of p+q+P+Q if model selection is not
#' stepwise.
#' @param max.d Maximum number of non-seasonal differences
#' @param max.D Maximum number of seasonal differences
#' @param start.p Starting value of p in stepwise procedure.
#' @param start.q Starting value of q in stepwise procedure.
#' @param start.P Starting value of P in stepwise procedure.
#' @param start.Q Starting value of Q in stepwise procedure.
#' @param stationary If \code{TRUE}, restricts search to stationary models.
#' @param seasonal If \code{FALSE}, restricts search to non-seasonal models.
#' @param ic Information criterion to be used in model selection.
#' @param stepwise If \code{TRUE}, will do stepwise selection (faster).
#' Otherwise, it searches over all models. Non-stepwise selection can be very
#' slow, especially for seasonal models.
#' @param nmodels Maximum number of models considered in the stepwise search.
#' @param trace If \code{TRUE}, the list of ARIMA models considered will be
#' reported.
#' @param approximation If \code{TRUE}, estimation is via conditional sums of
#' squares and the information criteria used for model selection are
#' approximated. The final model is still computed using maximum likelihood
#' estimation.
Approximation should be used for long time series or a high
#' seasonal period to avoid excessive computation times.
#' @param truncate An integer value indicating how many observations to use in
#' model selection. The last \code{truncate} values of the series are used to
#' select a model when \code{truncate} is not \code{NULL} and
#' \code{approximation=TRUE}. All observations are used if either
#' \code{truncate=NULL} or \code{approximation=FALSE}.
#' @param xreg Optionally, a numerical vector or matrix of external regressors, which
#' must have the same number of rows as \code{y}. (It should not be a data frame.)
#' @param test Type of unit root test to use. See \code{\link{ndiffs}} for
#' details.
#' @param test.args Additional arguments to be passed to the unit root test.
#' @param seasonal.test This determines which method is used to select the number of seasonal differences.
#' The default method is to use a measure of seasonal strength computed from an STL decomposition.
#' Other possibilities involve seasonal unit root tests.
#' @param seasonal.test.args Additional arguments to be passed to the seasonal
#' unit root test.
#' See \code{\link{nsdiffs}} for details.
#' @param allowdrift If \code{TRUE}, models with drift terms are considered.
#' @param allowmean If \code{TRUE}, models with a non-zero mean are considered.
#' @param parallel If \code{TRUE} and \code{stepwise = FALSE}, then the
#' specification search is done in parallel. This can give a significant
#' speedup on multicore machines.
#' @param num.cores Allows the user to specify the number of parallel processes
#' to be used if \code{parallel = TRUE} and \code{stepwise = FALSE}. If
#' \code{NULL}, then the number of logical cores is automatically detected and
#' all available cores are used.
#' @param x Deprecated. Included for backwards compatibility.
#' @param ... Additional arguments to be passed to \code{\link[stats]{arima}}.
#' @inheritParams forecast.ts
#'
#' @return Same as for \code{\link{Arima}}
#' @author Rob J Hyndman
#' @seealso \code{\link{Arima}}
#' @references Hyndman, RJ and Khandakar, Y (2008) "Automatic time series
#' forecasting: The forecast package for R", \emph{Journal of Statistical
#' Software}, \bold{26}(3).
#'
#' Wang, X, Smith, KA, Hyndman, RJ (2006) "Characteristic-based clustering
#' for time series data", \emph{Data Mining and Knowledge Discovery},
#' \bold{13}(3), 335-364.
#' @keywords ts
#' @examples
#' fit <- auto.arima(WWWusage)
#' plot(forecast(fit,h=20))
#'
#' @export
auto.arima <- function(y, d=NA, D=NA, max.p=5, max.q=5, max.P=2, max.Q=2,
                       max.order=5, max.d=2, max.D=1, start.p=2, start.q=2,
                       start.P=1, start.Q=1, stationary=FALSE, seasonal=TRUE,
                       ic=c("aicc", "aic", "bic"), stepwise=TRUE, nmodels = 94,
                       trace=FALSE,
                       approximation=(length(x) > 150 | frequency(x) > 12),
                       method = NULL, truncate=NULL, xreg=NULL,
                       test=c("kpss", "adf", "pp"), test.args = list(),
                       seasonal.test=c("seas", "ocsb", "hegy", "ch"),
                       seasonal.test.args = list(), allowdrift=TRUE,
                       allowmean=TRUE, lambda=NULL, biasadj=FALSE,
                       parallel=FALSE, num.cores=2, x=y, ...) {
  # Only non-stepwise parallel implemented so far.
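  # For a more thorough (and slower) search, e.g.
  #   auto.arima(y, stepwise = FALSE, approximation = FALSE, parallel = TRUE)
  # fits all candidate models, distributing the work across cores.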
if (stepwise && parallel) {
    warning("Parallel computation is only implemented when stepwise=FALSE; the model will be fitted in serial.")
    parallel <- FALSE
  }
  if (trace && parallel) {
    message("Tracing model searching in parallel is not supported.")
    trace <- FALSE
  }
  series <- deparse(substitute(y))
  x <- as.ts(x)
  if (NCOL(x) > 1) {
    stop("auto.arima can only handle univariate time series")
  }
  # Trim leading NAs and find length of non-missing data
  orig.x <- x
  missing <- is.na(x)
  firstnonmiss <- head(which(!missing),1)
  lastnonmiss <- tail(which(!missing),1)
  serieslength <- sum(!missing[firstnonmiss:lastnonmiss])
  # Trim initial missing values
  x <- subset(x, start=firstnonmiss)
  if(!is.null(xreg)) {
    if(!is.numeric(xreg))
      stop("xreg should be a numeric matrix or a numeric vector")
    xreg <- as.matrix(xreg)
    xreg <- xreg[firstnonmiss:NROW(xreg),,drop=FALSE]
  }
  # Check for constant data
  if (is.constant(x)) {
    if(all(is.na(x)))
      stop("All data are missing")
    if (allowmean) {
      fit <- Arima(x, order = c(0, 0, 0), fixed = mean(x, na.rm = TRUE), ...)
    } else {
      fit <- Arima(x, order = c(0, 0, 0), include.mean = FALSE, ...)
    }
    fit$x <- orig.x
    fit$series <- series
    fit$call <- match.call()
    fit$call$x <- data.frame(x = x)
    fit$constant <- TRUE
    return(fit)
  }
  ic <- match.arg(ic)
  test <- match.arg(test)
  seasonal.test <- match.arg(seasonal.test)
  # Only consider non-seasonal models if seasonal = FALSE
  if (seasonal) {
    m <- frequency(x)
  } else {
    m <- 1
  }
  if (m < 1) {
    # warning("I can't handle data with frequency less than 1. Seasonality will be ignored.")
    m <- 1
  } else {
    m <- round(m)
  } # Avoid non-integer seasonal periods
  max.p <- min(max.p, floor(serieslength / 3))
  max.q <- min(max.q, floor(serieslength / 3))
  max.P <- min(max.P, floor(serieslength / 3 / m))
  max.Q <- min(max.Q, floor(serieslength / 3 / m))
  # Use AIC if npar <= 3
  # AICc won't work for tiny samples.
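  # (In myarima() below, aicc = aic + 2 * npar * (npar + 1) / (nstar - npar - 1),
  # so the correction term blows up when the sample is this small; plain AIC
  # is a safe fallback.)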
if (serieslength <= 3L) { ic <- "aic" } # Transform data if requested if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") attr(lambda, "biasadj") <- biasadj } # Check xreg and do regression if necessary if (!is.null(xreg)) { if (is.null(colnames(xreg))) { colnames(xreg) <- if (ncol(xreg) == 1) "xreg" else paste("xreg", 1:ncol(xreg), sep = "") } xregg <- xreg xx <- x # Check that xreg is not rank deficient # First check if any columns are constant constant_columns <- apply(xregg, 2, is.constant) if (all(constant_columns)) { xregg <- NULL } else{ if (any(constant_columns)) { xregg <- xregg[, -which(constant_columns), drop = FALSE] } # Now check if it is rank deficient sv <- svd(na.omit(cbind(rep(1, NROW(xregg)), xregg)))$d if (min(sv) / sum(sv) < .Machine$double.eps) { stop("xreg is rank deficient") } # Finally find residuals from regression in order # to estimate appropriate level of differencing j <- !is.na(x) & !is.na(rowSums(xregg)) xx[j] <- residuals(lm(x ~ xregg)) } } else { xx <- x xregg <- NULL } # Choose order of differencing if (stationary) { d <- D <- 0 } if (m == 1) { D <- max.P <- max.Q <- 0 } else if(is.na(D) & length(xx) <= 2*m) { D <- 0 } else if(is.na(D)) { D <- do.call("nsdiffs", c(list(xx, test=seasonal.test, max.D=max.D), seasonal.test.args)) # Make sure xreg is not null after differencing if (D > 0 && !is.null(xregg)) { diffxreg <- diff(xregg, differences = D, lag = m) if (any(apply(diffxreg, 2, is.constant))) { D <- D - 1 } } # Make sure xx is not all missing after differencing if (D > 0) { dx <- diff(xx, differences = D, lag = m) if (all(is.na(dx))) D <- D - 1 } } if (D > 0) { dx <- diff(xx, differences = D, lag = m) } else { dx <- xx } if (!is.null(xregg)) { if (D > 0) { diffxreg <- diff(xregg, differences = D, lag = m) } else { diffxreg <- xregg } } if (is.na(d)) { d <- do.call("ndiffs", c(list(dx, test = test, max.d = max.d), test.args)) # Make sure xreg is not null after differencing if (d > 0 && !is.null(xregg)) { diffxreg <- diff(diffxreg, differences = d, lag = 1) if (any(apply(diffxreg, 2, is.constant))) { d <- d - 1 } } # Make sure dx is not all missing after differencing if (d > 0) { diffdx <- diff(dx, differences=d, lag=1) if(all(is.na(diffdx))) d <- d - 1 } } # Check number of differences selected if (D >= 2) { warning("Having more than one seasonal differences is not recommended. Please consider using only one seasonal difference.") } else if (D + d > 2) { warning("Having 3 or more differencing operations is not recommended. Please consider reducing the total number of differences.") } if (d > 0) { dx <- diff(dx, differences = d, lag = 1) } if(length(dx) == 0L) stop("Not enough data to proceed") else if (is.constant(dx)) { if (is.null(xreg)) { if (D > 0 && d == 0) { fit <- Arima(x, order = c(0, d, 0), seasonal = list(order = c(0, D, 0), period = m), include.constant = TRUE, fixed = mean(dx/m, na.rm = TRUE), method = method, ...) } else if (D > 0 && d > 0) { fit <- Arima(x, order = c(0, d, 0), seasonal = list(order = c(0, D, 0), period = m), method = method, ...) } else if (d == 2) { fit <- Arima(x, order = c(0, d, 0), method = method, ...) } else if (d < 2) { fit <- Arima(x, order = c(0, d, 0), include.constant = TRUE, fixed = mean(dx, na.rm = TRUE), method = method, ...) } else { stop("Data follow a simple polynomial and are not suitable for ARIMA modelling.") } } else # Perfect regression { if (D > 0) { fit <- Arima(x, order = c(0, d, 0), seasonal = list(order = c(0, D, 0), period = m), xreg = xreg, method = method, ...) 
} else { fit <- Arima(x, order = c(0, d, 0), xreg = xreg, method = method, ...) } } fit$x <- orig.x fit$series <- series fit$call <- match.call() fit$call$x <- data.frame(x = x) return(fit) } if (m > 1) { if (max.P > 0) { max.p <- min(max.p, m - 1) } if (max.Q > 0) { max.q <- min(max.q, m - 1) } } # Find constant offset for AIC calculation using white noise model if (approximation) { if (!is.null(truncate)) { tspx <- tsp(x) if (length(x) > truncate) { x <- ts(tail(x, truncate), end = tspx[2], frequency = tspx[3]) } } if (D == 0) { fit <- try(stats::arima(x, order = c(0, d, 0), xreg = xreg, ...), silent = TRUE) } else { fit <- try(stats::arima( x, order = c(0, d, 0), seasonal = list(order = c(0, D, 0), period = m), xreg = xreg, ... ), silent = TRUE) } if (!is.element("try-error", class(fit))) { offset <- -2 * fit$loglik - serieslength * log(fit$sigma2) } else # Not sure this should ever happen { # warning("Unable to calculate AIC offset") offset <- 0 } } else { offset <- 0 } allowdrift <- allowdrift & (d + D) == 1 allowmean <- allowmean & (d + D) == 0 constant <- allowdrift | allowmean if (approximation && trace) { cat("\n Fitting models using approximations to speed things up...\n") } if (!stepwise) { bestfit <- search.arima( x, d, D, max.p, max.q, max.P, max.Q, max.order, stationary, ic, trace, approximation, method = method, xreg = xreg, offset = offset, allowdrift = allowdrift, allowmean = allowmean, parallel = parallel, num.cores = num.cores, ... ) bestfit$call <- match.call() bestfit$call$x <- data.frame(x = x) bestfit$lambda <- lambda bestfit$x <- orig.x bestfit$series <- series bestfit$fitted <- fitted.Arima(bestfit) if (trace) { cat("\n\n Best model:", arima.string(bestfit, padding = TRUE), "\n\n") } return(bestfit) } # Starting model if (length(x) < 10L) { start.p <- min(start.p, 1L) start.q <- min(start.q, 1L) start.P <- 0L start.Q <- 0L } p <- start.p <- min(start.p, max.p) q <- start.q <- min(start.q, max.q) P <- start.P <- min(start.P, max.P) Q <- start.Q <- min(start.Q, max.Q) results <- matrix(NA, nrow = nmodels, ncol = 8) bestfit <- myarima(x, order = c(p, d, q), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[1, ] <- c(p, d, q, P, D, Q, constant, bestfit$ic) # Null model with possible constant fit <- myarima(x, order = c(0, d, 0), seasonal = c(0, D, 0), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[2, ] <- c(0, d, 0, 0, D, 0, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- q <- P <- Q <- 0 } k <- 2 # Basic AR model if (max.p > 0 || max.P > 0) { fit <- myarima(x, order = c(max.p > 0, d, 0), seasonal = c((m > 1) & (max.P > 0), D, 0), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k+1, ] <- c(max.p > 0, d, 0, (m > 1) & (max.P > 0), D, 0, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- (max.p > 0) P <- (m > 1) & (max.P > 0) q <- Q <- 0 } k <- k + 1 } # Basic MA model if (max.q > 0 || max.Q > 0) { fit <- myarima(x, order = c(0, d, max.q > 0), seasonal = c(0, D, (m > 1) & (max.Q > 0)), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) 
results[k+1, ] <- c(0, d, max.q > 0, 0, D, (m > 1) & (max.Q > 0), constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- P <- 0 Q <- (m > 1) & (max.Q > 0) q <- (max.q > 0) } k <- k + 1 } # Null model with no constant if (constant) { fit <- myarima(x, order = c(0, d, 0), seasonal = c(0, D, 0), constant = FALSE, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k+1, ] <- c(0, d, 0, 0, D, 0, 0, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- q <- P <- Q <- 0 } k <- k + 1 } startk <- 0 while (startk < k && k < nmodels) { startk <- k if (P > 0 && newmodel(p, d, q, P - 1, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P - 1, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P - 1, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit P <- (P - 1) next } } if (Q > 0 && newmodel(p, d, q, P, D, Q - 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P, D, Q - 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P, D, Q - 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q - 1) next } } if (P < max.P && newmodel(p, d, q, P + 1, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P + 1, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P + 1, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit P <- (P + 1) next } } if (Q < max.Q && newmodel(p, d, q, P, D, Q + 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P, D, Q + 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P, D, Q + 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q + 1) next } } if (Q > 0 && P > 0 && newmodel(p, d, q, P - 1, D, Q - 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P - 1, D, Q - 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P - 1, D, Q - 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q - 1) P <- (P - 1) next } } if (Q < max.Q && P > 0 && newmodel(p, d, q, P - 1, D, Q + 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P - 1, D, Q + 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P - 1, D, Q + 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q + 1) P <- (P - 1) next } } if (Q > 0 && P < max.P && newmodel(p, d, q, P + 1, D, Q - 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P + 1, D, Q - 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) 
results[k, ] <- c(p, d, q, P + 1, D, Q - 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q - 1) P <- (P + 1) next } } if (Q < max.Q && P < max.P && newmodel(p, d, q, P + 1, D, Q + 1, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P + 1, D, Q + 1), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P + 1, D, Q + 1, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit Q <- (Q + 1) P <- (P + 1) next } } if (p > 0 && newmodel(p - 1, d, q, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p - 1, d, q), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p - 1, d, q, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- (p - 1) next } } if (q > 0 && newmodel(p, d, q - 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q - 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q - 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q - 1) next } } if (p < max.p && newmodel(p + 1, d, q, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p + 1, d, q), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p + 1, d, q, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit p <- (p + 1) next } } if (q < max.q && newmodel(p, d, q + 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q + 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q + 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q + 1) next } } if (q > 0 && p > 0 && newmodel(p - 1, d, q - 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p - 1, d, q - 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p - 1, d, q - 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q - 1) p <- (p - 1) next } } if (q < max.q && p > 0 && newmodel(p - 1, d, q + 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p - 1, d, q + 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p - 1, d, q + 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q + 1) p <- (p - 1) next } } if (q > 0 && p < max.p && newmodel(p + 1, d, q - 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p + 1, d, q - 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) 
results[k, ] <- c(p + 1, d, q - 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q - 1) p <- (p + 1) next } } if (q < max.q && p < max.p && newmodel(p + 1, d, q + 1, P, D, Q, constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p + 1, d, q + 1), seasonal = c(P, D, Q), constant = constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p + 1, d, q + 1, P, D, Q, constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit q <- (q + 1) p <- (p + 1) next } } if (allowdrift || allowmean) { if (newmodel(p, d, q, P, D, Q, !constant, results[1:k, ])) { k <- k + 1; if(k>nmodels) next fit <- myarima(x, order = c(p, d, q), seasonal = c(P, D, Q), constant = !constant, ic, trace, approximation, method = method, offset = offset, xreg = xreg, ...) results[k, ] <- c(p, d, q, P, D, Q, !constant, fit$ic) if (fit$ic < bestfit$ic) { bestfit <- fit constant <- !constant } } } } if(k > nmodels){ warning(sprintf("Stepwise search was stopped early due to reaching the model number limit: `nmodels = %i`", nmodels)) } # Refit using ML if approximation used for IC if (approximation && !is.null(bestfit$arma)) { if (trace) { cat("\n\n Now re-fitting the best model(s) without approximations...\n") } icorder <- order(results[, 8]) nmodels <- sum(!is.na(results[, 8])) for (i in seq(nmodels)) { k <- icorder[i] fit <- myarima( x, order = c(results[k, 1], d, results[k, 3]), seasonal = c(results[k, 4], D, results[k, 6]), constant = results[k, 7] == 1, ic, trace, approximation = FALSE, method = method, xreg = xreg, ... ) if (fit$ic < Inf) { bestfit <- fit break } } } # Nothing fitted if (bestfit$ic == Inf && !isTRUE(method=="CSS")) { if (trace) { cat("\n") } stop("No suitable ARIMA model found") } # Return best fit bestfit$x <- orig.x bestfit$series <- series bestfit$ic <- NULL bestfit$call <- match.call() bestfit$call$x <- data.frame(x = x) bestfit$lambda <- lambda bestfit$fitted <- fitted.Arima(bestfit) if (trace) { cat("\n\n Best model:", arima.string(bestfit, padding = TRUE), "\n\n") } return(bestfit) } # Calls arima from stats package and adds data to the returned object # Also allows refitting to new data # and drift terms to be included. myarima <- function(x, order = c(0, 0, 0), seasonal = c(0, 0, 0), constant=TRUE, ic="aic", trace=FALSE, approximation=FALSE, offset=0, xreg=NULL, method = NULL, ...) 
{ # Length of non-missing interior missing <- is.na(x) firstnonmiss <- head(which(!missing),1) lastnonmiss <- tail(which(!missing),1) n <- sum(!missing[firstnonmiss:lastnonmiss]) m <- frequency(x) use.season <- (sum(seasonal) > 0) & m > 0 diffs <- order[2] + seasonal[2] if(is.null(method)){ if (approximation) { method <- "CSS" } else { method <- "CSS-ML" } } if (diffs == 1 && constant) { xreg <- `colnames<-`(cbind(drift = 1:length(x), xreg), make.unique(c("drift", if(is.null(colnames(xreg)) && !is.null(xreg)) rep("", NCOL(xreg)) else colnames(xreg)))) if (use.season) { suppressWarnings(fit <- try(stats::arima(x = x, order = order, seasonal = list(order = seasonal, period = m), xreg = xreg, method = method, ...), silent = TRUE)) } else { suppressWarnings(fit <- try(stats::arima(x = x, order = order, xreg = xreg, method = method, ...), silent = TRUE)) } } else { if (use.season) { suppressWarnings(fit <- try(stats::arima(x = x, order = order, seasonal = list(order = seasonal, period = m), include.mean = constant, method = method, xreg = xreg, ...), silent = TRUE)) } else { suppressWarnings(fit <- try(stats::arima(x = x, order = order, include.mean = constant, method = method, xreg = xreg, ...), silent = TRUE)) } } if (is.null(xreg)) { nxreg <- 0 } else { nxreg <- ncol(as.matrix(xreg)) } if (!is.element("try-error", class(fit))) { nstar <- n - order[2] - seasonal[2] * m if (diffs == 1 && constant) { # fitnames <- names(fit$coef) # fitnames[length(fitnames)-nxreg] <- "drift" # names(fit$coef) <- fitnames fit$xreg <- xreg } npar <- length(fit$coef[fit$mask]) + 1 if (method == "CSS") { fit$aic <- offset + nstar * log(fit$sigma2) + 2 * npar } if (!is.na(fit$aic)) { fit$bic <- fit$aic + npar * (log(nstar) - 2) fit$aicc <- fit$aic + 2 * npar * (npar + 1) / (nstar - npar - 1) fit$ic <- switch(ic, bic = fit$bic, aic = fit$aic, aicc = fit$aicc) } else { fit$aic <- fit$bic <- fit$aicc <- fit$ic <- Inf } # Adjust residual variance to be unbiased fit$sigma2 <- sum(fit$residuals ^ 2, na.rm = TRUE) / (nstar - npar + 1) # Check for unit roots minroot <- 2 if (order[1] + seasonal[1] > 0) { testvec <- fit$model$phi k <- abs(testvec) > 1e-8 if (sum(k) > 0) { last.nonzero <- max(which(k)) } else { last.nonzero <- 0 } if (last.nonzero > 0) { testvec <- testvec[1:last.nonzero] proots <- try(polyroot(c(1,-testvec))) if (!is.element("try-error", class(proots))) { minroot <- min(minroot, abs(proots)) } else fit$ic <- Inf } } if (order[3] + seasonal[3] > 0 & fit$ic < Inf) { testvec <- fit$model$theta k <- abs(testvec) > 1e-8 if (sum(k) > 0) { last.nonzero <- max(which(k)) } else { last.nonzero <- 0 } if (last.nonzero > 0) { testvec <- testvec[1:last.nonzero] proots <- try(polyroot(c(1,testvec))) if (!is.element("try-error", class(proots))) { minroot <- min(minroot, abs(proots)) } else fit$ic <- Inf } } # Avoid bad models if (minroot < 1 + 1e-2 | checkarima(fit)) { fit$ic <- Inf } fit$xreg <- xreg if (trace) { cat("\n", arima.string(fit, padding = TRUE), ":", fit$ic) } return(structure(fit, class = c("forecast_ARIMA", "ARIMA", "Arima"))) } else { # Catch errors due to unused arguments if (length(grep("unused argument", fit)) > 0L) { stop(fit[1]) } if (trace) { cat("\n ARIMA(", order[1], ",", order[2], ",", order[3], ")", sep = "") if (use.season) { cat("(", seasonal[1], ",", seasonal[2], ",", seasonal[3], ")[", m, "]", sep = "") } if (constant && (order[2] + seasonal[2] == 0)) { cat(" with non-zero mean") } else if (constant && (order[2] + seasonal[2] == 1)) { cat(" with drift ") } else if (!constant && (order[2] + 
seasonal[2] == 0)) { cat(" with zero mean ") } else { cat(" ") } cat(" :", Inf) } return(list(ic = Inf)) } } newmodel <- function(p, d, q, P, D, Q, constant, results) { n <- nrow(results) for (i in 1:n) { if(!all(is.na(results[i, seq(7)]))) { if (all(c(p, d, q, P, D, Q, constant) == results[i, 1:7])) { return(FALSE) } } } return(TRUE) } arima.string <- function(object, padding=FALSE) { order <- object$arma[c(1, 6, 2, 3, 7, 4, 5)] m <- order[7] result <- paste("ARIMA(", order[1], ",", order[2], ",", order[3], ")", sep = "") if (m > 1 && sum(order[4:6]) > 0) { result <- paste(result, "(", order[4], ",", order[5], ",", order[6], ")[", m, "]", sep = "") } if (padding && m > 1 && sum(order[4:6]) == 0) { result <- paste(result, " ", sep = "") if (m <= 9) { result <- paste(result, " ", sep = "") } else if (m <= 99) { result <- paste(result, " ", sep = "") } else { result <- paste(result, " ", sep = "") } } if (!is.null(object$xreg)) { if (NCOL(object$xreg) == 1 && is.element("drift", names(object$coef))) { result <- paste(result, "with drift ") } else { result <- paste("Regression with", result, "errors") } } else { if (is.element("constant", names(object$coef)) || is.element("intercept", names(object$coef))) { result <- paste(result, "with non-zero mean") } else if (order[2] == 0 && order[5] == 0) { result <- paste(result, "with zero mean ") } else { result <- paste(result, " ") } } if (!padding) { # Strip trailing spaces result <- gsub("[ ]*$", "", result) } return(result) } #' @export summary.Arima <- function(object, ...) { class(object) <- c("summary.Arima", class(object)) object } #' @export print.summary.Arima <- function(x, ...) { NextMethod() cat("\nTraining set error measures:\n") print(accuracy(x)) } # Check that Arima object has positive coefficient variances without returning warnings checkarima <- function(object) { suppressWarnings(test <- any(is.nan(sqrt(diag(object$var.coef))))) return(test) } #' Is an object constant? #' #' Returns true if the object's numerical values do not vary. #' #' #' @param x object to be tested #' @export is.constant <- function(x) { x <- as.numeric(x) y <- rep(x[1], length(x)) return(isTRUE(all.equal(x, y))) } forecast/R/bootstrap.R0000644000176200001440000000720114323125536014425 0ustar liggesusers# Bootstrap functions # Trend estimation like STL without seasonality. # Non-robust version tl <- function(x, ...) 
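# Illustrative sketch (tl() is internal; assumes a univariate ts input):
#   comp <- tl(WWWusage)
#   head(comp$time.series)  # two columns: trend and remainder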
{ x <- as.ts(x) tspx <- tsp(x) n <- length(x) tt <- 1:n fit <- supsmu(tt, x) out <- ts(cbind(trend = fit$y, remainder = x - fit$y)) tsp(out) <- tsp(x) out <- structure(list(time.series = out), class = "stl") return(out) } # Function to return some bootstrap samples of x # based on LPB lpb <- function(x, nsim=100) { n <- length(x) meanx <- mean(x) y <- x - meanx gamma <- wacf(y, lag.max = n)$acf[, , 1] s <- length(gamma) Gamma <- matrix(1, s, s) d <- row(Gamma) - col(Gamma) for (i in 1:(s - 1)) Gamma[d == i | d == (-i)] <- gamma[i + 1] L <- t(chol(Gamma)) W <- solve(L) %*% matrix(y, ncol = 1) out <- ts(L %*% matrix(sample(W, n * nsim, replace = TRUE), nrow = n, ncol = nsim) + meanx) tsp(out) <- tsp(x) return(out) } # Bootstrapping time series (based on Bergmeir et al., 2016, IJF paper) # Author: Fotios Petropoulos MBB <- function(x, window_size) { bx <- array(0, (floor(length(x) / window_size) + 2) * window_size) for (i in 1:(floor(length(x) / window_size) + 2)) { c <- sample(1:(length(x) - window_size + 1), 1) bx[((i - 1) * window_size + 1):(i * window_size)] <- x[c:(c + window_size - 1)] } start_from <- sample(0:(window_size - 1), 1) + 1 bx[start_from:(start_from + length(x) - 1)] } #' Box-Cox and Loess-based decomposition bootstrap. #' #' Generates bootstrapped versions of a time series using the Box-Cox and #' Loess-based decomposition bootstrap. #' #' The procedure is described in Bergmeir et al. Box-Cox decomposition is #' applied, together with STL or Loess (for non-seasonal time series), and the #' remainder is bootstrapped using a moving block bootstrap. #' #' @param x Original time series. #' @param num Number of bootstrapped versions to generate. #' @param block_size Block size for the moving block bootstrap. #' @return A list with bootstrapped versions of the series. The first series in #' the list is the original series. #' @author Christoph Bergmeir, Fotios Petropoulos #' @seealso \code{\link{baggedETS}}. #' @references Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging #' Exponential Smoothing Methods using STL Decomposition and Box-Cox #' Transformation. International Journal of Forecasting 32, 303-312. 
#' @keywords ts #' @examples #' bootstrapped_series <- bld.mbb.bootstrap(WWWusage, 100) #' #' @export bld.mbb.bootstrap <- function(x, num, block_size=NULL) { if(length(x) <= 1L) return(rep(list(x), num)) freq <- frequency(x) if(length(x) <= 2*freq) freq <- 1L if (is.null(block_size)) { block_size <- ifelse(freq > 1, 2 * freq, min(8, floor(length(x) / 2))) } xs <- list() xs[[1]] <- x # the first series is the original one if (num > 1) { # Box-Cox transformation if (min(x) > 1e-6) { lambda <- BoxCox.lambda(x, lower = 0, upper = 1) } else { lambda <- 1 } x.bc <- BoxCox(x, lambda) lambda <- attr(x.bc, "lambda") if (freq > 1) { # STL decomposition x.stl <- stl(ts(x.bc, frequency = freq), "per")$time.series seasonal <- x.stl[, 1] trend <- x.stl[, 2] remainder <- x.stl[, 3] } else { # Loess trend <- 1:length(x) suppressWarnings( x.loess <- loess(ts(x.bc, frequency = 1) ~ trend, span = 6 / length(x), degree = 1) ) seasonal <- rep(0, length(x)) trend <- x.loess$fitted remainder <- x.loess$residuals } } # Bootstrap some series, using MBB for (i in 2:num) { xs[[i]] <- ts(InvBoxCox(trend + seasonal + MBB(remainder, block_size), lambda)) tsp(xs[[i]]) <- tsp(x) } xs } forecast/R/armaroots.R0000644000176200001440000001112214633662406014422 0ustar liggesusers# Functions to plot the roots of an ARIMA model # Compute AR roots arroots <- function(object) { if (!any(is.element(class(object), c("Arima", "ar")))) { stop("object must be of class Arima or ar") } if (is.element("Arima", class(object))) { parvec <- object$model$phi } else { parvec <- object$ar } if (length(parvec) > 0) { last.nonzero <- max(which(abs(parvec) > 1e-08)) if (last.nonzero > 0) { return(structure(list( roots = polyroot(c(1, -parvec[1:last.nonzero])), type = "AR" ), class = "armaroots")) } } return(structure(list(roots = numeric(0), type = "AR"), class = "armaroots")) } # Compute MA roots maroots <- function(object) { if (!is.element("Arima", class(object))) { stop("object must be of class Arima") } parvec <- object$model$theta if (length(parvec) > 0) { last.nonzero <- max(which(abs(parvec) > 1e-08)) if (last.nonzero > 0) { return(structure(list( roots = polyroot(c(1, parvec[1:last.nonzero])), type = "MA" ), class = "armaroots")) } } return(structure(list(roots = numeric(0), type = "MA"), class = "armaroots")) } #' @export plot.armaroots <- function(x, xlab, ylab, main, ...) { if (missing(main)) { main <- paste("Inverse", x$type, "roots") } oldpar <- par(pty = "s") on.exit(par(oldpar)) plot( c(-1, 1), c(-1, 1), xlab = xlab, ylab = ylab, type = "n", bty = "n", xaxt = "n", yaxt = "n", main = main, ... ) axis(1, at = c(-1, 0, 1), line = 0.5, tck = -0.025) axis(2, at = c(-1, 0, 1), labels = c("-i", "0", "i"), line = 0.5, tck = -0.025) circx <- seq(-1, 1, length.out = 501) circy <- sqrt(1 - circx^2) lines(c(circx, circx), c(circy, -circy), col = "gray") lines(c(-2, 2), c(0, 0), col = "gray") lines(c(0, 0), c(-2, 2), col = "gray") if (length(x$roots) > 0) { inside <- abs(x$roots) > 1 points(1 / x$roots[inside], pch = 19, col = "black") if (sum(!inside) > 0) { points(1 / x$roots[!inside], pch = 19, col = "red") } } } #' Plot characteristic roots from ARIMA model #' #' Produces a plot of the inverse AR and MA roots of an ARIMA model. Inverse #' roots outside the unit circle are shown in red. #' #' \code{autoplot} will produce an equivalent plot as a ggplot object. #' #' @param x Object of class \dQuote{Arima} or \dQuote{ar}. #' @param object Object of class \dQuote{Arima} or \dQuote{ar}. Used for ggplot #' graphics (S3 method consistency). 
#' @param type Determines if both AR and MA roots are plotted, or if just one
#' set is plotted.
#' @param main Main title. Default is "Inverse AR roots" or "Inverse MA roots".
#' @param xlab X-axis label.
#' @param ylab Y-axis label.
#' @param ... Other plotting parameters passed to \code{\link[graphics]{par}}.
#' @return None. Function produces a plot.
#' @author Rob J Hyndman & Mitchell O'Hara-Wild
#' @seealso \code{\link{Arima}}, \code{\link[stats]{ar}}
#' @keywords hplot
#' @examples
#'
#' library(ggplot2)
#'
#' fit <- Arima(WWWusage, order = c(3, 1, 0))
#' plot(fit)
#' autoplot(fit)
#'
#' fit <- Arima(woolyrnq, order = c(2, 0, 0), seasonal = c(2, 1, 1))
#' plot(fit)
#' autoplot(fit)
#'
#' plot(ar.ols(gold[1:61]))
#' autoplot(ar.ols(gold[1:61]))
#' @export
plot.Arima <- function(x, type = c("both", "ar", "ma"), main,
                       xlab = "Real", ylab = "Imaginary", ...) {
  type <- match.arg(type)
  if (!is.element("Arima", class(x))) {
    stop("This function is for objects of class 'Arima'.")
  }
  q <- p <- 0
  # AR component
  if (length(x$model$phi) > 0) {
    test <- abs(x$model$phi) > 1e-09
    if (any(test)) {
      p <- max(which(test))
    }
  }
  # MA component
  if (length(x$model$theta) > 0) {
    test <- abs(x$model$theta) > 1e-09
    if (any(test)) {
      q <- max(which(test))
    }
  }
  # Check for MA parts
  if (type == "both") {
    if (p == 0) {
      type <- "ma"
    } else if (q == 0) {
      type <- "ar"
    }
  }
  if ((type == "ar" && (p == 0)) || (type == "ma" && (q == 0)) || (p == 0 && q == 0)) {
    warning("No roots to plot")
    if (missing(main)) {
      main <- "No AR or MA roots"
    }
  }
  if (type == "both") {
    oldpar <- par(mfrow = c(1, 2))
    on.exit(par(oldpar))
  }
  if (type != "ma") {
    plot(arroots(x), main = main, xlab = xlab, ylab = ylab, ...)
  }
  if (type != "ar") {
    plot(maroots(x), main = main, xlab = xlab, ylab = ylab, ...)
  }
}

#' @rdname plot.Arima
#' @export
plot.ar <- function(x, main, xlab = "Real", ylab = "Imaginary", ...) {
  if (!is.element("ar", class(x))) {
    stop("This function is for objects of class 'ar'.")
  }
  plot(arroots(x), main = main, xlab = xlab, ylab = ylab, ...)
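  # "ar" objects carry no MA component, so only the inverse AR roots are shown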
} forecast/R/ggplot.R0000644000176200001440000024153114634700303013706 0ustar liggesusersglobalVariables(".data") #' @inherit ggplot2::autolayer #' @export autolayer <- function(object, ...){ UseMethod("autolayer") } #' @importFrom ggplot2 autoplot #' @export ggplot2::autoplot ggAddExtras <- function(xlab=NA, ylab=NA, main=NA) { dots <- eval.parent(quote(list(...))) extras <- list() if ("xlab" %in% names(dots) || is.null(xlab) || any(!is.na(xlab))) { if ("xlab" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::xlab(dots$xlab) } else { extras[[length(extras) + 1]] <- ggplot2::xlab(paste0(xlab[!is.na(xlab)], collapse = " ")) } } if ("ylab" %in% names(dots) || is.null(ylab) || any(!is.na(ylab))) { if ("ylab" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::ylab(dots$ylab) } else { extras[[length(extras) + 1]] <- ggplot2::ylab(paste0(ylab[!is.na(ylab)], collapse = " ")) } } if ("main" %in% names(dots) || is.null(main) || any(!is.na(main))) { if ("main" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::ggtitle(dots$main) } else { extras[[length(extras) + 1]] <- ggplot2::ggtitle(paste0(main[!is.na(main)], collapse = " ")) } } if ("xlim" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::xlim(dots$xlim) } if ("ylim" %in% names(dots)) { extras[[length(extras) + 1]] <- ggplot2::ylim(dots$ylim) } return(extras) } ggtsbreaks <- function(x) { # Make x axis contain only whole numbers (e.g., years) return(unique(round(pretty(floor(x[1]):ceiling(x[2]))))) } #' ggplot (Partial) Autocorrelation and Cross-Correlation Function Estimation #' and Plotting #' #' Produces a ggplot object of their equivalent Acf, Pacf, Ccf, taperedacf and #' taperedpacf functions. #' #' If \code{autoplot} is given an \code{acf} or \code{mpacf} object, then an #' appropriate ggplot object will be created. #' #' ggtaperedpacf #' @param object Object of class \dQuote{\code{acf}}. #' @param x a univariate or multivariate (not Ccf) numeric time series object #' or a numeric vector or matrix. #' @param y a univariate numeric time series object or a numeric vector. #' @param ci coverage probability for confidence interval. Plotting of the #' confidence interval is suppressed if ci is zero or negative. #' @param lag.max maximum lag at which to calculate the acf. #' @param type character string giving the type of acf to be computed. Allowed #' values are "\code{correlation}" (the default), \dQuote{\code{covariance}} or #' \dQuote{\code{partial}}. #' @param plot logical. If \code{TRUE} (the default) the resulting ACF, PACF or #' CCF is plotted. #' @param na.action function to handle missing values. Default is #' \code{\link[stats]{na.contiguous}}. Useful alternatives are #' \code{\link[stats]{na.pass}} and \code{\link{na.interp}}. #' @param demean Should covariances be about the sample means? #' @param calc.ci If \code{TRUE}, confidence intervals for the ACF/PACF #' estimates are calculated. #' @param level Percentage level used for the confidence intervals. #' @param nsim The number of bootstrap samples used in estimating the #' confidence intervals. #' @param ... Other plotting parameters to affect the plot. #' @return A ggplot object. 
#' @author Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{plot.acf}}, \code{\link{Acf}}, #' \code{\link[stats]{acf}}, \code{\link{taperedacf}} #' @examples #' #' library(ggplot2) #' ggAcf(wineind) #' wineind %>% Acf(plot=FALSE) %>% autoplot #' \dontrun{ #' wineind %>% taperedacf(plot=FALSE) %>% autoplot #' ggtaperedacf(wineind) #' ggtaperedpacf(wineind)} #' ggCcf(mdeaths, fdeaths) #' #' @export autoplot.acf <- function(object, ci=0.95, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!inherits(object, "acf")) { stop("autoplot.acf requires a acf object, use object=object") } acf <- `dimnames<-`(object$acf, list(NULL, object$snames, object$snames)) lag <- `dimnames<-`(object$lag, list(NULL, object$snames, object$snames)) data <- as.data.frame.table(acf)[-1] data$lag <- as.numeric(lag) if (object$type == "correlation" & is.null(object$ccf)) { data <- data[data$lag != 0, ] } # Initialise ggplot object p <- ggplot2::ggplot( ggplot2::aes(x = .data[["lag"]], xend = .data[["lag"]], y = 0, yend = .data[["Freq"]]), data = data ) p <- p + ggplot2::geom_hline(yintercept = 0) # Add data p <- p + ggplot2::geom_segment(lineend = "butt", ...) # Add ci lines (assuming white noise input) ci <- qnorm((1 + ci) / 2) / sqrt(object$n.used) p <- p + ggplot2::geom_hline(yintercept = c(-ci, ci), colour = "blue", linetype = "dashed") # Add facets if needed if(any(dim(object$acf)[2:3] != c(1,1))){ p <- p + ggplot2::facet_grid( as.formula(paste0(colnames(data)[1:2], collapse = "~")) ) } # Prepare graph labels if (!is.null(object$ccf)) { ylab <- "CCF" ticktype <- "ccf" main <- paste("Series:", object$snames) nlags <- round(dim(object$lag)[1] / 2) } else if (object$type == "partial") { ylab <- "PACF" ticktype <- "acf" main <- paste("Series:", object$series) nlags <- dim(object$lag)[1] } else if (object$type == "correlation") { ylab <- "ACF" ticktype <- "acf" main <- paste("Series:", object$series) nlags <- dim(object$lag)[1] } else { ylab <- NULL } # Add seasonal x-axis # Change ticks to be seasonal and prepare default title if (!is.null(object$tsp)) { freq <- object$tsp[3] } else { freq <- 1 } if (!is.null(object$periods)) { periods <- object$periods periods <- periods[periods != freq] minorbreaks <- periods * seq(-20:20) } else { minorbreaks <- NULL } p <- p + ggplot2::scale_x_continuous(breaks = seasonalaxis( freq, nlags, type = ticktype, plot = FALSE ), minor_breaks = minorbreaks) p <- p + ggAddExtras(ylab = ylab, xlab = "Lag", main = main) return(p) } } #' @rdname autoplot.acf #' @export ggAcf <- function(x, lag.max = NULL, type = c("correlation", "covariance", "partial"), plot = TRUE, na.action = na.contiguous, demean=TRUE, ...) { object <- Acf(x, lag.max = lag.max, type = type, na.action = na.action, demean = demean, plot = FALSE) object$tsp <- tsp(x) object$periods <- attributes(x)$msts object$series <- deparse(substitute(x)) if (plot) { return(autoplot(object, ...)) } else { return(object) } } #' @rdname autoplot.acf #' @export ggPacf <- function(x, lag.max = NULL, plot = TRUE, na.action = na.contiguous, demean=TRUE, ...) 
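# Usage sketch (ggPacf() mirrors ggAcf(); see the roxygen examples above):
#   ggPacf(wineind)                       # plot the partial ACF
#   obj <- ggPacf(wineind, plot = FALSE)  # return the "acf" object instead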
{ object <- Acf(x, lag.max = lag.max, type = "partial", na.action = na.action, demean = demean, plot = FALSE) object$tsp <- tsp(x) object$periods <- attributes(x)$msts object$series <- deparse(substitute(x)) if (plot) { return(autoplot(object, ...)) } else { return(object) } } #' @rdname autoplot.acf #' @export ggCcf <- function(x, y, lag.max=NULL, type=c("correlation", "covariance"), plot=TRUE, na.action=na.contiguous, ...) { object <- Ccf(x, y, lag.max = lag.max, type = type, na.action = na.action, plot = FALSE) object$snames <- paste(deparse(substitute(x)), "&", deparse(substitute(y))) object$ccf <- TRUE if (plot) { return(autoplot(object, ...)) } else { return(object) } } #' @rdname autoplot.acf #' @export autoplot.mpacf <- function(object, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!inherits(object, "mpacf")) { stop("autoplot.mpacf requires a mpacf object, use object=object") } if (!is.null(object$lower)) { data <- data.frame(Lag = 1:object$lag, z = object$z, sig = (object$lower < 0 & object$upper > 0)) cidata <- data.frame(Lag = rep(1:object$lag, each = 2) + c(-0.5, 0.5), z = rep(object$z, each = 2), upper = rep(object$upper, each = 2), lower = rep(object$lower, each = 2)) plotpi <- TRUE } else { data <- data.frame(Lag = 1:object$lag, z = object$z) plotpi <- FALSE } # Initialise ggplot object p <- ggplot2::ggplot() p <- p + ggplot2::geom_hline(ggplot2::aes(yintercept = 0), linewidth = 0.2) # Add data if (plotpi) { p <- p + ggplot2::geom_ribbon(ggplot2::aes(x = .data[["Lag"]], ymin = .data[["lower"]], ymax = .data[["upper"]]), data = cidata, fill = "grey50") } p <- p + ggplot2::geom_line(ggplot2::aes(x = .data[["Lag"]], y = .data[["z"]]), data = data) if (plotpi) { p <- p + ggplot2::geom_point(ggplot2::aes(x = .data[["Lag"]], y = .data[["z"]], colour = .data[["sig"]]), data = data) } # Change ticks to be seasonal freq <- frequency(object$x) msts <- is.element("msts", class(object$x)) # Add seasonal x-axis if (msts) { periods <- attributes(object$x)$msts periods <- periods[periods != freq] minorbreaks <- periods * seq(-20:20) } else { minorbreaks <- NULL } p <- p + ggplot2::scale_x_continuous( breaks = seasonalaxis(frequency(object$x), length(data$Lag), type = "acf", plot = FALSE), minor_breaks = minorbreaks ) if (object$type == "partial") { ylab <- "PACF" } else if (object$type == "correlation") { ylab <- "ACF" } p <- p + ggAddExtras(ylab = ylab) return(p) } } #' @rdname autoplot.acf #' @export ggtaperedacf <- function(x, lag.max=NULL, type=c("correlation", "partial"), plot=TRUE, calc.ci=TRUE, level=95, nsim=100, ...) { cl <- match.call() if (plot) { cl$plot <- FALSE } cl[[1]] <- quote(taperedacf) object <- eval.parent(cl) if (plot) { return(autoplot(object, ...)) } else { return(object) } } #' @rdname autoplot.acf #' @export ggtaperedpacf <- function(x, ...) { ggtaperedacf(x, type = "partial", ...) } #' @rdname plot.Arima #' @export autoplot.Arima <- function(object, type = c("both", "ar", "ma"), ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { if (is.Arima(object)) { # Detect type type <- match.arg(type) q <- p <- 0 if (length(object$model$phi) > 0) { test <- abs(object$model$phi) > 1e-09 if (any(test)) { p <- max(which(test)) } } if (length(object$model$theta) > 0) { test <- abs(object$model$theta) > 1e-09 if (any(test)) { q <- max(which(test)) } } if (type == "both") { type <- c("ar", "ma") } } else if (inherits(object, "ar")) { type <- "ar" p <- length(arroots(object)$roots) q <- 0 } else { stop("autoplot.Arima requires an Arima object") } # Remove NULL type type <- intersect(type, c("ar", "ma")[c(p > 0, q > 0)]) # Prepare data arData <- maData <- NULL allRoots <- data.frame(roots = numeric(0), type = character(0)) if ("ar" %in% type && p > 0) { arData <- arroots(object) allRoots <- rbind(allRoots, data.frame(roots = arData$roots, type = arData$type)) } if ("ma" %in% type && q > 0) { maData <- maroots(object) allRoots <- rbind(allRoots, data.frame(roots = maData$roots, type = maData$type)) } allRoots$Real <- Re(1 / allRoots$roots) allRoots$Imaginary <- Im(1 / allRoots$roots) allRoots$UnitCircle <- factor(ifelse((abs(allRoots$roots) > 1), "Within", "Outside")) # Initialise general ggplot object p <- ggplot2::ggplot(ggplot2::aes(x = .data[["Real"]], y = .data[["Imaginary"]], colour = .data[["UnitCircle"]]), data = allRoots) p <- p + ggplot2::coord_fixed(ratio = 1) p <- p + ggplot2::annotate( "path", x = cos(seq(0, 2 * pi, length.out = 100)), y = sin(seq(0, 2 * pi, length.out = 100)) ) p <- p + ggplot2::geom_vline(xintercept = 0) p <- p + ggplot2::geom_hline(yintercept = 0) p <- p + ggAddExtras(xlab = "Real", ylab = "Imaginary") if (NROW(allRoots) == 0) { return(p + ggAddExtras(main = "No AR or MA roots")) } p <- p + ggplot2::geom_point(size = 3) if (length(type) == 1) { p <- p + ggAddExtras(main = paste("Inverse", toupper(type), "roots")) } else { p <- p + ggplot2::facet_wrap(~ type, labeller = function(labels) lapply(labels, function(x) paste("Inverse", as.character(x), "roots"))) } } return(p) } #' @rdname plot.Arima #' @export autoplot.ar <- function(object, ...) { autoplot.Arima(object, ...) } #' @rdname autoplot.seas #' @export autoplot.decomposed.ts <- function(object, labels=NULL, range.bars = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { if (!inherits(object, "decomposed.ts")) { stop("autoplot.decomposed.ts requires a decomposed.ts object") } if (is.null(labels)) { labels <- c("trend", "seasonal", "remainder") } cn <- c("data", labels) data <- data.frame( datetime = rep(time(object$x), 4), y = c(object$x, object$trend, object$seasonal, object$random), parts = factor(rep(cn, each = NROW(object$x)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes(x = .data[["datetime"]], y = .data[["y"]]), data = data) # Add data int <- as.numeric(object$type == "multiplicative") p <- p + ggplot2::geom_line(ggplot2::aes(x = .data[["datetime"]], y = .data[["y"]]), data = subset(data, data$parts != cn[4]), na.rm = TRUE) p <- p + ggplot2::geom_segment( ggplot2::aes(x = .data[["datetime"]], xend = .data[["datetime"]], y = int, yend = .data[["y"]]), data = subset(data, data$parts == cn[4]), lineend = "butt", na.rm = TRUE ) p <- p + ggplot2::facet_grid("parts ~ .", scales = "free_y", switch = "y") p <- p + ggplot2::geom_hline(ggplot2::aes(yintercept = .data[["y"]]), data = data.frame(y = int, parts = factor(cn[4], levels = cn))) if (is.null(range.bars)) { range.bars <- object$type == "additive" } if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = factor(colnames(yranges), levels = cn), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect( ggplot2::aes( xmin = .data[["left"]], xmax = .data[["right"]], ymax = .data[["top"]], ymin = .data[["bottom"]] ), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } # Add axis labels p <- p + ggAddExtras( main = paste("Decomposition of", object$type, "time series"), xlab = "Time", ylab = "" ) # Make x axis contain only whole numbers (e.g., years) p <- p + ggplot2::scale_x_continuous(breaks = unique(round(pretty(data$datetime)))) return(p) } } #' @rdname plot.ets #' @export autoplot.ets <- function(object, range.bars = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { if (!is.ets(object)) { stop("autoplot.ets requires an ets object, use object=object") } names <- c(y = "observed", l = "level", b = "slope", s1 = "season") data <- cbind(object$x, object$states[, colnames(object$states) %in% names(names)]) cn <- c("y", c(colnames(object$states))) colnames(data) <- cn <- names[stats::na.exclude(match(cn, names(names)))] # Convert to longform data <- data.frame( datetime = rep(time(data), NCOL(data)), y = c(data), parts = factor(rep(cn, each = NROW(data)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes(x = .data[["datetime"]], y = .data[["y"]]), data = data, ylab = "") # Add data p <- p + ggplot2::geom_line(na.rm = TRUE) p <- p + ggplot2::facet_grid(parts ~ ., scales = "free_y", switch = "y") if (is.null(range.bars)) { range.bars <- is.null(object$lambda) } if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = factor(colnames(yranges), levels = cn), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect( ggplot2::aes( xmin = .data[["left"]], xmax = .data[["right"]], ymax = .data[["top"]], ymin = .data[["bottom"]] ), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } p <- p + ggAddExtras(xlab = NULL, ylab = "", main = paste("Components of", object$method, "method")) return(p) } } #' @rdname plot.bats #' @export autoplot.tbats <- function(object, range.bars = FALSE, ...) { cl <- match.call() cl[[1]] <- quote(autoplot.bats) eval.parent(cl) } #' @rdname plot.bats #' @export autoplot.bats <- function(object, range.bars = FALSE, ...) { data <- tbats.components(object) cn <- colnames(data) # Convert to longform data <- data.frame( datetime = rep(time(data), NCOL(data)), y = c(data), parts = factor(rep(cn, each = NROW(data)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes(x = .data[["datetime"]], y = .data[["y"]]), data = data, ylab = "") # Add data p <- p + ggplot2::geom_line(na.rm = TRUE) p <- p + ggplot2::facet_grid(parts ~ ., scales = "free_y", switch = "y") if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = factor(colnames(yranges), levels = cn), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect( ggplot2::aes( xmin = .data[["left"]], xmax = .data[["right"]], ymax = .data[["top"]], ymin = .data[["bottom"]] ), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } p <- p + ggAddExtras(xlab = NULL, ylab = "", main = paste("Components of", object$method, "method")) return(p) } #' @rdname plot.forecast #' @export autoplot.forecast <- function(object, include, PI=TRUE, shadecols=c("#596DD5", "#D5DBFF"), fcol="#0000AA", flwd=0.5, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE)
  } else {
    if (!is.forecast(object)) {
      stop("autoplot.forecast requires a forecast object, use object=object")
    }
    if (is.null(object$lower) || is.null(object$upper) || is.null(object$level)) {
      PI <- FALSE
    } else if (!is.finite(max(object$upper))) {
      PI <- FALSE
    }
    if (!is.null(object$model$terms) && !is.null(object$model$model)) {
      # Initialise original dataset
      mt <- object$model$terms
      if (!is.null(object$series)) {
        yvar <- object$series
      } else {
        yvar <- deparse(mt[[2]])
      } # Perhaps a better way to do this
      xvar <- attr(mt, "term.labels")
      vars <- c(yvar = yvar, xvar = xvar)
      data <- object$model$model
      colnames(data) <- names(vars)[match(colnames(data), vars)]
      if (!is.null(object$model$lambda)) {
        data$yvar <- InvBoxCox(data$yvar, object$model$lambda)
      }
    } else {
      if (!is.null(object$x)) {
        data <- data.frame(yvar = c(object$x))
      } else if (!is.null(object$residuals) && !is.null(object$fitted)) {
        data <- data.frame(yvar = c(object$residuals + object$fitted))
      } else {
        stop("Could not find data")
      }
      if (!is.null(object$series)) {
        vars <- c(yvar = object$series)
      } else if (!is.null(object$model$call)) {
        vars <- c(yvar = deparse(object$model$call$y))
        if (vars == "object") {
          vars <- c(yvar = "y")
        }
      } else {
        vars <- c(yvar = "y")
      }
    }
    # Initialise ggplot object
    p <- ggplot2::ggplot()
    # Cross sectional forecasts
    if (!is.element("ts", class(object$mean))) {
      if (length(xvar) > 1) {
        stop("Forecast plot for regression models only available for a single predictor")
      }
      if (NCOL(object$newdata) == 1) { # Make sure column has correct name
        colnames(object$newdata) <- xvar
      }
      flwd <- 2 * flwd # Scale for points
      # Data points
      p <- p + ggplot2::geom_point(ggplot2::aes(x = .data[["xvar"]], y = .data[["yvar"]]), data = data)
      p <- p + ggplot2::labs(y = vars["yvar"], x = vars["xvar"])
      # Forecasted intervals
      if (PI) {
        levels <- NROW(object$level)
        interval <- data.frame(xpred = rep(object$newdata[[1]], levels), lower = c(object$lower), upper = c(object$upper), level = rep(object$level, each = NROW(object$newdata[[1]])))
        interval <- interval[order(interval$level, decreasing = TRUE), ] # Must be ordered for gg z-index
        p <- p + ggplot2::geom_linerange(ggplot2::aes(x = .data[["xpred"]], ymin = .data[["lower"]], ymax = .data[["upper"]], colour = .data[["level"]]), data = interval, linewidth = flwd)
        if (length(object$level) <= 5) {
          p <- p + ggplot2::scale_colour_gradientn(breaks = object$level, colours = shadecols, guide = "legend")
        } else {
          p <- p + ggplot2::scale_colour_gradientn(colours = shadecols, guide = "colourbar")
        }
      }
      # Forecasted points
      predicted <- data.frame(object$newdata, object$mean)
      colnames(predicted) <- c("xpred", "ypred")
      p <- p + ggplot2::geom_point(ggplot2::aes(x = .data[["xpred"]], y = .data[["ypred"]]), data = predicted, color = fcol, size = flwd)
      # Line of best fit
      coef <- data.frame(int = 0, m = 0)
      i <- match("(Intercept)", names(object$model$coefficients))
      if (i != 0) {
        coef$int <- object$model$coefficients[i]
        if (NROW(object$model$coefficients) == 2) {
          coef$m <- object$model$coefficients[-i]
        }
      } else {
        if (NROW(object$model$coefficients) == 1) {
          coef$m <- object$model$coefficients
        }
      }
      p <- p + ggplot2::geom_abline(intercept = coef$int, slope = coef$m)
    } else { # Time series objects (assumed)
      if (!missing(shadecols)) {
        warning(
          "The `shadecols` argument is deprecated for time series forecasts. Interval shading is now done automatically based on the level and `fcol`.",
          call.
= FALSE) } # Data points if (!is.null(time(object$x))) { timex <- time(object$x) } else if (!is.null(time(object$model$residuals))) { timex <- time(object$model$residuals) } data <- data.frame(yvar = as.numeric(data$yvar), datetime = as.numeric(timex)) if (!missing(include)) { data <- tail(data, include) } p <- p + ggplot2::scale_x_continuous() p <- p + ggplot2::geom_line(ggplot2::aes(x = .data[["datetime"]], y = .data[["yvar"]]), data = data) + ggplot2::labs(y = vars["yvar"], x = "Time") # Forecasted intervals p <- p + autolayer(object, PI = PI, colour = fcol, size = flwd) # predicted <- data.frame(xvar = time(object$mean), yvar = object$mean) # colnames(predicted) <- c("datetime", "ypred") # if (PI) { # levels <- NROW(object$level) # interval <- data.frame(datetime = rep(predicted$datetime, levels), lower = c(object$lower), upper = c(object$upper), level = rep(object$level, each = NROW(object$mean))) # interval <- interval[order(interval$level, decreasing = TRUE), ] # Must be ordered for gg z-index # p <- p + ggplot2::geom_ribbon(ggplot2::aes_(x = ~datetime, ymin = ~lower, ymax = ~upper, group = ~-level, fill = ~level), data = interval) # if (min(object$level) < 50) { # scalelimit <- c(1, 99) # } # else { # scalelimit <- c(50, 99) # } # if (length(object$level) <= 5) { # p <- p + ggplot2::scale_fill_gradientn(breaks = object$level, colours = shadecols, limit = scalelimit, guide = "legend") # } # else { # p <- p + ggplot2::scale_fill_gradientn(colours = shadecols, limit = scalelimit) # } # # Negative group is a work around for missing z-index # } # # Forecasted points # p <- p + ggplot2::geom_line(ggplot2::aes_(x = ~datetime, y = ~ypred), data = predicted, color = fcol, size = flwd) } p <- p + ggAddExtras(main = paste("Forecasts from ", object$method, sep = "")) return(p) } } #' @rdname plot.mforecast #' @export autoplot.mforecast <- function(object, PI = TRUE, facets = TRUE, colour = FALSE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!is.mforecast(object)) { stop("autoplot.mforecast requires a mforecast object, use object=object") } if (is.ts(object$forecast[[1]]$mean)) { # ts forecasts p <- autoplot(getResponse(object), facets = facets, colour = colour) + autolayer(object, ...) if (facets) { p <- p + ggplot2::facet_wrap( ~ series, labeller = function(labels) { if (!is.null(object$method)) { lapply(labels, function(x) paste0(as.character(x), "\n", object$method[as.character(x)])) } else { lapply(labels, function(x) paste0(as.character(x))) } }, ncol = 1, scales = "free_y" ) } p <- p + ggAddExtras(ylab = NULL) return(p) } else { # lm forecasts if (!requireNamespace("grid")) { stop("grid is needed for this function to work. Install it via install.packages(\"grid\")", call. 
= FALSE)
      }
      K <- length(object$forecast)
      if (K < 2) {
        warning("Expected at least two plots but the forecast object contains fewer.")
      }
      # Set up vector arguments
      if (missing(PI)) {
        PI <- rep(TRUE, K)
      }
      # Set up grid
      # ncol: Number of columns of plots
      # nrow: Number of rows needed, calculated from # of cols
      gridlayout <- matrix(seq(1, K), ncol = 1, nrow = K)
      grid::grid.newpage()
      grid::pushViewport(grid::viewport(layout = grid::grid.layout(nrow(gridlayout), ncol(gridlayout))))
      for (i in 1:K) {
        partialfcast <- object$forecast[[i]]
        partialfcast$model <- mlmsplit(object$model, index = i)
        matchidx <- as.data.frame(which(gridlayout == i, arr.ind = TRUE))
        print(
          autoplot(
            structure(partialfcast, class = "forecast"),
            PI = PI[i], ...
          ) + ggAddExtras(ylab = names(object$forecast)[i]),
          vp = grid::viewport(
            layout.pos.row = matchidx$row,
            layout.pos.col = matchidx$col
          )
        )
      }
    }
  }
}

#' @rdname tsdisplay
#'
#' @examples
#' library(ggplot2)
#' ggtsdisplay(USAccDeaths, plot.type="scatter", theme=theme_bw())
#'
#' @export
ggtsdisplay <- function(x, plot.type = c("partial", "histogram", "scatter", "spectrum"),
                        points = TRUE, smooth = FALSE,
                        lag.max, na.action = na.contiguous, theme = NULL, ...) {
  if (!requireNamespace("ggplot2", quietly = TRUE)) {
    stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE)
  } else if (!requireNamespace("grid", quietly = TRUE)) {
    stop("grid is needed for this function to work. Install it via install.packages(\"grid\")", call. = FALSE)
  } else {
    if (NCOL(x) > 1) {
      stop("ggtsdisplay is only for univariate time series")
    }
    plot.type <- match.arg(plot.type)
    main <- deparse(substitute(x))
    if (!is.ts(x)) {
      x <- ts(x)
    }
    if (missing(lag.max)) {
      lag.max <- round(min(max(10 * log10(length(x)), 3 * frequency(x)), length(x) / 3))
    }
    dots <- list(...)
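    # Capture remaining plotting arguments; blank axis labels are supplied as
    # defaults below before the label-related arguments are split out via match()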
if (is.null(dots$xlab)) { dots$xlab <- "" } if (is.null(dots$ylab)) { dots$ylab <- "" } labs <- match(c("xlab", "ylab", "main"), names(dots), nomatch = 0) # Set up grid for plots gridlayout <- matrix(c(1, 2, 1, 3), nrow = 2) grid::grid.newpage() grid::pushViewport(grid::viewport(layout = grid::grid.layout(nrow(gridlayout), ncol(gridlayout)))) # Add ts plot with points matchidx <- as.data.frame(which(gridlayout == 1, arr.ind = TRUE)) tsplot <- do.call(ggplot2::autoplot, c(object = quote(x), dots[labs])) if (points) { tsplot <- tsplot + ggplot2::geom_point(size = 0.5) } if (smooth) { tsplot <- tsplot + ggplot2::geom_smooth(method = "loess", se = FALSE) } if (is.null(tsplot$labels$title)) { # Add title if missing tsplot <- tsplot + ggplot2::ggtitle(main) } if (!is.null(theme)) { tsplot <- tsplot + theme } print( tsplot, vp = grid::viewport( layout.pos.row = matchidx$row, layout.pos.col = matchidx$col ) ) # Prepare Acf plot acfplot <- do.call(ggAcf, c(x = quote(x), lag.max = lag.max, na.action = na.action, dots[-labs])) + ggplot2::ggtitle(NULL) if (!is.null(theme)) { acfplot <- acfplot + theme } # Prepare last plot (variable) if (plot.type == "partial") { lastplot <- ggPacf(x, lag.max = lag.max, na.action = na.action) + ggplot2::ggtitle(NULL) # Match y-axis acfplotrange <- ggplot2::layer_scales(acfplot)$y$range$range pacfplotrange <- ggplot2::layer_scales(lastplot)$y$range$range yrange <- range(c(acfplotrange, pacfplotrange)) acfplot <- acfplot + ggplot2::ylim(yrange) lastplot <- lastplot + ggplot2::ylim(yrange) } else if (plot.type == "histogram") { lastplot <- gghistogram(x, add.normal = TRUE, add.rug = TRUE) + ggplot2::xlab(main) } else if (plot.type == "scatter") { scatterData <- data.frame(y = x[2:NROW(x)], x = x[1:NROW(x) - 1]) lastplot <- ggplot2::ggplot(ggplot2::aes(y = .data[["y"]], x = .data[["x"]]), data = scatterData) + ggplot2::geom_point() + ggplot2::labs(x = expression(Y[t - 1]), y = expression(Y[t])) } else if (plot.type == "spectrum") { specData <- spec.ar(x, plot = FALSE) specData <- data.frame(spectrum = specData$spec, frequency = specData$freq) lastplot <- ggplot2::ggplot(ggplot2::aes(y = .data[["spectrum"]], x = .data[["frequency"]]), data = specData) + ggplot2::geom_line() + ggplot2::scale_y_log10() } if (!is.null(theme)) { lastplot <- lastplot + theme } # Add ACF plot matchidx <- as.data.frame(which(gridlayout == 2, arr.ind = TRUE)) print( acfplot, vp = grid::viewport( layout.pos.row = matchidx$row, layout.pos.col = matchidx$col ) ) # Add last plot matchidx <- as.data.frame(which(gridlayout == 3, arr.ind = TRUE)) print( lastplot, vp = grid::viewport( layout.pos.row = matchidx$row, layout.pos.col = matchidx$col ) ) } } #' Time series lag ggplots #' #' Plots a lag plot using ggplot. #' #' \dQuote{gglagplot} will plot time series against lagged versions of #' themselves. Helps visualising 'auto-dependence' even when auto-correlations #' vanish. #' #' \dQuote{gglagchull} will layer convex hulls of the lags, layered on a single #' plot. This helps visualise the change in 'auto-dependence' as lags increase. #' #' @param x a time series object (type \code{ts}). #' @param lags number of lag plots desired, see arg set.lags. #' @param set.lags vector of positive integers specifying which lags to use. #' @param diag logical indicating if the x=y diagonal should be drawn. #' @param diag.col color to be used for the diagonal if(diag). #' @param do.lines if TRUE, lines will be drawn, otherwise points will be #' drawn. #' @param colour logical indicating if lines should be coloured. 
#' @param continuous Should the colour scheme for years be continuous or #' discrete? #' @param labels logical indicating if labels should be used. #' @param seasonal Should the line colour be based on seasonal characteristics #' (TRUE), or sequential (FALSE). #' @param \dots Not used (for consistency with lag.plot) #' @return None. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{lag.plot}} #' @examples #' #' gglagplot(woolyrnq) #' gglagplot(woolyrnq,seasonal=FALSE) #' #' lungDeaths <- cbind(mdeaths, fdeaths) #' gglagplot(lungDeaths, lags=2) #' gglagchull(lungDeaths, lags=6) #' #' @export gglagplot <- function(x, lags=ifelse(frequency(x) > 9, 16, 9), set.lags = 1:lags, diag=TRUE, diag.col="gray", do.lines = TRUE, colour = TRUE, continuous = frequency(x) > 12, labels = FALSE, seasonal = TRUE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { freq <- frequency(x) if (freq > 1) { linecol <- cycle(x) if (freq > 24) { continuous <- TRUE } } else { seasonal <- FALSE continuous <- TRUE } if (!seasonal) { continuous <- TRUE } # Make sure lags is evaluated tmp <- lags x <- as.matrix(x) # Prepare data for plotting n <- NROW(x) data <- data.frame() for (i in 1:NCOL(x)) { for (lagi in set.lags) { sname <- colnames(x)[i] if (is.null(sname)) { sname <- deparse(match.call()$x) } data <- rbind( data, data.frame( lagnum = 1:(n - lagi), freqcur = if(seasonal) linecol[(lagi + 1):n] else (lagi + 1):n, orig = x[(lagi + 1):n, i], lagged = x[1:(n - lagi), i], lagVal = rep(lagi, n - lagi), series = factor(rep(sname, n - lagi)) ) ) } } if (!continuous) { data$freqcur <- factor(data$freqcur) } # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes(x = .data[["lagged"]], y = .data[["orig"]]), data = data) if (diag) { p <- p + ggplot2::geom_abline(colour = diag.col, linetype = "dashed") } if (labels) { linesize <- 0.25 * (2 - do.lines) } else { linesize <- 0.5 * (2 - do.lines) } plottype <- if (do.lines) { function(...) ggplot2::geom_path(..., linewidth = linesize) } else { function(...) 
ggplot2::geom_point(..., size = linesize) } if (colour) { p <- p + plottype(ggplot2::aes(colour = .data[["freqcur"]])) } else { p <- p + plottype() } if (labels) { p <- p + ggplot2::geom_text(ggplot2::aes(label = .data[["lagnum"]])) } # Ensure all facets are of size size (if extreme values are excluded in lag specification) if (max(set.lags) > NROW(x) / 2) { axissize <- rbind(aggregate(orig ~ series, data = data, min), aggregate(orig~ series, data = data, max)) axissize <- data.frame(series = rep(axissize$series, length(set.lags)), orig = rep(axissize$orig, length(set.lags)), lagVal = rep(set.lags, each = NCOL(x))) p <- p + ggplot2::geom_blank(ggplot2::aes(x = .data[["orig"]], y = .data[["orig"]]), data = axissize) } # Facet labellerFn <- function(labels) { if (!is.null(labels$series)) { # Multivariate labels labels$series <- as.character(labels$series) } labels$lagVal <- paste("lag", labels$lagVal) return(labels) } if (NCOL(x) > 1) { p <- p + ggplot2::facet_wrap(~series + lagVal, scales = "free", labeller = labellerFn) } else { p <- p + ggplot2::facet_wrap(~lagVal, labeller = labellerFn) } p <- p + ggplot2::theme(aspect.ratio = 1) if (colour) { if (seasonal) { if (freq == 4L) { title <- "Quarter" } else if (freq == 12L) { title <- "Month" } else if (freq == 7L) { title <- "Day" } else if (freq == 24L) { title <- "Hour" } else { title <- "Season" } } else { title <- "Time" } if (continuous) { p <- p + ggplot2::guides(colour = ggplot2::guide_colourbar(title = title)) } else { p <- p + ggplot2::guides(colour = ggplot2::guide_legend(title = title)) } } p <- p + ggAddExtras(ylab = NULL, xlab = NULL) return(p) } } #' @rdname gglagplot #' #' @examples #' gglagchull(woolyrnq) #' #' @export gglagchull <- function(x, lags=ifelse(frequency(x) > 1, min(12, frequency(x)), 4), set.lags = 1:lags, diag=TRUE, diag.col="gray", ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { # Make sure lags is evaluated tmp <- lags x <- as.matrix(x) # Prepare data for plotting n <- NROW(x) data <- data.frame() for (i in 1:NCOL(x)) { for (lag in set.lags) { sname <- colnames(x)[i] if (is.null(sname)) { sname <- deparse(substitute(x)) } data <- rbind(data, data.frame(orig = x[(lag + 1):n, i], lagged = x[1:(n - lag), i], lag = rep(lag, n - lag), series = rep(sname, n - lag))[grDevices::chull(x[(lag + 1):n, i], x[1:(n - lag), i]), ]) } } # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes(x = .data[["orig"]], y = .data[["lagged"]]), data = data) if (diag) { p <- p + ggplot2::geom_abline(colour = diag.col, linetype = "dashed") } p <- p + ggplot2::geom_polygon(ggplot2::aes(group = .data[["lag"]], colour = .data[["lag"]], fill = .data[["lag"]]), alpha = 1 / length(set.lags)) p <- p + ggplot2::guides(colour = ggplot2::guide_colourbar(title = "lag")) p <- p + ggplot2::theme(aspect.ratio = 1) # Facet if (NCOL(x) > 1) { p <- p + ggplot2::facet_wrap(~series, scales = "free") } p <- p + ggAddExtras(ylab = "lagged", xlab = "original") return(p) } } #' Create a seasonal subseries ggplot #' #' Plots a subseries plot using ggplot. Each season is plotted as a separate #' mini time series. The blue lines represent the mean of the observations #' within each season. #' #' The \code{ggmonthplot} function is simply a wrapper for #' \code{ggsubseriesplot} as a convenience for users familiar with #' \code{\link[stats]{monthplot}}. #' #' @param x a time series object (type \code{ts}). 
#' @param labels A vector of labels to use for each 'season' #' @param times A vector of times for each observation #' @param phase A vector of seasonal components #' @param \dots Not used (for consistency with monthplot) #' @return Returns an object of class \code{ggplot}. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{monthplot}} #' @examples #' #' ggsubseriesplot(AirPassengers) #' ggsubseriesplot(woolyrnq) #' #' @export ggmonthplot <- function(x, labels = NULL, times = time(x), phase = cycle(x), ...) { ggsubseriesplot(x, labels, times, phase, ...) } #' @rdname ggmonthplot #' @export ggsubseriesplot <- function(x, labels = NULL, times = time(x), phase = cycle(x), ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!inherits(x, "ts")) { stop("ggsubseriesplot requires a ts object, use x=object") } if (round(frequency(x)) <= 1) { stop("Data are not seasonal") } if("1" %in% dimnames(table(table(phase)))[[1]]){ stop(paste("Each season requires at least 2 observations.", ifelse(frequency(x)%%1 == 0, "Your series length may be too short for this graphic.", "This may be caused from specifying a time-series with non-integer frequency.") ) ) } data <- data.frame(y = as.numeric(x), year = trunc(time(x)), season = as.numeric(phase)) seasonwidth <- (max(data$year) - min(data$year)) * 1.05 data$time <- data$season + 0.025 + (data$year - min(data$year)) / seasonwidth avgLines <- stats::aggregate(data$y, by = list(data$season), FUN = mean) colnames(avgLines) <- c("season", "avg") data <- merge(data, avgLines, by = "season") # Initialise ggplot object # p <- ggplot2::ggplot(ggplot2::aes_(x=~interaction(year, season), y=~y, group=~season), data=data, na.rm=TRUE) p <- ggplot2::ggplot( ggplot2::aes(x = .data[["time"]], y = .data[["y"]], group = .data[["season"]]), data = data, na.rm = TRUE ) # Remove vertical break lines p <- p + ggplot2::theme(panel.grid.major.x = ggplot2::element_blank()) # Add data p <- p + ggplot2::geom_line() # Add average lines p <- p + ggplot2::geom_line(ggplot2::aes(y = .data[["avg"]]), col = "#0000AA") # Create x-axis labels xfreq <- frequency(x) if (!is.null(labels)) { if (xfreq != length(labels)) { stop("The number of labels supplied is not the same as the number of seasons.") } else { xbreaks <- labels } } else if (xfreq == 4) { xbreaks <- c("Q1", "Q2", "Q3", "Q4") xlab <- "Quarter" } else if (xfreq == 7) { xbreaks <- c( "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" ) xlab <- "Day" } else if (xfreq == 12) { xbreaks <- month.abb xlab <- "Month" } else { xbreaks <- 1:frequency(x) xlab <- "Season" } # X-axis p <- p + ggplot2::scale_x_continuous(breaks = 0.5 + (1:xfreq), labels = xbreaks) # Graph labels p <- p + ggAddExtras(ylab = deparse(substitute(x)), xlab = xlab) return(p) } } #' @rdname seasonplot #' #' @param continuous Should the colour scheme for years be continuous or #' discrete? #' @param polar Plot the graph on seasonal coordinates #' #' @examples #' ggseasonplot(AirPassengers, col=rainbow(12), year.labels=TRUE) #' ggseasonplot(AirPassengers, year.labels=TRUE, continuous=TRUE) #' #' @export ggseasonplot <- function(x, season.labels=NULL, year.labels=FALSE, year.labels.left=FALSE, type=NULL, col=NULL, continuous=FALSE, polar=FALSE, labelgap=0.04, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. 
Install it via install.packages(\"ggplot2\")", call. = FALSE) } if (!inherits(x, "ts")) { stop("autoplot.seasonplot requires a ts object, use x=object") } if (!is.null(type)) { message("Plot types are not yet supported for seasonplot()") } # Check data are seasonal and convert to integer seasonality s <- round(frequency(x)) if (s <= 1) { stop("Data are not seasonal") } # Grab name for plot title xname <- deparse(substitute(x)) tspx <- tsp(x) x <- ts(x, start = tspx[1], frequency = s) data <- data.frame( y = as.numeric(x), year = trunc(round(time(x), 8)), cycle = as.numeric(cycle(x)), time = as.numeric((cycle(x) - 1) / s) ) data$year <- if (continuous) { as.numeric(data$year) } else { as.factor(data$year) } if (polar) { startValues <- data[data$cycle == 1, ] if (data$cycle[1] == 1) { startValues <- startValues[-1, ] } startValues$time <- 1 - .Machine$double.eps levels(startValues$year) <- as.numeric(levels(startValues$year)) - 1 data <- rbind(data, startValues) } # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes(x = .data[["time"]], y = .data[["y"]], group = .data[["year"]], colour = .data[["year"]]), data = data, na.rm = TRUE) # p <- p + ggplot2::scale_x_continuous() # Add data p <- p + ggplot2::geom_line() if (!is.null(col)) { if(is.numeric(col)){ col <- palette()[(col-1)%%(length(palette())) + 1] } if (continuous) { p <- p + ggplot2::scale_color_gradientn(colours = col) } else { ncol <- length(unique(data$year)) if (length(col) == 1) { p <- p + ggplot2::scale_color_manual(guide = "none", values = rep(col, ncol)) } else { p <- p + ggplot2::scale_color_manual(values = rep(col, ceiling(ncol / length(col)))[1:ncol]) } } } if (year.labels) { yrlab <- stats::aggregate(time ~ year, data = data, FUN = max) yrlab <- cbind(yrlab, offset = labelgap) } if (year.labels.left) { yrlabL <- stats::aggregate(time ~ year, data = data, FUN = min) yrlabL <- cbind(yrlabL, offset = -labelgap) if (year.labels) { yrlab <- rbind(yrlab, yrlabL) } } if (year.labels || year.labels.left) { yrlab <- merge(yrlab, data) yrlab$time <- yrlab$time + yrlab$offset p <- p + ggplot2::guides(colour = "none") p <- p + ggplot2::geom_text(ggplot2::aes(x = .data[["time"]], y = .data[["y"]], label = .data[["year"]]), data = yrlab) } # Add seasonal labels if (s == 12) { labs <- month.abb xLab <- "Month" } else if (s == 4) { labs <- paste("Q", 1:4, sep = "") xLab <- "Quarter" } else if (s == 7) { labs <- c("Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat") xLab <- "Day" } else if (s == 52) { labs <- 1:s xLab <- "Week" } else if (s == 24) { labs <- 0:(s - 1) xLab <- "Hour" } else if (s == 48) { labs <- seq(0, 23.5, by = 0.5) xLab <- "Half-hour" } else { labs <- 1:s xLab <- "Season" } if (!is.null(season.labels)) { if (length(season.labels) != length(labs)) { warning(paste0("Provided season.labels have length ", length(season.labels), ", but ", length(labs), " are required. Ignoring season.labels.")) } else { labs <- season.labels } } breaks <- sort(unique(data$time)) if (polar) { breaks <- head(breaks, -1) p <- p + ggplot2::coord_polar() } p <- p + ggplot2::scale_x_continuous(breaks = breaks, minor_breaks = NULL, labels = labs) # Graph title and axes p <- p + ggAddExtras(main = paste("Seasonal plot:", xname), xlab = xLab, ylab = NULL) return(p) } #' @rdname plot.forecast #' @export autoplot.splineforecast <- function(object, PI=TRUE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { p <- autoplot(object$x) + autolayer(object) p <- p + ggplot2::geom_point(size = 2) fit <- data.frame(datetime = as.numeric(time(object$fitted)), y = as.numeric(object$fitted)) p <- p + ggplot2::geom_line(ggplot2::aes(x = .data[["datetime"]], y = .data[["y"]]), colour = "red", data = fit) p <- p + ggAddExtras(ylab = deparse(object$model$call$x)) if (!is.null(object$series)) { p <- p + ggplot2::ylab(object$series) } return(p) } } #' @rdname autoplot.seas #' @export autoplot.stl <- function(object, labels = NULL, range.bars = TRUE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!inherits(object, "stl")) { stop("autoplot.stl requires a stl object, use x=object") } # Re-order series as trend, seasonal, remainder object$time.series <- object$time.series[, c("trend", "seasonal", "remainder")] if (is.null(labels)) { labels <- colnames(object$time.series) } data <- object$time.series cn <- c("data", labels) data <- data.frame( datetime = rep(time(data), NCOL(data) + 1), y = c(rowSums(data), data), parts = factor(rep(cn, each = NROW(data)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes(x = .data[["datetime"]], y = .data[["y"]]), data = data) # Add data # Time series lines p <- p + ggplot2::geom_line(ggplot2::aes(x = .data[["datetime"]], y = .data[["y"]]), data = subset(data, data$parts != cn[4]), na.rm = TRUE) p <- p + ggplot2::geom_segment( ggplot2::aes(x = .data[["datetime"]], xend = .data[["datetime"]], y = 0, yend = .data[["y"]]), data = subset(data, data$parts == cn[4]), lineend = "butt" ) # Rangebars if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = factor(colnames(yranges), levels = cn), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect(ggplot2::aes(xmin = .data[["left"]], xmax = .data[["right"]], ymax = .data[["top"]], ymin = .data[["bottom"]]), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } # Remainder p <- p + ggplot2::facet_grid("parts ~ .", scales = "free_y", switch = "y") p <- p + ggplot2::geom_hline(ggplot2::aes(yintercept = .data[["y"]]), data = data.frame(y = 0, parts = factor(cn[4], levels = cn))) # Add axis labels p <- p + ggAddExtras(xlab = "Time", ylab = "") # Make x axis contain only whole numbers (e.g., years) p <- p + ggplot2::scale_x_continuous(breaks = unique(round(pretty(data$datetime)))) # ^^ Remove rightmost x axis gap with `expand=c(0.05, 0, 0, 0)` argument when assymetric `expand` feature is supported # issue: tidyverse/ggplot2#1669 return(p) } } #' @rdname autoplot.seas #' @export autoplot.StructTS <- function(object, labels = NULL, range.bars = TRUE, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } else { if (!inherits(object, "StructTS")) { stop("autoplot.StructTS requires a StructTS object.") } if (is.null(labels)) { labels <- colnames(object$fitted) } data <- object$fitted cn <- c("data", labels) data <- data.frame( datetime = rep(time(data), NCOL(data) + 1), y = c(object$data, data), parts = factor(rep(cn, each = NROW(data)), levels = cn) ) # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes(x = .data[["datetime"]], y = .data[["y"]]), data = data) # Add data p <- p + ggplot2::geom_line(ggplot2::aes(x = .data[["datetime"]], y = .data[["y"]]), na.rm = TRUE) p <- p + ggplot2::facet_grid("parts ~ .", scales = "free_y", switch = "y") if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = factor(colnames(yranges), levels = cn), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect(ggplot2::aes(xmin = .data[["left"]], xmax = .data[["right"]], ymax = .data[["top"]], ymin = .data[["bottom"]]), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } # Add axis labels p <- p + ggAddExtras(xlab = "Time", ylab = "") # Make x axis contain only whole numbers (e.g., years) p <- p + ggplot2::scale_x_continuous(breaks = unique(round(pretty(data$datetime)))) return(p) } } #' Plot time series decomposition components using ggplot #' #' Produces a ggplot object of seasonally decomposed time series for objects of #' class \dQuote{\code{stl}} (created with \code{\link[stats]{stl}}), class #' \dQuote{\code{seas}} (created with \code{\link[seasonal]{seas}}), or class #' \dQuote{\code{decomposed.ts}} (created with \code{\link[stats]{decompose}}). #' #' @param object Object of class \dQuote{\code{seas}}, \dQuote{\code{stl}}, or #' \dQuote{\code{decomposed.ts}}. #' @param labels Labels to replace \dQuote{seasonal}, \dQuote{trend}, and #' \dQuote{remainder}. #' @param range.bars Logical indicating if each plot should have a bar at its #' right side representing relative size. If \code{NULL}, automatic selection #' takes place. #' @param ... Other plotting parameters to affect the plot. #' @return Returns an object of class \code{ggplot}. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[seasonal]{seas}}, \code{\link[stats]{stl}}, #' \code{\link[stats]{decompose}}, \code{\link[stats]{StructTS}}, #' \code{\link[stats]{plot.stl}}. #' @examples #' #' library(ggplot2) #' co2 %>% #' decompose() %>% #' autoplot() #' nottem %>% #' stl(s.window = "periodic") %>% #' autoplot() #' \dontrun{ #' library(seasonal) #' seas(USAccDeaths) %>% autoplot() #' } #' #' @export autoplot.seas <- function(object, labels = NULL, range.bars = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. 
= FALSE) } if (!inherits(object, "seas")) { stop("autoplot.seas requires a seas object") } if (is.null(labels)) { if ("seasonal" %in% colnames(object$data)) { labels <- c("trend", "seasonal", "irregular") } else { labels <- c("trend", "irregular") } } data <- cbind(object$x, object$data[, labels]) colnames(data) <- cn <- c("data", labels) data <- data.frame( datetime = rep(time(data), NCOL(data)), y = c(data), parts = factor(rep(cn, each = NROW(data)), levels = cn) ) # Is it additive or multiplicative? freq <- frequency(object$data) sum_first_year <- try(sum(seasonal(object)[seq(freq)]), silent=TRUE) if(!inherits(sum_first_year, "try-error")) { int <- as.integer(sum_first_year > 0.5) # Closer to 1 than 0. } else { int <- 0 } # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes(x = .data[["datetime"]], y = .data[["y"]]), data = data) # Add data p <- p + ggplot2::geom_line(ggplot2::aes(x = .data[["datetime"]], y = .data[["y"]]), data = subset(data, data$parts != tail(cn,1)), na.rm = TRUE) p <- p + ggplot2::geom_segment( ggplot2::aes(x = .data[["datetime"]], xend = .data[["datetime"]], y = int, yend = .data[["y"]]), data = subset(data, data$parts == tail(cn,1)), lineend = "butt" ) p <- p + ggplot2::facet_grid("parts ~ .", scales = "free_y", switch = "y") p <- p + ggplot2::geom_hline(ggplot2::aes(yintercept = .data[["y"]]), data = data.frame(y = int, parts = factor(tail(cn,1), levels = cn)) ) # Rangebars if (is.null(range.bars)) { range.bars <- object$spc$transform$`function` == "none" } if (range.bars) { yranges <- vapply(split(data$y, data$parts), function(x) range(x, na.rm = TRUE), numeric(2)) xranges <- range(data$datetime) barmid <- apply(yranges, 2, mean) barlength <- min(apply(yranges, 2, diff)) barwidth <- (1 / 64) * diff(xranges) barpos <- data.frame( left = xranges[2] + barwidth, right = xranges[2] + barwidth * 2, top = barmid + barlength / 2, bottom = barmid - barlength / 2, parts = factor(colnames(yranges), levels = cn), datetime = xranges[2], y = barmid ) p <- p + ggplot2::geom_rect(ggplot2::aes(xmin = .data[["left"]], xmax = .data[["right"]], ymax = .data[["top"]], ymin = .data[["bottom"]]), data = barpos, fill = "gray75", colour = "black", size = 1 / 3) } # Add axis labels p <- p + ggAddExtras(xlab = "Time", ylab = "") # Make x axis contain only whole numbers (e.g., years) p <- p + ggplot2::scale_x_continuous(breaks = unique(round(pretty(data$datetime)))) return(p) } #' @rdname autoplot.ts #' @export autolayer.mts <- function(object, colour = TRUE, series = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { cl <- match.call() cl[[1]] <- quote(autolayer) cl$object <- quote(object[, i]) if (length(series) != NCOL(object)) { if (colour) { message("For a multivariate time series, specify a series name for each time series. Defaulting to column names.") } series <- colnames(object) } out <- list() for (i in 1:NCOL(object)) { cl$series <- series[i] out[[i]] <- eval(cl) } return(out) } } #' @rdname autoplot.ts #' @export autolayer.msts <- function(object, series = NULL, ...) { if (NCOL(object) > 1) { class(object) <- c("mts", "ts", "matrix") } else { if (is.null(series)) { series <- deparse(substitute(object)) } class(object) <- c("ts") } attr(object, "msts") <- NULL autolayer(object, series = series, ...) } #' @rdname autoplot.ts #' @export autolayer.ts <- function(object, colour=TRUE, series=NULL, ...)
{ if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { tsdata <- data.frame( timeVal = as.numeric(time(object)), series = ifelse(is.null(series), deparse(substitute(object)), series), seriesVal = as.numeric(object) ) if (colour) { ggplot2::geom_line(ggplot2::aes(x = .data[["timeVal"]], y = .data[["seriesVal"]], group = .data[["series"]], colour = .data[["series"]]), data = tsdata, ..., inherit.aes = FALSE) } else { ggplot2::geom_line(ggplot2::aes(x = .data[["timeVal"]], y = .data[["seriesVal"]], group = .data[["series"]]), data = tsdata, ..., inherit.aes = FALSE) } } } #' @rdname plot.forecast #' @export autolayer.forecast <- function(object, series = NULL, PI = TRUE, showgap = TRUE, ...) { PI <- PI & !is.null(object$level) data <- forecast2plotdf(object, PI = PI, showgap = showgap) mapping <- ggplot2::aes(x = .data[["x"]], y = .data[["y"]]) if (!is.null(object$series)) { data[["series"]] <- object$series } if (!is.null(series)) { data[["series"]] <- series mapping$colour <- quote(series) } if (PI) { mapping$level <- quote(level) mapping$ymin <- quote(ymin) mapping$ymax <- quote(ymax) } geom_forecast(mapping = mapping, data = data, stat = "identity", ..., inherit.aes = FALSE) } #' @rdname plot.mforecast #' @export autolayer.mforecast <- function(object, series = NULL, PI = TRUE, ...) { cl <- match.call() cl[[1]] <- quote(autolayer) cl$object <- quote(object$forecast[[i]]) if (!is.null(series)) { if (length(series) != length(object$forecast)) { series <- names(object$forecast) } } out <- list() for (i in 1:length(object$forecast)) { cl$series <- series[i] out[[i]] <- eval(cl) } return(out) } #' Automatically create a ggplot for time series objects #' #' \code{autoplot} takes an object of type \code{ts} or \code{mts} and creates #' a ggplot object suitable for usage with \code{stat_forecast}. #' #' \code{fortify.ts} takes a \code{ts} object and converts it into a data frame #' (for usage with ggplot2). #' #' @param object Object of class \dQuote{\code{ts}} or \dQuote{\code{mts}}. #' @param series Identifies the time series with a colour, which integrates well #' with the functionality of \link{geom_forecast}. #' @param facets If TRUE, multiple time series will be faceted (and unless #' specified, colour is set to FALSE). If FALSE, each series will be assigned a #' colour. #' @param colour If TRUE, the time series will be assigned a colour aesthetic. #' @param model Object of class \dQuote{\code{ts}} to be converted to #' \dQuote{\code{data.frame}}. #' @param data Not used (required for \code{\link[ggplot2]{fortify}} method) #' @param ... Other plotting parameters to affect the plot. #' @inheritParams plot.forecast #' @return Returns an object of class \code{ggplot}; \code{fortify.ts} returns a \code{data.frame}. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[stats]{plot.ts}}, \code{\link[ggplot2]{fortify}} #' @examples #' #' library(ggplot2) #' autoplot(USAccDeaths) #' #' lungDeaths <- cbind(mdeaths, fdeaths) #' autoplot(lungDeaths) #' autoplot(lungDeaths, facets=TRUE) #' #' @export autoplot.ts <- function(object, series=NULL, xlab = "Time", ylab = deparse(substitute(object)), main = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call.
= FALSE) } else { if (!is.ts(object)) { stop("autoplot.ts requires a ts object, use object=object") } # Create data frame with time as a column labelled x # and time series as a column labelled y. data <- data.frame(y = as.numeric(object), x = as.numeric(time(object))) if (!is.null(series)) { data <- transform(data, series = series) } # Initialise ggplot object p <- ggplot2::ggplot(ggplot2::aes(y = .data[["y"]], x = .data[["x"]]), data = data) # Add data if (!is.null(series)) { p <- p + ggplot2::geom_line(ggplot2::aes(group = .data[["series"]], colour = .data[["series"]]), na.rm = TRUE, ...) } else { p <- p + ggplot2::geom_line(na.rm = TRUE, ...) } # Add labels p <- p + ggAddExtras(xlab = xlab, ylab = ylab, main = main) # Make x axis contain only whole numbers (e.g., years) p <- p + ggplot2::scale_x_continuous(breaks = ggtsbreaks) return(p) } } #' @rdname autoplot.ts #' @export autoplot.mts <- function(object, colour=TRUE, facets=FALSE, xlab = "Time", ylab = deparse(substitute(object)), main = NULL, ...) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (!stats::is.mts(object)) { stop("autoplot.mts requires an mts object, use object=object") } if (NCOL(object) <= 1) { return(autoplot.ts(object, ...)) } cn <- colnames(object) if (is.null(cn)) { cn <- paste("Series", seq_len(NCOL(object))) } data <- data.frame( y = as.numeric(c(object)), x = rep(as.numeric(time(object)), NCOL(object)), series = factor(rep(cn, each = NROW(object)), levels = cn) ) # Initialise ggplot object mapping <- ggplot2::aes(y = .data[["y"]], x = .data[["x"]], group = .data[["series"]]) if (colour && (!facets || !missing(colour))) { mapping$colour <- quote(series) } p <- ggplot2::ggplot(mapping, data = data) p <- p + ggplot2::geom_line(na.rm = TRUE, ...) if (facets) { p <- p + ggplot2::facet_grid(series~., scales = "free_y") } p <- p + ggAddExtras(xlab = xlab, ylab = ylab, main = main) return(p) } } #' @rdname autoplot.ts #' @export autoplot.msts <- function(object, ...) { sname <- deparse(substitute(object)) if (NCOL(object) > 1) { class(object) <- c("mts", "ts", "matrix") } else { class(object) <- c("ts") } attr(object, "msts") <- NULL autoplot(object, ...) + ggAddExtras(ylab = sname) } #' @rdname autoplot.ts #' @export fortify.ts <- function(model, data, ...) { # Use ggfortify version if it is loaded # to prevent cran errors if (exists("ggfreqplot")) { tsp <- attr(model, which = "tsp") dtindex <- time(model) if (any(tsp[3] == c(4, 12))) { dtindex <- zoo::as.Date.yearmon(dtindex) } model <- data.frame(Index = dtindex, Data = as.numeric(model)) return(ggplot2::fortify(model)) } else { model <- cbind(x = as.numeric(time(model)), y = as.numeric(model)) as.data.frame(model) } } forecast2plotdf <- function(model, data=as.data.frame(model), PI=TRUE, showgap=TRUE, ...) { # Time series forecasts if (is.element("ts", class(model$mean))) { xVals <- as.numeric(time(model$mean)) # x axis is time } # Cross-sectional forecasts else if (!is.null(model[["newdata"]])) { xVals <- as.numeric(model[["newdata"]][, 1]) # Only display the first column of newdata, should be generalised.
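# The data frame built below is in "long" format: columns x (time), y (point
# forecast, NA on interval rows), level, ymin and ymax (NA on point rows).
# For example, h = 2 with a single 80% interval gives (values illustrative only):
#      x   y level ymin ymax
#   2001  NA    80   95  120
#   2002  NA    80   96  125
#   2001 105    NA   NA   NA
#   2002 110    NA   NA   NA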
if (NCOL(model[["newdata"]]) > 1) { message("Note: only extracting first column of data") } } else { stop("Could not find forecast x axis") } Hiloc <- grep("Hi ", names(data)) Loloc <- grep("Lo ", names(data)) if (PI && !is.null(model$level)) { # PI if (length(Hiloc) == length(Loloc)) { if (length(Hiloc) > 0) { out <- data.frame( x = rep(xVals, length(Hiloc) + 1), y = c(rep(NA, NROW(data) * (length(Hiloc))), data[, 1]), level = c(as.numeric(rep(gsub("Hi ", "", names(data)[Hiloc]), each = NROW(data))), rep(NA, NROW(data))), ymax = c(unlist(data[, Hiloc]), rep(NA, NROW(data))), ymin = c(unlist(data[, Loloc]), rep(NA, NROW(data))) ) numInterval <- length(model$level) } } else { warning("missing intervals detected, plotting point predictions only") PI <- FALSE } } if (!PI) { # No PI out <- data.frame(x = xVals, y = as.numeric(model$mean), level = rep(NA, NROW(model$mean)), ymax = rep(NA, NROW(model$mean)), ymin = rep(NA, NROW(model$mean))) numInterval <- 0 } if (!showgap) { if (is.null(model$x)) { warning("Removing the gap requires historical data, provide this via model$x. Defaulting showgap to TRUE.") } else { intervalGap <- data.frame( x = rep(time(model$x)[length(model$x)], numInterval + 1), y = c(model$x[length(model$x)], rep(NA, numInterval)), level = c(NA, model$level)[seq_along(1:(numInterval + 1))], ymax = c(NA, rep(model$x[length(model$x)], numInterval)), ymin = c(NA, rep(model$x[length(model$x)], numInterval)) ) out <- rbind(intervalGap, out) } } return(out) } #' @rdname geom_forecast #' @export StatForecast <- ggplot2::ggproto( "StatForecast", ggplot2::Stat, required_aes = c("x", "y"), compute_group = function(data, scales, params, PI=TRUE, showgap=TRUE, series=NULL, h=NULL, level=c(80, 95), fan=FALSE, robust=FALSE, lambda=NULL, find.frequency=FALSE, allow.multiplicative.trend=FALSE, ...) 
{ ## TODO: Rewrite tspx <- recoverTSP(data$x) if (is.null(h)) { h <- ifelse(tspx[3] > 1, 2 * tspx[3], 10) } tsdat <- ts(data = data$y, start = tspx[1], frequency = tspx[3]) fcast <- forecast( tsdat, h = h, level = level, fan = fan, robust = robust, lambda = lambda, find.frequency = find.frequency, allow.multiplicative.trend = allow.multiplicative.trend ) fcast <- forecast2plotdf(fcast, PI = PI, showgap = showgap) # Add ggplot & series information extraInfo <- as.list(data[1, !colnames(data) %in% colnames(fcast)]) extraInfo$`_data` <- quote(fcast) if (!is.null(series)) { if (data$group[1] > length(series)) { message("Recycling series argument, please provide a series name for each time series") } extraInfo[["series"]] <- series[(abs(data$group[1]) - 1) %% length(series) + 1] } do.call("transform", extraInfo) } ) #' @rdname geom_forecast #' @export GeomForecast <- ggplot2::ggproto( "GeomForecast", ggplot2::Geom, # Produces both point forecasts and intervals on graph required_aes = c("x", "y"), optional_aes = c("ymin", "ymax", "level"), default_aes = ggplot2::aes( colour = "blue", fill = "grey60", size = .5, linetype = 1, weight = 1, alpha = 1, level = NA ), draw_key = function(data, params, size) { lwd <- min(data$size, min(size) / 4) # Calculate and set colour linecol <- blendHex(data$col, "gray30", 1) fillcol <- blendHex(data$col, "#CCCCCC", 0.8) grid::grobTree( grid::rectGrob( width = grid::unit(1, "npc") - grid::unit(lwd, "mm"), height = grid::unit(1, "npc") - grid::unit(lwd, "mm"), gp = grid::gpar( col = fillcol, fill = scales::alpha(fillcol, data$alpha), lty = data$linetype, lwd = lwd * ggplot2::.pt, linejoin = "mitre" ) ), grid::linesGrob( x = c(0, 0.4, 0.6, 1), y = c(0.2, 0.6, 0.4, 0.9), gp = grid::gpar( col = linecol, fill = scales::alpha(linecol, data$alpha), lty = data$linetype, lwd = lwd * ggplot2::.pt, linejoin = "mitre" ) ) ) }, handle_na = function(self, data, params) { ## TODO: Consider removing/changing data }, draw_group = function(data, panel_scales, coord) { data <- if (!is.null(data$level)) { split(data, !is.na(data$level)) } else { list(data) } # Draw forecasted points and intervals if (length(data) == 1) { # PI=FALSE ggplot2:::ggname( "geom_forecast", GeomForecastPoint$draw_panel(data[[1]], panel_scales, coord) ) } else { # PI=TRUE ggplot2:::ggname( "geom_forecast", grid::addGrob( GeomForecastInterval$draw_group(data[[2]], panel_scales, coord), GeomForecastPoint$draw_panel(data[[1]], panel_scales, coord) ) ) } } ) GeomForecastPoint <- ggplot2::ggproto( "GeomForecastPoint", GeomForecast, ## Produces only point forecasts required_aes = c("x", "y"), setup_data = function(data, params) { data[!is.na(data$y), ] # Extract only forecast points }, draw_group = function(data, panel_scales, coord) { linecol <- blendHex(data$colour[1], "gray30", 1) # Compute alpha transparency data$alpha <- grDevices::col2rgb(linecol, alpha = TRUE)[4, ] / 255 * data$alpha # Select appropriate Geom and set defaults if (NROW(data) == 0) { # Blank ggplot2::GeomBlank$draw_panel } else if (NROW(data) == 1) { # Point GeomForecastPointGeom <- ggplot2::GeomPoint$draw_panel pointpred <- transform(data, fill = NA, colour = linecol, size = 1, shape = 19, stroke = 0.5) } else { # Line GeomForecastPointGeom <- ggplot2::GeomLine$draw_panel pointpred <- transform(data, fill = NA, colour = linecol, linewidth = size, size = NULL) } # Draw forecast points ggplot2:::ggname( "geom_forecast_point", grid::grobTree(GeomForecastPointGeom(pointpred, panel_scales, coord)) ) } ) blendHex <- function(mixcol, seqcol, 
alpha=1) { requireNamespace("methods") if (is.na(seqcol)) { return(mixcol) } # transform to hue/lightness/saturation colorspace seqcol <- grDevices::col2rgb(seqcol, alpha = TRUE) mixcol <- grDevices::col2rgb(mixcol, alpha = TRUE) seqcolHLS <- suppressWarnings(methods::coerce(colorspace::RGB(R = seqcol[1, ] / 255, G = seqcol[2, ] / 255, B = seqcol[3, ] / 255), structure(NULL, class = "HLS"))) mixcolHLS <- suppressWarnings(methods::coerce(colorspace::RGB(R = mixcol[1, ] / 255, G = mixcol[2, ] / 255, B = mixcol[3, ] / 255), structure(NULL, class = "HLS"))) # copy luminance mixcolHLS@coords[, "L"] <- seqcolHLS@coords[, "L"] mixcolHLS@coords[, "S"] <- alpha * mixcolHLS@coords[, "S"] + (1 - alpha) * seqcolHLS@coords[, "S"] mixcolHex <- suppressWarnings(methods::coerce(mixcolHLS, structure(NULL, class = "RGB"))) mixcolHex <- colorspace::hex(mixcolHex) mixcolHex <- ggplot2::alpha(mixcolHex, mixcol[4, ] / 255) return(mixcolHex) } GeomForecastInterval <- ggplot2::ggproto( "GeomForecastInterval", GeomForecast, ## Produces only forecast intervals on graph required_aes = c("x", "ymin", "ymax"), setup_data = function(data, params) { data[is.na(data$y), ] # Extract only forecast intervals }, draw_group = function(data, panel_scales, coord) { # If level scale from fabletools is not loaded, convert to colour if(is.numeric(data$level)){ leveldiff <- diff(range(data$level)) if (leveldiff == 0) { leveldiff <- 1 } shadeVal <- (data$level - min(data$level)) / leveldiff * 0.2 + 8 / 15 data$level <- rgb(shadeVal, shadeVal, shadeVal) } intervalGrobList <- lapply( split(data, data$level), FUN = function(x) { # Calculate colour fillcol <- blendHex(x$colour[1], x$level[1], 0.7) # Compute alpha transparency x$alpha <- grDevices::col2rgb(fillcol, alpha = TRUE)[4, ] / 255 * x$alpha # Select appropriate Geom and set defaults if (NROW(x) == 0) { # Blank ggplot2::GeomBlank$draw_panel } else if (NROW(x) == 1) { # Linerange GeomForecastIntervalGeom <- ggplot2::GeomLinerange$draw_panel x <- transform(x, colour = fillcol, fill = NA, linewidth = 1) } else { # Ribbon GeomForecastIntervalGeom <- ggplot2::GeomRibbon$draw_group x <- transform(x, colour = NA, fill = fillcol, linewidth = size, size = NULL) } # Create grob return(GeomForecastIntervalGeom(x, panel_scales, coord)) ## Create list pair with average ymin/ymax to order layers } ) # Draw forecast intervals ggplot2:::ggname("geom_forecast_interval", do.call(grid::grobTree, rev(intervalGrobList))) # TODO: Find reliable method of stacking them correctly } ) #' Forecast plot #' #' Generates forecasts from \code{forecast.ts} and adds them to the plot. #' Forecasts can be modified by passing forecast-specific arguments, as described below. #' #' Multivariate forecasting is supported by having each time series on a #' different group. #' #' You can also pass \code{geom_forecast} a \code{forecast} object to add it to #' the plot. #' #' The aesthetics required for the forecasting to work include forecast #' observations on the y axis, and the \code{time} of the observations on the x #' axis. Refer to the examples below. To automatically set up aesthetics, use #' \code{autoplot}. #' #' @inheritParams ggplot2::layer #' @param data The data to be displayed in this layer. There are three options: #' #' If \code{NULL}, the default, the data is inherited from the plot data as #' specified in the call to \code{\link[ggplot2]{ggplot}}. #' #' A \code{data.frame}, or other object, will override the plot data. All #' objects will be fortified to produce a data frame.
See \code{\link[ggplot2]{fortify}} #' for which variables will be created. #' #' A \code{function} will be called with a single argument, the plot data. The #' return value must be a \code{data.frame}, and will be used as the layer #' data. #' @param stat The stat object used to calculate the data. #' @param position Position adjustment, either as a string, or the result of a #' call to a position adjustment function. #' @param na.rm If \code{FALSE} (the default), removes missing values with a #' warning. If \code{TRUE} silently removes missing values. #' @param show.legend logical. Should this layer be included in the legends? #' \code{NA}, the default, includes if any aesthetics are mapped. \code{FALSE} #' never includes, and \code{TRUE} always includes. #' @param inherit.aes If \code{FALSE}, overrides the default aesthetics, rather #' than combining with them. This is most useful for helper functions that #' define both data and aesthetics and shouldn't inherit behaviour from the #' default plot specification, e.g. \code{\link[ggplot2]{borders}}. #' @param PI If \code{FALSE}, prediction intervals will not be plotted, giving #' only the forecast line. #' @param showgap If \code{showgap=FALSE}, the gap between the historical #' observations and the forecasts is removed. #' @param series Matches an unidentified forecast layer with a coloured object #' on the plot. #' @param ... Additional arguments for \code{\link{forecast.ts}}, other #' arguments are passed on to \code{\link[ggplot2]{layer}}. These are often aesthetics, #' used to set an aesthetic to a fixed value, like \code{color = "red"} or #' \code{alpha = .5}. They may also be parameters to the paired geom/stat. #' @return A layer for a ggplot graph. #' @author Mitchell O'Hara-Wild #' @seealso \code{\link[generics]{forecast}}, \code{\link[ggplot2]{ggproto}} #' @examples #' #' \dontrun{ #' library(ggplot2) #' autoplot(USAccDeaths) + geom_forecast() #' #' lungDeaths <- cbind(mdeaths, fdeaths) #' autoplot(lungDeaths) + geom_forecast() #' #' # Using fortify.ts #' p <- ggplot(aes(x=x, y=y), data=USAccDeaths) #' p <- p + geom_line() #' p + geom_forecast() #' #' # Without fortify.ts #' data <- data.frame(USAccDeaths=as.numeric(USAccDeaths), time=as.numeric(time(USAccDeaths))) #' p <- ggplot(aes(x=time, y=USAccDeaths), data=data) #' p <- p + geom_line() #' p + geom_forecast() #' #' p + geom_forecast(h=60) #' p <- ggplot(aes(x=time, y=USAccDeaths), data=data) #' p + geom_forecast(level=c(70,98)) #' p + geom_forecast(level=c(70,98),colour="lightblue") #' #' # Add forecasts to multivariate series with colour groups #' lungDeaths <- cbind(mdeaths, fdeaths) #' autoplot(lungDeaths) + geom_forecast(forecast(mdeaths), series="mdeaths") #' } #' #' @export geom_forecast <- function(mapping = NULL, data = NULL, stat = "forecast", position = "identity", na.rm = FALSE, show.legend = NA, inherit.aes = TRUE, PI=TRUE, showgap=TRUE, series=NULL, ...) { if (is.forecast(mapping) || is.mforecast(mapping)) { warning("Use autolayer instead of geom_forecast to add a forecast layer to your ggplot object.") cl <- match.call() cl[[1]] <- quote(autolayer) names(cl)[names(cl) == "mapping"] <- "object" return(eval.parent(cl)) } if (is.ts(mapping)) { data <- data.frame(y = as.numeric(mapping), x = as.numeric(time(mapping))) mapping <- ggplot2::aes(y = .data[["y"]], x = .data[["x"]]) } if (stat == "forecast") { paramlist <- list(na.rm = na.rm, PI = PI, showgap = showgap, series = series, ...)
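# When a series name is supplied, colour is mapped to the series value
# computed by StatForecast (hence after_stat), so the forecast layer can
# share its legend entry with a matching coloured series already on the plot.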
if (!is.null(series)) { if (inherits(mapping, "uneval")) { mapping$colour <- quote(ggplot2::after_stat(series)) } else { mapping <- ggplot2::aes(colour = ggplot2::after_stat(series)) } } } else { paramlist <- list(na.rm = na.rm, ...) } ggplot2::layer( geom = GeomForecast, mapping = mapping, data = data, stat = stat, position = position, show.legend = show.legend, inherit.aes = inherit.aes, params = paramlist ) } # Produce nice histogram with appropriately chosen bin widths # Designed to work with time series data without issuing warnings. #' Histogram with optional normal and kernel density functions #' #' Plots a histogram and density estimates using ggplot. #' #' #' @param x a numerical vector. #' @param add.normal Add a normal density function for comparison. #' @param add.kde Add a kernel density estimate for comparison. #' @param add.rug Add a rug plot on the horizontal axis. #' @param bins The number of bins to use for the histogram. Selected by default #' using the Friedman-Diaconis rule given by \code{\link[grDevices]{nclass.FD}}. #' @param boundary A boundary between two bins. #' @return Returns an object of class \code{ggplot}. #' @author Rob J Hyndman #' @seealso \code{\link[graphics]{hist}}, \code{\link[ggplot2]{geom_histogram}} #' @examples #' #' gghistogram(lynx, add.kde=TRUE) #' #' @export gghistogram <- function(x, add.normal=FALSE, add.kde=FALSE, add.rug=TRUE, bins, boundary=0) { if (!requireNamespace("ggplot2", quietly = TRUE)) { stop("ggplot2 is needed for this function to work. Install it via install.packages(\"ggplot2\")", call. = FALSE) } else { if (missing(bins)) { bins <- min(500, grDevices::nclass.FD(na.exclude(x))) } data <- data.frame(x = as.numeric(c(x))) # Initialise ggplot object and plot histogram binwidth <- (max(x, na.rm = TRUE) - min(x, na.rm = TRUE)) / bins p <- ggplot2::ggplot() + ggplot2::geom_histogram(ggplot2::aes(x), data = data, binwidth = binwidth, boundary = boundary) + ggplot2::xlab(deparse(substitute(x))) # Add normal density estimate if (add.normal || add.kde) { xmin <- min(x, na.rm = TRUE) xmax <- max(x, na.rm = TRUE) if (add.kde) { h <- stats::bw.SJ(x) xmin <- xmin - 3 * h xmax <- xmax + 3 * h } if (add.normal) { xmean <- mean(x, na.rm = TRUE) xsd <- sd(x, na.rm = TRUE) xmin <- min(xmin, xmean - 3 * xsd) xmax <- max(xmax, xmean + 3 * xsd) } xgrid <- seq(xmin, xmax, length.out = 512) if (add.normal) { df <- data.frame(x = xgrid, y = length(x) * binwidth * stats::dnorm(xgrid, xmean, xsd)) p <- p + ggplot2::geom_line(ggplot2::aes(df$x, df$y), col = "#ff8a62") } if (add.kde) { kde <- stats::density(x, bw = h, from = xgrid[1], to = xgrid[512], n = 512) p <- p + ggplot2::geom_line(ggplot2::aes(x = kde$x, y = length(x) * binwidth * kde$y), col = "#67a9ff") } } if (add.rug) { p <- p + ggplot2::geom_rug(ggplot2::aes(x)) } return(p) } } forecast/R/checkAdmissibility.R0000644000176200001440000000235114150370574016217 0ustar liggesusers# Author: srazbash and Rob J Hyndman ############################################################################### checkAdmissibility <- function(opt.env, box.cox=NULL, small.phi=NULL, ar.coefs=NULL, ma.coefs=NULL, tau=0, bc.lower=0, bc.upper=1) { # Check the range of the Box-Cox parameter if (!is.null(box.cox)) { if ((box.cox <= bc.lower) | (box.cox >= bc.upper)) { return(FALSE) } } # Check the range of small.phi if (!is.null(small.phi)) { if (((small.phi < .8) | (small.phi > 1))) { return(FALSE) } } # Check AR part for stationarity if (!is.null(ar.coefs)) { arlags <- which(abs(ar.coefs) > 1e-08) if (length(arlags) > 0L) { p <- max(arlags) if
(min(Mod(polyroot(c(1, -ar.coefs[1L:p])))) < 1 + 1e-2) { return(FALSE) } } } # Check MA part for invertibility if (!is.null(ma.coefs)) { malags <- which(abs(ma.coefs) > 1e-08) if (length(malags) > 0L) { q <- max(malags) if (min(Mod(polyroot(c(1, ma.coefs[1L:q])))) < 1 + 1e-2) { return(FALSE) } } } # Check the eigen values of the D matrix D.eigen.values <- eigen(opt.env$D, symmetric = FALSE, only.values = TRUE)$values return(all(abs(D.eigen.values) < 1 + 1e-2)) } forecast/R/simulate_tbats.R0000644000176200001440000000302514207263356015434 0ustar liggesusers#' @rdname simulate.ets #' @export simulate.tbats <- function(object, nsim=length(object$y), seed = NULL, future=TRUE, bootstrap=FALSE, innov = NULL, ...) { if (is.null(innov)) { if (!exists(".Random.seed", envir = .GlobalEnv)) { runif(1) } if (is.null(seed)) { RNGstate <- .Random.seed } else { R.seed <- .Random.seed set.seed(seed) RNGstate <- structure(seed, kind = as.list(RNGkind())) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } } else { nsim <- length(innov) } if (bootstrap) { res <- residuals(object) res <- na.omit(res - mean(res, na.rm = TRUE)) e <- sample(res, nsim, replace = TRUE) } else if (is.null(innov)) { e <- rnorm(nsim, 0, sqrt(object$variance)) } else { e <- innov } x <- getResponse(object) y <- numeric(nsim) if(future) { dataplusy <- x } else { # Start somewhere in the original series dataplusy <- ts(sample(x, 1), start=-1/frequency(x), frequency = frequency(x)) } fitplus <- object for(i in seq_along(y)) { fc <- forecast(fitplus, h=1, biasadj=FALSE)$mean if(is.null(object$lambda)) { y[i] <- fc + e[i] } else { y[i] <- InvBoxCox(BoxCox(fc, object$lambda) + e[i], object$lambda) } dataplusy <- ts(c(dataplusy, y[i]), start=start(dataplusy), frequency=frequency(dataplusy)) fitplus <- tbats(dataplusy, model=fitplus) } return(tail(dataplusy, nsim)) } forecast/R/msts.R0000644000176200001440000000730414207263356013406 0ustar liggesusers#' Multi-Seasonal Time Series #' #' msts is an S3 class for multi seasonal time series objects, intended to be #' used for models that support multiple seasonal periods. The msts class #' inherits from the ts class and has an additional "msts" attribute which #' contains the vector of seasonal periods. All methods that work on a ts #' class, should also work on a msts class. #' #' @aliases print.msts window.msts `[.msts` #' #' @param data A numeric vector, ts object, matrix or data frame. It is #' intended that the time series data is univariate, otherwise treated the same #' as ts(). #' @param seasonal.periods A vector of the seasonal periods of the msts. #' @param ts.frequency The seasonal period that should be used as frequency of #' the underlying ts object. The default value is \code{max(seasonal.periods)}. #' @param ... Arguments to be passed to the underlying call to \code{ts()}. For #' example \code{start=c(1987,5)}. #' @return An object of class \code{c("msts", "ts")}. If there is only one #' seasonal period (i.e., \code{length(seasonal.periods)==1}), then the object #' is of class \code{"ts"}. #' @author Slava Razbash and Rob J Hyndman #' @keywords ts #' @examples #' #' x <- msts(taylor, seasonal.periods=c(2*24,2*24*7,2*24*365), start=2000+22/52) #' y <- msts(USAccDeaths, seasonal.periods=12, start=1949) #' #' @export msts <- function(data, seasonal.periods, ts.frequency=floor(max(seasonal.periods)), ...) 
{ # if(!is.element(ts.frequency, round(seasonal.periods-0.5+1e-12))) # stop("ts.frequency should be one of the seasonal periods") if (inherits(data, "ts") && frequency(data) == ts.frequency && length(list(...)) == 0) { object <- data } else { object <- ts(data = data, frequency = ts.frequency, ...) } if (length(seasonal.periods) > 1L) { class(object) <- c("msts", "ts") attr(object, "msts") <- sort(seasonal.periods) } return(object) } #' @export print.msts <- function(x, ...) { cat("Multi-Seasonal Time Series:\n") cat("Start: ") cat(start(x)) # cat("\nEnd: ") # cat(x$end) cat("\nSeasonal Periods: ") cat(attr(x, "msts")) cat("\nData:\n") xx <- unclass(x) # handles both univariate and multivariate ts attr(xx, "tsp") <- attr(xx, "msts") <- NULL print(xx) # print(matrix(x, ncol=length(x)), nrow=1) cat("\n") } #' @export window.msts <- function(x, ...) { seasonal.periods <- attr(x, "msts") class(x) <- c("ts") x <- window(x, ...) class(x) <- c("msts", "ts") attr(x, "msts") <- seasonal.periods return(x) } #' @export `[.msts` <- function(x, i, j, drop = TRUE) { y <- NextMethod("[") if(!inherits(y, "ts")) return(y) class(y) <- c("msts", class(y)) attr(y, "msts") <- attr(x, "msts") y } # Copy msts attributes from x to y copy_msts <- function(x, y) { if(NROW(x) > NROW(y)) { # Pad y with initial NAs if(NCOL(y) == 1) { y <- c(rep(NA, NROW(x) - NROW(y)), y) } else { y <- rbind(matrix(NA, ncol=NCOL(y), nrow = NROW(x) - NROW(y)), y) } } else if(NROW(x) != NROW(y)) { stop("x and y should have the same number of observations") } if(NCOL(y) > 1) { class(y) <- c("mts", "ts", "matrix") } else { class(y) <- "ts" } if("msts" %in% class(x)) class(y) <- c("msts", class(y)) attr <- attributes(x) attributes(y)$tsp <- attr$tsp attributes(y)$msts <- attr$msts return(y) } # Copy msts attributes from x to y shifted to forecast period future_msts <- function(x, y) { if(NCOL(y) > 1) { class(y) <- c("mts", "ts", "matrix") } else { class(y) <- "ts" } if("msts" %in% class(x)) class(y) <- c("msts", class(y)) attr <- attributes(x) attr$tsp[1:2] <- attr$tsp[2] + c(1,NROW(y))/attr$tsp[3] attributes(y)$tsp <- attr$tsp attributes(y)$msts <- attr$msts return(y) } forecast/R/subset.R0000644000176200001440000001277014634676727013746 0ustar liggesusers#' Subsetting a time series #' #' Various types of subsetting of a time series. Allows subsetting by index #' values (unlike \code{\link[stats]{window}}). Also allows extraction of the #' values of a specific season or subset of seasons in each year. For example, #' to extract all values for the month of May from a time series. #' #' If character values for months are used, either upper or lower case may be #' used, and partial unambiguous names are acceptable. Possible character #' values for quarters are \code{"Q1"}, \code{"Q2"}, \code{"Q3"}, and #' \code{"Q4"}. #' #' @param x a univariate time series to be subsetted #' @param subset optional logical expression indicating elements to keep; #' missing values are taken as false. \code{subset} must be the same length as #' \code{x}. #' @param month Numeric or character vector of months to retain. Partial #' matching on month names used. #' @param quarter Numeric or character vector of quarters to retain. #' @param season Numeric vector of seasons to retain. #' @param start Index of start of contiguous subset. #' @param end Index of end of contiguous subset. #' @param ... Other arguments, unused. #' @return If \code{subset} is used, a numeric vector is returned with no ts #' attributes. 
If \code{start} and/or \code{end} are used, a ts object is #' returned consisting of x[start:end], with the appropriate time series #' attributes retained. Otherwise, a ts object is returned with frequency equal #' to the length of \code{month}, \code{quarter} or \code{season}. #' @author Rob J Hyndman #' @seealso \code{\link[base]{subset}}, \code{\link[stats]{window}} #' @keywords ts #' @examples #' plot(subset(gas,month="November")) #' subset(woolyrnq,quarter=3) #' subset(USAccDeaths, start=49) #' #' @export subset.ts <- function(x, subset=NULL, month=NULL, quarter=NULL, season=NULL, start=NULL, end=NULL, ...) { if (!is.null(subset)) { if (NROW(subset) != NROW(x)) { stop("subset must be the same length as x") } if (NCOL(subset) != 1) { stop("subset must be a vector of rows to keep") } if ("mts" %in% class(x)) { return(subset.matrix(x, subset)) } else { return(subset.default(x, subset)) } } else if (!is.null(start) | !is.null(end)) { if (is.null(start)) { start <- 1 } if (is.null(end)) { end <- NROW(x) } if ("mts" %in% class(x)) { xsub <- x[start:end, , drop=FALSE] } else { xsub <- x[start:end] } tspx <- tsp(x) return(ts(xsub, frequency = tspx[3], start = tspx[1L] + (start - 1) / tspx[3L])) } else if (frequency(x) <= 1) { stop("Data must be seasonal") } if (!is.null(month)) { if (frequency(x) != 12) { stop("Data is not monthly") } if (is.character(month)) { season <- pmatch(tolower(month), tolower(month.name), duplicates.ok = TRUE) } else { season <- month } season <- na.omit(season) if (length(season) == 0L) { stop("No recognizable months") } if (min(season) < 1L | max(season) > 12L) { stop("Months must be between 1 and 12") } } else if (!is.null(quarter)) { if (frequency(x) != 4) { stop("Data is not quarterly") } if (is.character(quarter)) { season <- pmatch(tolower(quarter), paste("q", 1:4, sep = ""), duplicates.ok = TRUE) } else { season <- quarter } season <- na.omit(season) if (length(season) == 0L) { stop("No recognizable quarters") } if (min(season) < 1L | max(season) > 4L) { stop("Quarters must be between 1 and 4") } } else if (is.null(season)) { stop("No subset specified") } else if (min(season) < 1L | max(season) > frequency(x)) { stop(paste("Seasons must be between 1 and", frequency(x))) } start <- utils::head(time(x)[is.element(cycle(x), season)], 1) if ("mts" %in% class(x)) { x <- subset.matrix(x, is.element(cycle(x), season)) } else { x <- subset.default(x, is.element(cycle(x), season)) } return(ts(x, frequency = length(season), start = start)) } # head.ts and tail.ts only defined/exported for R < 4.5.0 # due to new base R functions. #' @importFrom utils head.matrix #' @importFrom utils tail.matrix #' @rawNamespace if (getRversion() < "4.5.0") S3method(head, ts) #' @rawNamespace if (getRversion() < "4.5.0") S3method(tail, ts) if(getRversion() < "4.5.0") { head.ts <- function(x, n=6L, ...) { attr_x <- attributes(x) attr_x$names <- NULL if (NCOL(x) > 1) { hx <- head.matrix(as.matrix(x), n = n, ...) } else if ((length(x) + n) > 0) { hx <- head(c(x), n = n, ...) } else { return(numeric(0)) } attr_x$tsp[2] <- attr_x$tsp[1] + (NROW(hx) - 1) / attr_x$tsp[3] if (!is.null(dim(x))) { attr_x$dim[1] <- NROW(hx) } attributes(hx) <- attr_x return(hx) } tail.ts <- function(x, n=6L, ...) { attr_x <- attributes(x) attr_x$names <- NULL if (NCOL(x) > 1) { hx <- tail.matrix(as.matrix(x), n = n, ...) } else if ((length(x) + n) > 0) { hx <- tail(c(x), n = n, ...) 
} else { return(numeric(0)) } attr_x$tsp[1] <- attr_x$tsp[2] - (NROW(hx) - 1) / attr_x$tsp[3] if (!is.null(dim(x))) { attr_x$dim[1] <- NROW(hx) } attributes(hx) <- attr_x return(hx) } } #' @rdname subset.ts #' @export subset.msts <- function(x, subset=NULL, start=NULL, end=NULL, ...) { out <- subset.ts(x, start = start, end = end, ...) tspx <- tsp(out) msts( out, seasonal.periods = attr(x, "msts"), start = tspx[1], ts.frequency = tspx[3] ) } forecast/R/fitTBATS.R0000644000176200001440000005051714323125536014000 0ustar liggesusersfitPreviousTBATSModel <- function(y, model, biasadj=FALSE) { seasonal.periods <- model$seasonal.periods if (is.null(seasonal.periods) == FALSE) { seasonal.periods <- sort(seasonal.periods) } # Get the parameters out of the param.vector paramz <- unParameteriseTBATS(model$parameters$vect, model$parameters$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta if (!is.null(beta.v)) { adj.beta <- 1 } else { adj.beta <- 0 } small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } if (!is.null(seasonal.periods)) { tau <- as.integer(2 * sum(model$k.vector)) gamma.bold <- matrix(0, nrow = 1, ncol = (2 * sum(model$k.vector))) } else { tau <- as.integer(0) gamma.bold <- NULL } g <- matrix(0, nrow = ((2 * sum(model$k.vector)) + 1 + adj.beta + p + q), ncol = 1) if (p != 0) { g[(1 + adj.beta + tau + 1), 1] <- 1 } if (q != 0) { g[(1 + adj.beta + tau + p + 1), 1] <- 1 } y.touse <- y if (is.null(lambda) == FALSE) { y.touse <- BoxCox(y, lambda = lambda) lambda <- attr(y.touse, "lambda") } ## Calculate the variance: # 1. Re-set up the matrices w <- .Call("makeTBATSWMatrix", smallPhi_s = small.phi, kVector_s = model$k.vector, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, tau_s = tau, PACKAGE = "forecast") if (!is.null(gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = model$k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v, PACKAGE = "forecast") } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") F <- makeTBATSFMatrix(alpha = alpha, beta = beta.v, small.phi = small.phi, seasonal.periods = seasonal.periods, k.vector = model$k.vector, gamma.bold.matrix = gamma.bold, ar.coefs = ar.coefs, ma.coefs = ma.coefs) .Call("updateFMatrix", F, small.phi, alpha, beta.v, gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") # 2. Calculate! 
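# calcModel() filters the series through the linear innovations state space
# form used throughout this file (cf. D = F - g w' below):
#   y.hat[t] = w' x[t-1];  e[t] = y[t] - y.hat[t];  x[t] = F x[t-1] + g e[t],
# returning the one-step fitted values, residuals and state matrix x.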
fitted.values.and.errors <- calcModel(y.touse, model$seed.states, F, g, w) e <- fitted.values.and.errors$e fitted.values <- fitted.values.and.errors$y.hat variance <- sum((e * e)) / length(y) if (!is.null(lambda)) { fitted.values <- InvBoxCox(fitted.values, lambda = lambda, biasadj, variance) } model.for.output <- model model.for.output$variance <- variance model.for.output$fitted.values <- ts(c(fitted.values)) model.for.output$errors <- ts(c(e)) tsp(model.for.output$fitted.values) <- tsp(model.for.output$errors) <- tsp(y) model.for.output$x <- fitted.values.and.errors$x model.for.output$y <- y return(model.for.output) } fitSpecificTBATS <- function(y, use.box.cox, use.beta, use.damping, seasonal.periods=NULL, k.vector=NULL, starting.params=NULL, x.nought=NULL, ar.coefs=NULL, ma.coefs=NULL, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE) { if (!is.null(seasonal.periods)) { seasonal.periods <- sort(seasonal.periods) } ## Meaning/purpose of the first if() statement: If this is the first pass, then use default starting values. Else if it is the second pass, then use the values from the first pass as starting values. if (is.null(starting.params)) { ## Check for the existence of ARMA() coefficients if (!is.null(ar.coefs)) { p <- length(ar.coefs) } else { p <- 0 } if (!is.null(ma.coefs)) { q <- length(ma.coefs) } else { q <- 0 } # Calculate starting values: alpha <- 0.09 if (use.beta) { adj.beta <- 1 beta.v <- 0.05 b <- 0.00 if (use.damping) { small.phi <- .999 } else { small.phi <- 1 } } else { adj.beta <- 0 beta.v <- NULL b <- NULL small.phi <- NULL use.damping <- FALSE } if (!is.null(seasonal.periods)) { gamma.one.v <- rep(0, length(k.vector)) gamma.two.v <- rep(0, length(k.vector)) s.vector <- numeric(2 * sum(k.vector)) k.vector <- as.integer(k.vector) } else { gamma.one.v <- NULL gamma.two.v <- NULL s.vector <- NULL } if (use.box.cox) { if (!is.null(init.box.cox)) { lambda <- init.box.cox } else { lambda <- BoxCox.lambda(y, lower = 0, upper = 1.5) } y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") } else { # the "else" is not needed at the moment lambda <- NULL } } else { paramz <- unParameteriseTBATS(starting.params$vect, starting.params$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta if (!is.null(beta.v)) { adj.beta <- 1 } else { adj.beta <- 0 } b <- 0 small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v if (!is.null(seasonal.periods)) { s.vector <- numeric(2 * sum(k.vector)) } else { s.vector <- NULL } # ar.coefs <- paramz$ar.coefs # ma.coefs <- paramz$ma.coefs ## Check for the existence of ARMA() coefficients if (!is.null(ar.coefs)) { p <- length(ar.coefs) } else { p <- 0 } if (!is.null(ma.coefs)) { q <- length(ma.coefs) } else { q <- 0 } } if (is.null(x.nought)) { # Start with the seed states equal to zero if (!is.null(ar.coefs)) { d.vector <- numeric(length(ar.coefs)) } else { d.vector <- NULL } if (!is.null(ma.coefs)) { epsilon.vector <- numeric(length(ma.coefs)) } else { epsilon.vector <- NULL } x.nought <- makeXMatrix(l = 0, b = b, s.vector = s.vector, d.vector = d.vector, epsilon.vector = epsilon.vector)$x } # Make the parameter vector parameterise param.vector <- parameterise(alpha = alpha, beta.v = beta.v, small.phi = small.phi, gamma.v = cbind(gamma.one.v, gamma.two.v), lambda = lambda, ar.coefs = ar.coefs, ma.coefs = ma.coefs) par.scale <- makeParscale(param.vector$control) if (!is.null(seasonal.periods)) { tau <- as.integer(2 * sum(k.vector)) } else { tau <-
as.integer(0) } w <- .Call("makeTBATSWMatrix", smallPhi_s = small.phi, kVector_s = k.vector, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, tau_s = tau, PACKAGE = "forecast") if (!is.null(seasonal.periods)) { gamma.bold <- matrix(0, nrow = 1, ncol = (2 * sum(k.vector))) .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v, PACKAGE = "forecast") } else { gamma.bold <- NULL } g <- matrix(0, nrow = ((2 * sum(k.vector)) + 1 + adj.beta + p + q), ncol = 1) if (p != 0) { g[(1 + adj.beta + tau + 1), 1] <- 1 } if (q != 0) { g[(1 + adj.beta + tau + p + 1), 1] <- 1 } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") F <- makeTBATSFMatrix(alpha = alpha, beta = beta.v, small.phi = small.phi, seasonal.periods = seasonal.periods, k.vector = k.vector, gamma.bold.matrix = gamma.bold, ar.coefs = ar.coefs, ma.coefs = ma.coefs) D <- F - g %*% w$w.transpose #### # Set up environment opt.env <- new.env() assign("F", F, envir = opt.env) assign("w.transpose", w$w.transpose, envir = opt.env) assign("g", g, envir = opt.env) assign("gamma.bold", gamma.bold, envir = opt.env) assign("k.vector", k.vector, envir = opt.env) assign("y", matrix(y, nrow = 1, ncol = length(y)), envir = opt.env) assign("y.hat", matrix(0, nrow = 1, ncol = length(y)), envir = opt.env) assign("e", matrix(0, nrow = 1, ncol = length(y)), envir = opt.env) assign("x", matrix(0, nrow = length(x.nought), ncol = length(y)), envir = opt.env) ## Set up matrices to find the seed states if (use.box.cox) { y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") .Call("calcTBATSFaster", ys = matrix(y.transformed, nrow = 1, ncol = length(y.transformed)), yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, PACKAGE = "forecast") y.tilda <- opt.env$e } else { .Call("calcTBATSFaster", ys = opt.env$y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, PACKAGE = "forecast") y.tilda <- opt.env$e } w.tilda.transpose <- matrix(0, nrow = length(y), ncol = ncol(w$w.transpose)) w.tilda.transpose[1, ] <- w$w.transpose w.tilda.transpose <- .Call("calcWTilda", wTildaTransposes = w.tilda.transpose, Ds = D, PACKAGE = "forecast") # Remove the AR() and MA() bits if they exist if ((p != 0) | (q != 0)) { end.cut <- ncol(w.tilda.transpose) start.cut <- end.cut - (p + q) + 1 w.tilda.transpose <- w.tilda.transpose[, -c(start.cut:end.cut)] } x.nought <- lm(t(y.tilda) ~ w.tilda.transpose - 1)$coefficients x.nought <- matrix(x.nought, nrow = length(x.nought), ncol = 1) ## Replace the AR() and MA() bits if they exist if ((p != 0) | (q != 0)) { arma.seed.states <- numeric((p + q)) arma.seed.states <- matrix(arma.seed.states, nrow = length(arma.seed.states), ncol = 1) x.nought <- rbind(x.nought, arma.seed.states) } ## Optimisation if (use.box.cox) { # Un-transform the seed states assign("x.nought.untransformed", InvBoxCox(x.nought, lambda = lambda), envir = opt.env) # Optimise the likelihood function optim.like <- optim( par = param.vector$vect, fn = calcLikelihoodTBATS, method = "Nelder-Mead", opt.env = opt.env, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, param.control = param.vector$control, p = p, q = q, tau = tau, bc.lower = bc.lower, bc.upper = bc.upper, control = list(maxit = (100 * 
length(param.vector$vect) ^ 2), parscale = par.scale) ) # Get the parameters out of the param.vector paramz <- unParameteriseTBATS(optim.like$par, param.vector$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } # Transform the seed states x.nought <- BoxCox(opt.env$x.nought.untransformed, lambda = lambda) lambda <- attr(x.nought, "lambda") ## Calculate the variance: # 1. Re-set up the matrices w <- .Call("makeTBATSWMatrix", smallPhi_s = small.phi, kVector_s = k.vector, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, tau_s = tau, PACKAGE = "forecast") if (!is.null(gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v, PACKAGE = "forecast") } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") .Call("updateFMatrix", F, small.phi, alpha, beta.v, gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") # 2. Calculate! y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") fitted.values.and.errors <- calcModel(y.transformed, x.nought, F, g, w) e <- fitted.values.and.errors$e variance <- sum((e * e)) / length(y) fitted.values <- InvBoxCox(fitted.values.and.errors$y.hat, lambda = lambda, biasadj, variance) attr(lambda, "biasadj") <- biasadj # e <- InvBoxCox(e, lambda=lambda) ee <- y - fitted.values } else { # else if we are not using the Box-Cox transformation # Optimise the likelihood function if (length(param.vector$vect) > 1) { optim.like <- optim(par = param.vector$vect, fn = calcLikelihoodNOTransformedTBATS, method = "Nelder-Mead", opt.env = opt.env, x.nought = x.nought, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, param.control = param.vector$control, p = p, q = q, tau = tau, control = list(maxit = (100 * length(param.vector$vect) ^ 2), parscale = par.scale)) } else { optim.like <- optim(par = param.vector$vect, fn = calcLikelihoodNOTransformedTBATS, method = "BFGS", opt.env = opt.env, x.nought = x.nought, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, param.control = param.vector$control, p = p, q = q, tau = tau, control = list(parscale = par.scale)) } # Get the parameters out of the param.vector paramz <- unParameteriseTBATS(optim.like$par, param.vector$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } ## Calculate the variance: # 1. 
Re-set up the matrices w <- .Call("makeTBATSWMatrix", smallPhi_s = small.phi, kVector_s = k.vector, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, tau_s = tau, PACKAGE = "forecast") if (!is.null(gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = gamma.bold, kVector_s = k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v, PACKAGE = "forecast") } .Call("updateTBATSGMatrix", g_s = g, gammaBold_s = gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") .Call("updateFMatrix", F, small.phi, alpha, beta.v, gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") # 2. Calculate! fitted.values.and.errors <- calcModel(y, x.nought, F, g, w) e <- fitted.values.and.errors$e fitted.values <- fitted.values.and.errors$y.hat variance <- sum((e * e)) / length(y) } # Get the likelihood likelihood <- optim.like$value # Calculate the AIC aic <- likelihood + 2 * (length(param.vector$vect) + nrow(x.nought)) # Make a list object fits <- ts(c(fitted.values)) e <- ts(c(e)) tsp(fits) <- tsp(e) <- tsp(y) model.for.output <- list( lambda = lambda, alpha = alpha, beta = beta.v, damping.parameter = small.phi, gamma.one.values = gamma.one.v, gamma.two.values = gamma.two.v, ar.coefficients = ar.coefs, ma.coefficients = ma.coefs, likelihood = likelihood, optim.return.code = optim.like$convergence, variance = variance, AIC = aic, parameters = list(vect = optim.like$par, control = param.vector$control), seed.states = x.nought, fitted.values = fits, errors = e, x = fitted.values.and.errors$x, seasonal.periods = seasonal.periods, k.vector = k.vector, y = y, p = p, q = q ) class(model.for.output) <- c("tbats", "bats") return(model.for.output) } calcLikelihoodTBATS <- function(param.vector, opt.env, use.beta, use.small.phi, seasonal.periods, param.control, p=0, q=0, tau=0, bc.lower=0, bc.upper=1) { # param vector should be as follows: Box-Cox.parameter, alpha, beta, small.phi, gamma.vector, ar.coefs, ma.coefs # Put the components of the param.vector into meaningful individual variables paramz <- unParameteriseTBATS(param.vector, param.control) box.cox.parameter <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v ar.coefs <- paramz$ar.coefs ma.coefs <- paramz$ma.coefs if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } x.nought <- BoxCox(opt.env$x.nought.untransformed, lambda = box.cox.parameter) lambda <- attr(x.nought, "lambda") .Call("updateWtransposeMatrix", wTranspose_s = opt.env$w.transpose, smallPhi_s = small.phi, tau_s = as.integer(tau), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") if (!is.null(opt.env$gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = opt.env$gamma.bold, kVector_s = opt.env$k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v) } .Call("updateTBATSGMatrix", g_s = opt.env$g, gammaBold_s = opt.env$gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") .Call("updateFMatrix", opt.env$F, small.phi, alpha, beta.v, opt.env$gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") mat.transformed.y <- BoxCox(opt.env$y, box.cox.parameter) lambda <- attr(mat.transformed.y, "lambda") n <- ncol(opt.env$y) .Call("calcTBATSFaster", ys = 
mat.transformed.y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, PACKAGE = "forecast") ## #### #################################################################### log.likelihood <- n * log(sum(opt.env$e ^ 2)) - 2 * (box.cox.parameter - 1) * sum(log(opt.env$y)) if (is.na(log.likelihood)) { # Not sure why this would occur return(Inf) } assign("D", (opt.env$F - opt.env$g %*% opt.env$w.transpose), envir = opt.env) if (checkAdmissibility(opt.env, box.cox = box.cox.parameter, small.phi = small.phi, ar.coefs = ar.coefs, ma.coefs = ma.coefs, tau = sum(seasonal.periods), bc.lower = bc.lower, bc.upper = bc.upper)) { return(log.likelihood) } else { return(Inf) } } calcLikelihoodNOTransformedTBATS <- function(param.vector, opt.env, x.nought, use.beta, use.small.phi, seasonal.periods, param.control, p=0, q=0, tau=0) { # The likelihood function without the Box-Cox Transformation # param vector should be as follows: alpha, beta, small.phi, gamma.vector, ar.coefs, ma.coefs # Put the components of the param.vector into meaningful individual variables paramz <- unParameteriseTBATS(param.vector, param.control) box.cox.parameter <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.one.v <- paramz$gamma.one.v gamma.two.v <- paramz$gamma.two.v if (!is.null(paramz$ar.coefs)) { p <- length(paramz$ar.coefs) ar.coefs <- matrix(paramz$ar.coefs, nrow = 1, ncol = p) } else { ar.coefs <- NULL p <- 0 } if (!is.null(paramz$ma.coefs)) { q <- length(paramz$ma.coefs) ma.coefs <- matrix(paramz$ma.coefs, nrow = 1, ncol = q) } else { ma.coefs <- NULL q <- 0 } .Call("updateWtransposeMatrix", wTranspose_s = opt.env$w.transpose, smallPhi_s = small.phi, tau_s = as.integer(tau), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") if (!is.null(opt.env$gamma.bold)) { .Call("updateTBATSGammaBold", gammaBold_s = opt.env$gamma.bold, kVector_s = opt.env$k.vector, gammaOne_s = gamma.one.v, gammaTwo_s = gamma.two.v) } .Call("updateTBATSGMatrix", g_s = opt.env$g, gammaBold_s = opt.env$gamma.bold, alpha_s = alpha, beta_s = beta.v, PACKAGE = "forecast") .Call("updateFMatrix", opt.env$F, small.phi, alpha, beta.v, opt.env$gamma.bold, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") n <- ncol(opt.env$y) .Call("calcTBATSFaster", ys = opt.env$y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, PACKAGE = "forecast") ## #### #################################################################### log.likelihood <- n * log(sum(opt.env$e * opt.env$e)) if (is.na(log.likelihood)) { # Not sure why this would occur return(Inf) } assign("D", (opt.env$F - opt.env$g %*% opt.env$w.transpose), envir = opt.env) if (checkAdmissibility(opt.env = opt.env, box.cox = NULL, small.phi = small.phi, ar.coefs = ar.coefs, ma.coefs = ma.coefs, tau = tau)) { return(log.likelihood) } else { return(Inf) } } forecast/R/ets.R0000644000176200001440000012077214633662406013222 0ustar liggesusers#' Exponential smoothing state space model #' #' Returns ets model applied to \code{y}. #' #' Based on the classification of methods as described in Hyndman et al (2008). #' #' The methodology is fully automatic. The only required argument for ets is #' the time series. The model is chosen automatically if not specified. This #' methodology performed extremely well on the M3-competition data. 
(See #' Hyndman, et al, 2002, below.) #' #' @aliases print.ets summary.ets as.character.ets coef.ets tsdiag.ets #' #' @param y a numeric vector or time series of class \code{ts} #' @param model Usually a three-character string identifying method using the #' framework terminology of Hyndman et al. (2002) and Hyndman et al. (2008). #' The first letter denotes the error type ("A", "M" or "Z"); the second letter #' denotes the trend type ("N","A","M" or "Z"); and the third letter denotes #' the season type ("N","A","M" or "Z"). In all cases, "N"=none, "A"=additive, #' "M"=multiplicative and "Z"=automatically selected. So, for example, "ANN" is #' simple exponential smoothing with additive errors, "MAM" is multiplicative #' Holt-Winters' method with multiplicative errors, and so on. #' #' It is also possible for the model to be of class \code{"ets"}, and equal to #' the output from a previous call to \code{ets}. In this case, the same model #' is fitted to \code{y} without re-estimating any smoothing parameters. See #' also the \code{use.initial.values} argument. #' @param damped If TRUE, use a damped trend (either additive or #' multiplicative). If NULL, both damped and non-damped trends will be tried #' and the best model (according to the information criterion \code{ic}) #' returned. #' @param alpha Value of alpha. If NULL, it is estimated. #' @param beta Value of beta. If NULL, it is estimated. #' @param gamma Value of gamma. If NULL, it is estimated. #' @param phi Value of phi. If NULL, it is estimated. #' @param additive.only If TRUE, will only consider additive models. Default is #' FALSE. #' @param lambda Box-Cox transformation parameter. If \code{lambda="auto"}, #' then a transformation is automatically selected using \code{BoxCox.lambda}. #' The transformation is ignored if NULL. Otherwise, #' data transformed before model is estimated. When \code{lambda} is specified, #' \code{additive.only} is set to \code{TRUE}. #' @param lower Lower bounds for the parameters (alpha, beta, gamma, phi). Ignored if \code{bounds=="admissible"}. #' @param upper Upper bounds for the parameters (alpha, beta, gamma, phi). Ignored if \code{bounds=="admissible"}. #' @param opt.crit Optimization criterion. One of "mse" (Mean Square Error), #' "amse" (Average MSE over first \code{nmse} forecast horizons), "sigma" #' (Standard deviation of residuals), "mae" (Mean of absolute residuals), or #' "lik" (Log-likelihood, the default). #' @param nmse Number of steps for average multistep MSE (1<=\code{nmse}<=30). #' @param bounds Type of parameter space to impose: \code{"usual" } indicates #' all parameters must lie between specified lower and upper bounds; #' \code{"admissible"} indicates parameters must lie in the admissible space; #' \code{"both"} (default) takes the intersection of these regions. #' @param ic Information criterion to be used in model selection. #' @param restrict If \code{TRUE} (default), the models with infinite variance #' will not be allowed. #' @param allow.multiplicative.trend If \code{TRUE}, models with multiplicative #' trend are allowed when searching for a model. Otherwise, the model space #' excludes them. This argument is ignored if a multiplicative trend model is #' explicitly requested (e.g., using \code{model="MMN"}). #' @param use.initial.values If \code{TRUE} and \code{model} is of class #' \code{"ets"}, then the initial values in the model are also not #' re-estimated. #' @param na.action A function which indicates what should happen when the data #' contains NA values. 
By default, the largest contiguous portion of the #' time-series will be used. #' @param ... Other undocumented arguments. #' @inheritParams forecast.ts #' #' @return An object of class "\code{ets}". #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{ets} and associated #' functions. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{HoltWinters}}, \code{\link{rwf}}, #' \code{\link{Arima}}. #' @references Hyndman, R.J., Koehler, A.B., Snyder, R.D., and Grose, S. (2002) #' "A state space framework for automatic forecasting using exponential #' smoothing methods", \emph{International J. Forecasting}, \bold{18}(3), #' 439--454. #' #' Hyndman, R.J., Akram, Md., and Archibald, B. (2008) "The admissible #' parameter space for exponential smoothing models". \emph{Annals of #' Statistical Mathematics}, \bold{60}(2), 407--426. #' #' Hyndman, R.J., Koehler, A.B., Ord, J.K., and Snyder, R.D. (2008) #' \emph{Forecasting with exponential smoothing: the state space approach}, #' Springer-Verlag. \url{http://www.exponentialsmoothing.net}. #' @keywords ts #' @examples #' fit <- ets(USAccDeaths) #' plot(forecast(fit)) #' #' @export ets <- function(y, model="ZZZ", damped=NULL, alpha=NULL, beta=NULL, gamma=NULL, phi=NULL, additive.only=FALSE, lambda=NULL, biasadj=FALSE, lower=c(rep(0.0001, 3), 0.8), upper=c(rep(0.9999, 3), 0.98), opt.crit=c("lik", "amse", "mse", "sigma", "mae"), nmse=3, bounds=c("both", "usual", "admissible"), ic=c("aicc", "aic", "bic"), restrict=TRUE, allow.multiplicative.trend=FALSE, use.initial.values=FALSE, na.action = c("na.contiguous", "na.interp", "na.fail"), ...) { # dataname <- substitute(y) opt.crit <- match.arg(opt.crit) bounds <- match.arg(bounds) ic <- match.arg(ic) if(!is.function(na.action)){ na.fn_name <- match.arg(na.action) na.action <- get(na.fn_name) } seriesname <- deparse(substitute(y)) if (any(class(y) %in% c("data.frame", "list", "matrix", "mts"))) { stop("y should be a univariate time series") } y <- as.ts(y) # Check if data is constant if (missing(model) && is.constant(y)) { return(ses(y, alpha = 0.99999, initial = "simple")$model) } # Remove missing values near ends ny <- length(y) y <- na.action(y) if (ny != length(y) && na.fn_name == "na.contiguous") { warning("Missing values encountered. 
Using longest contiguous portion of time series") ny <- length(y) } orig.y <- y if (identical(class(model), "ets") && is.null(lambda)) { lambda <- model$lambda } if (!is.null(lambda)) { y <- BoxCox(y, lambda) lambda <- attr(y, "lambda") additive.only <- TRUE } if (nmse < 1 || nmse > 30) { stop("nmse out of range") } m <- frequency(y) if (any(upper < lower)) { stop("Lower limits must be less than upper limits") } # If model is an ets object, re-fit model to new data if ("ets" %in% class(model)) { # Prevent alpha being zero (to avoid divide by zero in the C code) alpha <- max(model$par["alpha"], 1e-10) beta <- model$par["beta"] if (is.na(beta)) { beta <- NULL } gamma <- model$par["gamma"] if (is.na(gamma)) { gamma <- NULL } phi <- model$par["phi"] if (is.na(phi)) { phi <- NULL } modelcomponents <- paste(model$components[1], model$components[2], model$components[3], sep = "") damped <- (model$components[4] == "TRUE") if (use.initial.values) { errortype <- substr(modelcomponents, 1, 1) trendtype <- substr(modelcomponents, 2, 2) seasontype <- substr(modelcomponents, 3, 3) # Recompute errors from pegelsresid.C e <- pegelsresid.C(y, m, model$initstate, errortype, trendtype, seasontype, damped, alpha, beta, gamma, phi, nmse) # Compute error measures np <- length(model$par) + 1 model$loglik <- -0.5 * e$lik model$aic <- e$lik + 2 * np model$bic <- e$lik + log(ny) * np model$aicc <- model$aic + 2 * np * (np + 1) / (ny - np - 1) model$mse <- e$amse[1] model$amse <- mean(e$amse) # Compute states, fitted values and residuals tsp.y <- tsp(y) model$states <- ts(e$states, frequency = tsp.y[3], start = tsp.y[1] - 1 / tsp.y[3]) colnames(model$states)[1] <- "l" if (trendtype != "N") { colnames(model$states)[2] <- "b" } if (seasontype != "N") { colnames(model$states)[(2 + (trendtype != "N")):ncol(model$states)] <- paste("s", 1:m, sep = "") } if (errortype == "A") { model$fitted <- ts(y - e$e, frequency = tsp.y[3], start = tsp.y[1]) } else { model$fitted <- ts(y / (1 + e$e), frequency = tsp.y[3], start = tsp.y[1]) } model$residuals <- ts(e$e, frequency = tsp.y[3], start = tsp.y[1]) model$sigma2 <- sum(model$residuals ^ 2, na.rm = TRUE) / (ny - np) model$x <- orig.y model$series <- seriesname if (!is.null(lambda)) { model$fitted <- InvBoxCox(model$fitted, lambda, biasadj, var(model$residuals)) attr(lambda, "biasadj") <- biasadj } model$lambda <- lambda # Return model object return(model) } else { model <- modelcomponents if (missing(use.initial.values)) { message("Model is being refit with current smoothing parameters but initial states are being re-estimated.\nSet 'use.initial.values=TRUE' if you want to re-use existing initial values.") } } } errortype <- substr(model, 1, 1) trendtype <- substr(model, 2, 2) seasontype <- substr(model, 3, 3) if (!is.element(errortype, c("M", "A", "Z"))) { stop("Invalid error type") } if (!is.element(trendtype, c("N", "A", "M", "Z"))) { stop("Invalid trend type") } if (!is.element(seasontype, c("N", "A", "M", "Z"))) { stop("Invalid season type") } if (m < 1 || length(y) <= m) { # warning("I can't handle data with frequency less than 1. Seasonality will be ignored.") seasontype <- "N" } if (m == 1) { if (seasontype == "A" || seasontype == "M") { stop("Nonseasonal data") } else { substr(model, 3, 3) <- seasontype <- "N" } } if (m > 24) { if (is.element(seasontype, c("A", "M"))) { stop("Frequency too high") } else if (seasontype == "Z") { warning("I can't handle data with frequency greater than 24. Seasonality will be ignored. 
Try stlf() if you need seasonal forecasts.") substr(model, 3, 3) <- seasontype <- "N" # m <- 1 } } # Check inputs if (restrict) { if ((errortype == "A" && (trendtype == "M" || seasontype == "M")) | (errortype == "M" && trendtype == "M" && seasontype == "A") || (additive.only && (errortype == "M" || trendtype == "M" || seasontype == "M"))) { stop("Forbidden model combination") } } data.positive <- (min(y) > 0) if (!data.positive && errortype == "M") { stop("Inappropriate model for data with negative or zero values") } if (!is.null(damped)) { if (damped && trendtype == "N") { stop("Forbidden model combination") } } n <- length(y) # Check we have enough data to fit a model npars <- 2L # alpha + l0 if (trendtype == "A" || trendtype == "M") { npars <- npars + 2L } # beta + b0 if (seasontype == "A" || seasontype == "M") { npars <- npars + m } # gamma + s if (!is.null(damped)) { npars <- npars + as.numeric(damped) } # Produce something non-optimized for tiny data sets if (n <= npars + 4L) { if (!is.null(damped)) { if (damped) { warning("Not enough data to use damping") } } if (seasontype == "A" || seasontype == "M") { fit <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = beta, gamma = gamma, phi = phi, exponential = (trendtype == "M"), seasonal = ifelse(seasontype != "A", "multiplicative", "additive"), lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) if (!("try-error" %in% class(fit))) { fit$call <- match.call() fit$method <- as.character(fit) fit$series <- deparse(substitute(y)) return(fit) } else { warning("Seasonal component could not be estimated") } } if (trendtype == "A" || trendtype == "M") { fit <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = beta, gamma = FALSE, phi = phi, exponential = (trendtype == "M"), lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) if (!("try-error" %in% class(fit))) { fit$call <- match.call() fit$method <- as.character(fit) fit$series <- deparse(substitute(y)) return(fit) } else { warning("Trend component could not be estimated") } } if (trendtype == "N" && seasontype == "N") { fit <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = FALSE, gamma = FALSE, lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) if (!("try-error" %in% class(fit))) { fit$call <- match.call() fit$method <- as.character(fit) fit$series <- deparse(substitute(y)) return(fit) } } # Try holt and ses and return best fit1 <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = beta, gamma = FALSE, phi = phi, exponential = (trendtype == "M"), lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) fit2 <- try(HoltWintersZZ( orig.y, alpha = alpha, beta = FALSE, gamma = FALSE, phi = phi, exponential = (trendtype == "M"), lambda = lambda, biasadj = biasadj, warnings = FALSE ), silent = TRUE) if ("try-error" %in% class(fit1)) { fit <- fit2 } else if (fit1$sigma2 < fit2$sigma2) { fit <- fit1 } else { fit <- fit2 } if("try-error" %in% class(fit)) stop("Unable to estimate a model.") fit$call <- match.call() fit$method <- as.character(fit) fit$series <- deparse(substitute(y)) return(fit) } # Fit model (assuming only one nonseasonal model) if (errortype == "Z") { errortype <- c("A", "M") } if (trendtype == "Z") { if (allow.multiplicative.trend) { trendtype <- c("N", "A", "M") } else { trendtype <- c("N", "A") } } if (seasontype == "Z") { seasontype <- c("N", "A", "M") } if (is.null(damped)) { damped <- c(TRUE, FALSE) } best.ic <- Inf for (i in 1:length(errortype)) { for (j in 1:length(trendtype)) { for (k in 
1:length(seasontype)) { for (l in 1:length(damped)) { if (trendtype[j] == "N" && damped[l]) { next } if (restrict) { if (errortype[i] == "A" && (trendtype[j] == "M" || seasontype[k] == "M")) { next } if (errortype[i] == "M" && trendtype[j] == "M" && seasontype[k] == "A") { next } if (additive.only && (errortype[i] == "M" || trendtype[j] == "M" || seasontype[k] == "M")) { next } } if (!data.positive && errortype[i] == "M") { next } fit <- try(etsmodel( y, errortype[i], trendtype[j], seasontype[k], damped[l], alpha, beta, gamma, phi, lower = lower, upper = upper, opt.crit = opt.crit, nmse = nmse, bounds = bounds, ... ), silent=TRUE) if(is.element("try-error", class(fit))) fit.ic <- Inf else fit.ic <- switch(ic, aic = fit$aic, bic = fit$bic, aicc = fit$aicc) if (!is.na(fit.ic)) { if (fit.ic < best.ic) { model <- fit best.ic <- fit.ic best.e <- errortype[i] best.t <- trendtype[j] best.s <- seasontype[k] best.d <- damped[l] } } } } } } if (best.ic == Inf) { stop("No model able to be fitted") } model$m <- m model$method <- paste("ETS(", best.e, ",", best.t, ifelse(best.d, "d", ""), ",", best.s, ")", sep = "") model$series <- seriesname model$components <- c(best.e, best.t, best.s, best.d) model$call <- match.call() model$initstate <- model$states[1, ] np <- length(model$par) model$sigma2 <- sum(model$residuals^2, na.rm = TRUE) / (ny - np) model$x <- orig.y if (!is.null(lambda)) { model$fitted <- InvBoxCox(model$fitted, lambda, biasadj, model$sigma2) attr(lambda, "biasadj") <- biasadj } model$lambda <- lambda # model$call$data <- dataname return(structure(model, class = "ets")) } #' @export as.character.ets <- function(x, ...) { paste( "ETS(", x$components[1], ",", x$components[2], ifelse(x$components[4], "d", ""), ",", x$components[3], ")", sep = "" ) } # myRequire <- function(libName) { # req.suc <- require(libName, quietly=TRUE, character.only=TRUE) # if(!req.suc) stop("The ",libName," package is not available.") # req.suc # } # getNewBounds <- function(par, lower, upper, nstate) { # myLower <- NULL # myUpper <- NULL # if("alpha" %in% names(par)) { # myLower <- c(myLower, lower[1]) # myUpper <- c(myUpper, upper[1]) # } # if("beta" %in% names(par)) { # myLower <- c(myLower, lower[2]) # myUpper <- c(myUpper, upper[2]) # } # if("gamma" %in% names(par)) { # myLower <- c(myLower, lower[3]) # myUpper <- c(myUpper, upper[3]) # } # if("phi" %in% names(par)) { # myLower <- c(myLower, lower[4]) # myUpper <- c(myUpper, upper[4]) # } # myLower <- c(myLower,rep(-1e8,nstate)) # myUpper <- c(myUpper,rep(1e8,nstate)) # list(lower=myLower, upper=myUpper) # } etsmodel <- function(y, errortype, trendtype, seasontype, damped, alpha=NULL, beta=NULL, gamma=NULL, phi=NULL, lower, upper, opt.crit, nmse, bounds, maxit=2000, control=NULL, seed=NULL, trace=FALSE) { tsp.y <- tsp(y) if (is.null(tsp.y)) { tsp.y <- c(1, length(y), 1) } if (seasontype != "N") { m <- tsp.y[3] } else { m <- 1 } # Modify limits if alpha, beta or gamma have been specified. 
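  # The usual ETS constraints (Hyndman et al, 2008) require 0 < beta < alpha
  # and 0 < gamma < 1 - alpha, so a fixed alpha caps the admissible ranges of
  # beta and gamma, while a fixed beta raises the lower limit on alpha and a
  # fixed gamma lowers its upper limit. For example, fixing alpha = 0.5
  # restricts the search to beta <= 0.5 and gamma <= 0.5.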
if (!is.null(alpha)) { upper[2] <- min(alpha, upper[2]) upper[3] <- min(1 - alpha, upper[3]) } if (!is.null(beta)) { lower[1] <- max(beta, lower[1]) } if (!is.null(gamma)) { upper[1] <- min(1 - gamma, upper[1]) } # Initialize smoothing parameters par <- initparam(alpha, beta, gamma, phi, trendtype, seasontype, damped, lower, upper, m, bounds) names(alpha) <- names(beta) <- names(gamma) <- names(phi) <- NULL par.noopt <- c(alpha = alpha, beta = beta, gamma = gamma, phi = phi) if (!is.null(par.noopt)) { par.noopt <- c(na.omit(par.noopt)) } if (!is.na(par["alpha"])) { alpha <- par["alpha"] } if (!is.na(par["beta"])) { beta <- par["beta"] } if (!is.na(par["gamma"])) { gamma <- par["gamma"] } if (!is.na(par["phi"])) { phi <- par["phi"] } # if(errortype=="M" | trendtype=="M" | seasontype=="M") # bounds="usual" if (!check.param(alpha, beta, gamma, phi, lower, upper, bounds, m)) { print(paste("Model: ETS(", errortype, ",", trendtype, ifelse(damped, "d", ""), ",", seasontype, ")", sep = "")) stop("Parameters out of range") } # Initialize state init.state <- initstate(y, trendtype, seasontype) nstate <- length(init.state) par <- c(par, init.state) lower <- c(lower, rep(-Inf, nstate)) upper <- c(upper, rep(Inf, nstate)) np <- length(par) if (np >= length(y) - 1) { # Not enough data to continue return(list(aic = Inf, bic = Inf, aicc = Inf, mse = Inf, amse = Inf, fit = NULL, par = par, states = init.state)) } # ------------------------------------------------- # if(is.null(seed)) seed <- 1000*runif(1) # if(solver=="malschains" || solver=="malschains_c") { # malschains <- NULL # if(!myRequire("Rmalschains")) # stop("malschains optimizer unavailable") # func <- NULL # #env <- NULL # if(solver=="malschains") { # func <- function(myPar) { # names(myPar) <- names(par) # res <- lik(myPar,y=y,nstate=nstate, errortype=errortype, trendtype=trendtype, # seasontype=seasontype, damped=damped, par.noopt=par.noopt, lowerb=lower, upperb=upper, # opt.crit=opt.crit, nmse=nmse, bounds=bounds, m=m,pnames=names(par),pnames2=names(par.noopt)) # res # } # env <- new.env() # } else { # env <- etsTargetFunctionInit(par=par, y=y, nstate=nstate, errortype=errortype, trendtype=trendtype, # seasontype=seasontype, damped=damped, par.noopt=par.noopt, lowerb=lower, upperb=upper, # opt.crit=opt.crit, nmse=nmse, bounds=bounds, m=m,pnames=names(par),pnames2=names(par.noopt)) # func <- .Call("etsGetTargetFunctionRmalschainsPtr", PACKAGE="forecast") # } # myBounds <- getNewBounds(par, lower, upper, nstate) # if(is.null(control)) { # control <- Rmalschains::malschains.control(ls="simplex", lsOnly=TRUE) # } # control$optimum <- if(opt.crit=="lik") -1e12 else 0 # fredTmp <- Rmalschains::malschains(func, env=env, lower=myBounds$lower, upper=myBounds$upper, # maxEvals=maxit, seed=seed, initialpop=par, control=control) # fred <- NULL # fred$par <- fredTmp$sol # fit.par <- fred$par # names(fit.par) <- names(par) # } else if (solver=="Rdonlp2") { # # donlp2 <- NULL # myRequire("Rdonlp2") # # env <- etsTargetFunctionInit(par=par, y=y, nstate=nstate, errortype=errortype, trendtype=trendtype, # seasontype=seasontype, damped=damped, par.noopt=par.noopt, lowerb=lower, upperb=upper, # opt.crit=opt.crit, nmse=nmse, bounds=bounds, m=m,pnames=names(par),pnames2=names(par.noopt)) # # func <- .Call("etsGetTargetFunctionRdonlp2Ptr", PACKAGE="forecast") # # myBounds <- getNewBounds(par, lower, upper, nstate) # # fred <- donlp2(par, func, env=env, par.lower=myBounds$lower, par.upper=myBounds$upper)#, nlin.lower=c(-1), nlin.upper=c(1)) #nlin.lower=c(0,-Inf, 
-Inf, -Inf), nlin.upper=c(0,0,0,0)) # # fit.par <- fred$par # # names(fit.par) <- names(par) # } else if(solver=="optim_c"){ env <- etsTargetFunctionInit( par = par, y = y, nstate = nstate, errortype = errortype, trendtype = trendtype, seasontype = seasontype, damped = damped, par.noopt = par.noopt, lowerb = lower, upperb = upper, opt.crit = opt.crit, nmse = as.integer(nmse), bounds = bounds, m = m, pnames = names(par), pnames2 = names(par.noopt) ) fred <- .Call( "etsNelderMead", par, env, -Inf, sqrt(.Machine$double.eps), 1.0, 0.5, 2.0, trace, maxit, PACKAGE = "forecast" ) fit.par <- fred$par names(fit.par) <- names(par) # } else { #if(solver=="optim") # # Optimize parameters and state # if(length(par)==1) # method <- "Brent" # else # method <- "Nelder-Mead" # fred <- optim(par,lik,method=method,y=y,nstate=nstate, errortype=errortype, trendtype=trendtype, # seasontype=seasontype, damped=damped, par.noopt=par.noopt, lowerb=lower, upperb=upper, # opt.crit=opt.crit, nmse=nmse, bounds=bounds, m=m,pnames=names(par),pnames2=names(par.noopt), # control=list(maxit=maxit)) # fit.par <- fred$par # names(fit.par) <- names(par) # } # ------------------------------------------------- init.state <- fit.par[(np - nstate + 1):np] # Add extra state if (seasontype != "N") { init.state <- c(init.state, m * (seasontype == "M") - sum(init.state[(2 + (trendtype != "N")):nstate])) } if (!is.na(fit.par["alpha"])) { alpha <- fit.par["alpha"] } if (!is.na(fit.par["beta"])) { beta <- fit.par["beta"] } if (!is.na(fit.par["gamma"])) { gamma <- fit.par["gamma"] } if (!is.na(fit.par["phi"])) { phi <- fit.par["phi"] } e <- pegelsresid.C(y, m, init.state, errortype, trendtype, seasontype, damped, alpha, beta, gamma, phi, nmse) np <- np + 1 ny <- length(y) aic <- e$lik + 2 * np bic <- e$lik + log(ny) * np aicc <- aic + 2 * np * (np + 1) / (ny - np - 1) mse <- e$amse[1] amse <- mean(e$amse) states <- ts(e$states, frequency = tsp.y[3], start = tsp.y[1] - 1 / tsp.y[3]) colnames(states)[1] <- "l" if (trendtype != "N") { colnames(states)[2] <- "b" } if (seasontype != "N") { colnames(states)[(2 + (trendtype != "N")):ncol(states)] <- paste("s", 1:m, sep = "") } tmp <- c("alpha", rep("beta", trendtype != "N"), rep("gamma", seasontype != "N"), rep("phi", damped)) fit.par <- c(fit.par, par.noopt) # fit.par <- fit.par[order(names(fit.par))] if (errortype == "A") { fits <- y - e$e } else { fits <- y / (1 + e$e) } return(list( loglik = -0.5 * e$lik, aic = aic, bic = bic, aicc = aicc, mse = mse, amse = amse, fit = fred, residuals = ts(e$e, frequency = tsp.y[3], start = tsp.y[1]), fitted = ts(fits, frequency = tsp.y[3], start = tsp.y[1]), states = states, par = fit.par )) } etsTargetFunctionInit <- function(par, y, nstate, errortype, trendtype, seasontype, damped, par.noopt, lowerb, upperb, opt.crit, nmse, bounds, m, pnames, pnames2) { names(par) <- pnames names(par.noopt) <- pnames2 alpha <- c(par["alpha"], par.noopt["alpha"])["alpha"] if (is.na(alpha)) { stop("alpha problem!") } if (trendtype != "N") { beta <- c(par["beta"], par.noopt["beta"])["beta"] if (is.na(beta)) { stop("beta Problem!") } } else { beta <- NULL } if (seasontype != "N") { gamma <- c(par["gamma"], par.noopt["gamma"])["gamma"] if (is.na(gamma)) { stop("gamma Problem!") } } else { m <- 1 gamma <- NULL } if (damped) { phi <- c(par["phi"], par.noopt["phi"])["phi"] if (is.na(phi)) { stop("phi Problem!") } } else { phi <- NULL } # determine which values to optimize and which ones are given by the user/not needed optAlpha <- !is.null(alpha) optBeta <- !is.null(beta) 
optGamma <- !is.null(gamma) optPhi <- !is.null(phi) givenAlpha <- FALSE givenBeta <- FALSE givenGamma <- FALSE givenPhi <- FALSE if (!is.null(par.noopt["alpha"])) { if (!is.na(par.noopt["alpha"])) { optAlpha <- FALSE givenAlpha <- TRUE } } if (!is.null(par.noopt["beta"])) { if (!is.na(par.noopt["beta"])) { optBeta <- FALSE givenBeta <- TRUE } } if (!is.null(par.noopt["gamma"])) { if (!is.na(par.noopt["gamma"])) { optGamma <- FALSE givenGamma <- TRUE } } if (!is.null(par.noopt["phi"])) { if (!is.na(par.noopt["phi"])) { optPhi <- FALSE givenPhi <- TRUE } } if (!damped) { phi <- 1 } if (trendtype == "N") { beta <- 0 } if (seasontype == "N") { gamma <- 0 } # cat("alpha: ", alpha) # cat(" beta: ", beta) # cat(" gamma: ", gamma) # cat(" phi: ", phi, "\n") # # cat("useAlpha: ", useAlpha) # cat(" useBeta: ", useBeta) # cat(" useGamma: ", useGamma) # cat(" usePhi: ", usePhi, "\n") env <- new.env() res <- .Call( "etsTargetFunctionInit", y = y, nstate = nstate, errortype = switch(errortype, "A" = 1, "M" = 2), trendtype = switch(trendtype, "N" = 0, "A" = 1, "M" = 2), seasontype = switch(seasontype, "N" = 0, "A" = 1, "M" = 2), damped = damped, lowerb = lowerb, upperb = upperb, opt.crit = opt.crit, nmse = as.integer(nmse), bounds = bounds, m = m, optAlpha, optBeta, optGamma, optPhi, givenAlpha, givenBeta, givenGamma, givenPhi, alpha, beta, gamma, phi, env, PACKAGE = "forecast" ) res } initparam <- function(alpha, beta, gamma, phi, trendtype, seasontype, damped, lower, upper, m, bounds) { if(bounds == "admissible") { lower[1L:3L] <- lower[1L:3L]*0 upper[1L:3L] <- upper[1L:3L]*0 + 1e-3 } else if (any(lower > upper)) { stop("Inconsistent parameter boundaries") } # Select alpha if (is.null(alpha)) { alpha <- lower[1] + 0.2 * (upper[1] - lower[1]) / m if (alpha > 1 || alpha < 0) { alpha <- lower[1] + 2e-3 } par <- c(alpha = alpha) } else { par <- numeric(0) } # Select beta if (trendtype != "N" && is.null(beta)) { # Ensure beta < alpha upper[2] <- min(upper[2], alpha) beta <- lower[2] + 0.1 * (upper[2] - lower[2]) if (beta < 0 || beta > alpha) { beta <- alpha - 1e-3 } par <- c(par, beta = beta) } # Select gamma if (seasontype != "N" && is.null(gamma)) { # Ensure gamma < 1-alpha upper[3] <- min(upper[3], 1 - alpha) gamma <- lower[3] + 0.05 * (upper[3] - lower[3]) if (gamma < 0 || gamma > 1 - alpha) { gamma <- 1 - alpha - 1e-3 } par <- c(par, gamma = gamma) } # Select phi if (damped && is.null(phi)) { phi <- lower[4] + .99 * (upper[4] - lower[4]) if (phi < 0 || phi > 1) { phi <- upper[4] - 1e-3 } par <- c(par, phi = phi) } return(par) } check.param <- function(alpha, beta, gamma, phi, lower, upper, bounds, m) { if (bounds != "admissible") { if (!is.null(alpha)) { if (alpha < lower[1] || alpha > upper[1]) { return(0) } } if (!is.null(beta)) { if (beta < lower[2] || beta > alpha || beta > upper[2]) { return(0) } } if (!is.null(phi)) { if (phi < lower[4] || phi > upper[4]) { return(0) } } if (!is.null(gamma)) { if (gamma < lower[3] || gamma > 1 - alpha || gamma > upper[3]) { return(0) } } } if (bounds != "usual") { if (!admissible(alpha, beta, gamma, phi, m)) { return(0) } } return(1) } initstate <- function(y, trendtype, seasontype) { if (seasontype != "N") { # Do decomposition m <- frequency(y) n <- length(y) if (n < 4) { stop("You've got to be joking (not enough data).") } else if (n < 3 * m) # Fit simple Fourier model. { fouriery <- fourier(y, 1) fit <- tslm(y ~ trend + fouriery) if (seasontype == "A") { y.d <- list(seasonal = y - fit$coefficients[1] - fit$coefficients[2] * (1:n)) } else { # seasontype=="M". 
Biased method, but we only need a starting point y.d <- list(seasonal = y / (fit$coefficients[1] + fit$coefficients[2] * (1:n))) } } else { # n is large enough to do a decomposition y.d <- decompose(y, type = switch(seasontype, A = "additive", M = "multiplicative")) } init.seas <- rev(y.d$seasonal[2:m]) # initial seasonal component names(init.seas) <- paste("s", 0:(m - 2), sep = "") # Seasonally adjusted data if (seasontype == "A") { y.sa <- y - y.d$seasonal } else { init.seas <- pmax(init.seas, 1e-2) # We do not want negative seasonal indexes if (sum(init.seas) > m) { init.seas <- init.seas / sum(init.seas + 1e-2) } y.sa <- y / pmax(y.d$seasonal, 1e-2) } } else # non-seasonal model { m <- 1 init.seas <- NULL y.sa <- y } maxn <- min(max(10, 2 * m), length(y.sa)) if (trendtype == "N") { l0 <- mean(y.sa[1:maxn]) b0 <- NULL } else # Simple linear regression on seasonally adjusted data { fit <- lsfit(1:maxn, y.sa[1:maxn]) if (trendtype == "A") { l0 <- fit$coefficients[1] b0 <- fit$coefficients[2] # If error type is "M", then we don't want l0+b0=0. # So perturb just in case. if (abs(l0 + b0) < 1e-8) { l0 <- l0 * (1 + 1e-3) b0 <- b0 * (1 - 1e-3) } } else # if(trendtype=="M") { l0 <- fit$coefficients[1] + fit$coefficients[2] # First fitted value if (abs(l0) < 1e-8) { l0 <- 1e-7 } b0 <- (fit$coefficients[1] + 2 * fit$coefficients[2]) / l0 # Ratio of first two fitted values l0 <- l0 / b0 # First fitted value divided by b0 if (abs(b0) > 1e10) { # Avoid infinite slopes b0 <- sign(b0) * 1e10 } if (l0 < 1e-8 || b0 < 1e-8) # Simple linear approximation didn't work. { l0 <- max(y.sa[1], 1e-3) b0 <- max(y.sa[2] / y.sa[1], 1e-3) } } } names(l0) <- "l" if (!is.null(b0)) { names(b0) <- "b" } return(c(l0, b0, init.seas)) } lik <- function(par, y, nstate, errortype, trendtype, seasontype, damped, par.noopt, lowerb, upperb, opt.crit, nmse, bounds, m, pnames, pnames2) { # browser() # cat("par: ", par, "\n") names(par) <- pnames names(par.noopt) <- pnames2 alpha <- c(par["alpha"], par.noopt["alpha"])["alpha"] if (is.na(alpha)) { stop("alpha problem!") } if (trendtype != "N") { beta <- c(par["beta"], par.noopt["beta"])["beta"] if (is.na(beta)) { stop("beta Problem!") } } else { beta <- NULL } if (seasontype != "N") { gamma <- c(par["gamma"], par.noopt["gamma"])["gamma"] if (is.na(gamma)) { stop("gamma Problem!") } } else { m <- 1 gamma <- NULL } if (damped) { phi <- c(par["phi"], par.noopt["phi"])["phi"] if (is.na(phi)) { stop("phi Problem!") } } else { phi <- NULL } if (!check.param(alpha, beta, gamma, phi, lowerb, upperb, bounds, m)) { return(Inf) } np <- length(par) init.state <- par[(np - nstate + 1):np] # Add extra state if (seasontype != "N") { init.state <- c(init.state, m * (seasontype == "M") - sum(init.state[(2 + (trendtype != "N")):nstate])) } # Check states if (seasontype == "M") { seas.states <- init.state[-(1:(1 + (trendtype != "N")))] if (min(seas.states) < 0) { return(Inf) } } e <- pegelsresid.C(y, m, init.state, errortype, trendtype, seasontype, damped, alpha, beta, gamma, phi, nmse) if (is.na(e$lik)) { return(Inf) } if (e$lik < -1e10) { # Avoid perfect fits return(-1e10) } # cat("lik: ", e$lik, "\n") # points(alpha,e$lik,col=2) if (opt.crit == "lik") { return(e$lik) } else if (opt.crit == "mse") { return(e$amse[1]) } else if (opt.crit == "amse") { return(mean(e$amse)) } else if (opt.crit == "sigma") { return(mean(e$e ^ 2)) } else if (opt.crit == "mae") { return(mean(abs(e$e))) } } #' @export print.ets <- function(x, ...) 
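# Lays out the fitted model in the standard ETS display: the method string
# (e.g. "ETS(M,Ad,M)"), the call, any Box-Cox lambda, the smoothing
# parameters, the initial states, sigma, and the AIC/AICc/BIC table.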
{ cat(paste(x$method, "\n\n")) if(!is.null(x$call)) { cat("Call:", deparse(x$call), "", sep = "\n") } ncoef <- length(x$initstate) if (!is.null(x$lambda)) { cat(" Box-Cox transformation: lambda=", round(x$lambda, 4), "\n\n") } cat(" Smoothing parameters:\n") cat(paste(" alpha =", round(x$par["alpha"], 4), "\n")) if (x$components[2] != "N") { cat(paste(" beta =", round(x$par["beta"], 4), "\n")) } if (x$components[3] != "N") { cat(paste(" gamma =", round(x$par["gamma"], 4), "\n")) } if (x$components[4] != "FALSE") { cat(paste(" phi =", round(x$par["phi"], 4), "\n")) } cat("\n Initial states:\n") cat(paste(" l =", round(x$initstate[1], 4), "\n")) if (x$components[2] != "N") { cat(paste(" b =", round(x$initstate[2], 4), "\n")) } else { x$initstate <- c(x$initstate[1], NA, x$initstate[2:ncoef]) ncoef <- ncoef + 1 } if (x$components[3] != "N") { cat(" s = ") if (ncoef <= 8) { cat(round(x$initstate[3:ncoef], 4)) } else { cat(round(x$initstate[3:8], 4)) cat("\n ") cat(round(x$initstate[9:ncoef], 4)) } cat("\n") } cat("\n sigma: ") cat(round(sqrt(x$sigma2), 4)) if (!is.null(x$aic)) { stats <- c(x$aic, x$aicc, x$bic) names(stats) <- c("AIC", "AICc", "BIC") cat("\n\n") print(stats) } # cat("\n AIC: ") # cat(round(x$aic,4)) # cat("\n AICc: ") # cat(round(x$aicc,4)) # cat("\n BIC: ") # cat(round(x$bic,4)) } pegelsresid.C <- function(y, m, init.state, errortype, trendtype, seasontype, damped, alpha, beta, gamma, phi, nmse) { n <- length(y) p <- length(init.state) x <- numeric(p * (n + 1)) x[1:p] <- init.state e <- numeric(n) lik <- 0 if (!damped) { phi <- 1 } if (trendtype == "N") { beta <- 0 } if (seasontype == "N") { gamma <- 0 } amse <- numeric(nmse) Cout <- .C( "etscalc", as.double(y), as.integer(n), as.double(x), as.integer(m), as.integer(switch(errortype, "A" = 1, "M" = 2)), as.integer(switch(trendtype, "N" = 0, "A" = 1, "M" = 2)), as.integer(switch(seasontype, "N" = 0, "A" = 1, "M" = 2)), as.double(alpha), as.double(beta), as.double(gamma), as.double(phi), as.double(e), as.double(lik), as.double(amse), as.integer(nmse), PACKAGE = "forecast" ) if (!is.na(Cout[[13]])) { if (abs(Cout[[13]] + 99999) < 1e-7) { Cout[[13]] <- NA } } tsp.y <- tsp(y) e <- ts(Cout[[12]]) tsp(e) <- tsp.y return(list(lik = Cout[[13]], amse = Cout[[14]], e = e, states = matrix(Cout[[3]], nrow = n + 1, ncol = p, byrow = TRUE))) } admissible <- function(alpha, beta, gamma, phi, m) { if (is.null(phi)) { phi <- 1 } if (phi < 0 || phi > 1 + 1e-8) { return(0) } if (is.null(gamma)) { if (alpha < 1 - 1 / phi || alpha > 1 + 1 / phi) { return(0) } if (!is.null(beta)) { if (beta < alpha * (phi - 1) || beta > (1 + phi) * (2 - alpha)) { return(0) } } } else if (m > 1) # Seasonal model { if (is.null(beta)) { beta <- 0 } if (gamma < max(1 - 1 / phi - alpha, 0) || gamma > 1 + 1 / phi - alpha) { return(0) } if (alpha < 1 - 1 / phi - gamma * (1 - m + phi + phi * m) / (2 * phi * m)) { return(0) } if (beta < -(1 - phi) * (gamma / m + alpha)) { return(0) } # End of easy tests. Now use characteristic equation P <- c(phi * (1 - alpha - gamma), alpha + beta - alpha * phi + gamma - 1, rep(alpha + beta - alpha * phi, m - 2), (alpha + beta - phi), 1) roots <- polyroot(P) # cat("maxpolyroots: ", max(abs(roots)), "\n") if (max(abs(roots)) > 1 + 1e-10) { return(0) } } # Passed all tests return(1) } ### PLOT COMPONENTS #' Plot components from ETS model #' #' Produces a plot of the level, slope and seasonal components from an ETS #' model. #' #' \code{autoplot} will produce an equivalent plot as a ggplot object. #' #' @param x Object of class \dQuote{ets}. 
#' @param object Object of class \dQuote{ets}. Used for ggplot graphics (S3 #' method consistency). #' @param range.bars Logical indicating if each plot should have a bar at its #' right side representing relative size. If NULL, automatic selection takes #' place. #' @param ... Other plotting parameters to affect the plot. #' @return None. Function produces a plot #' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso \code{\link{ets}} #' @keywords hplot #' @examples #' #' fit <- ets(USAccDeaths) #' plot(fit) #' plot(fit,plot.type="single",ylab="",col=1:3) #' #' library(ggplot2) #' autoplot(fit) #' #' @export plot.ets <- function(x, ...) { if (!is.null(x$lambda)) { y <- BoxCox(x$x, x$lambda) } else { y <- x$x } if (x$components[3] == "N" && x$components[2] == "N") { plot( cbind(observed = y, level = x$states[, 1]), main = paste("Decomposition by", x$method, "method"), ... ) } else if (x$components[3] == "N") { plot( cbind(observed = y, level = x$states[, 1], slope = x$states[, "b"]), main = paste("Decomposition by", x$method, "method"), ... ) } else if (x$components[2] == "N") { plot( cbind(observed = y, level = x$states[, 1], season = x$states[, "s1"]), main = paste("Decomposition by", x$method, "method"), ... ) } else { plot( cbind( observed = y, level = x$states[, 1], slope = x$states[, "b"], season = x$states[, "s1"] ), main = paste("Decomposition by", x$method, "method"), ... ) } } #' @export summary.ets <- function(object, ...) { class(object) <- c("summary.ets", class(object)) object } #' @export print.summary.ets <- function(x, ...) { NextMethod() cat("\nTraining set error measures:\n") print(accuracy(x)) } #' @export coef.ets <- function(object, ...) { object$par } #' @rdname fitted.Arima #' @export fitted.ets <- function(object, h=1, ...) { if (h == 1) { return(object$fitted) } else { return(hfitted(object = object, h = h, FUN = "ets", ...)) } } #' @export hfitted.ets <- function(object, h=1, ...) { n <- length(object$x) out <- rep(NA_real_, n) for(i in seq_len(n-h+1)) { out[i+h-1] <- .C( "etsforecast", as.double(object$states[i, ]), as.integer(object$m), as.integer(switch(object$components[2], "N" = 0, "A" = 1, "M" = 2)), as.integer(switch(object$components[3], "N" = 0, "A" = 1, "M" = 2)), as.double(ifelse(object$components[4] == "FALSE", 1, object$par["phi"])), as.integer(h), as.double(numeric(h)), PACKAGE = "forecast" )[[7]][h] } out } #' @export logLik.ets <- function(object, ...) { structure(object$loglik, df = length(object$par) + 1, class = "logLik") } #' @export nobs.ets <- function(object, ...) { length(object$x) } #' Is an object a particular model type? #' #' Returns true if the model object is of a particular type #' #' @param x object to be tested #' @export is.ets <- function(x) { inherits(x, "ets") } forecast/R/attach.R0000644000176200001440000000561114633662406013665 0ustar liggesusers.onAttach <- function(...) { if (!interactive() || withr::with_preserve_seed(stats::runif(1)) > 0.2) return() tips <- c( "Use suppressPackageStartupMessages() to eliminate package startup messages.", "Stackoverflow is a great place to get help on R issues:\n http://stackoverflow.com/tags/forecasting+r.", "Crossvalidated is a great place to get help on forecasting issues:\n http://stats.stackexchange.com/tags/forecasting.", "Need help getting started? Try the online textbook FPP:\n http://otexts.com/fpp2/", "Want to stay up-to-date? Read the Hyndsight blog:\n https://robjhyndman.com/hyndsight/", "Want to meet other forecasters? 
Join the International Institute of Forecasters:\n http://forecasters.org/" ) tip <- withr::with_preserve_seed(sample(tips, 1)) msg <- paste("This is forecast", packageVersion("forecast"), "\n ", tip) packageStartupMessage(msg) } register_s3_method <- function(pkg, generic, class, fun = NULL) { stopifnot(is.character(pkg), length(pkg) == 1) stopifnot(is.character(generic), length(generic) == 1) stopifnot(is.character(class), length(class) == 1) if (is.null(fun)) { fun <- get(paste0(generic, ".", class), envir = parent.frame()) } else { stopifnot(is.function(fun)) } if (pkg %in% loadedNamespaces()) { registerS3method(generic, class, fun, envir = asNamespace(pkg)) } # Always register hook in case package is later unloaded & reloaded setHook( packageEvent(pkg, "onLoad"), function(...) { registerS3method(generic, class, fun, envir = asNamespace(pkg)) } ) } overwrite_s3_generic <- function(pkg, generic){ if (pkg %in% loadedNamespaces()) { assign(generic, get(generic, asNamespace(pkg)), envir = asNamespace("forecast")) } # Always register hook in case package is later unloaded & reloaded # setHook( # packageEvent(pkg, "onLoad"), # function(...) { # pkg_env <- asNamespace("forecast") # unlockBinding(generic, pkg_env) # assign(generic, get(generic, asNamespace(pkg)), envir = pkg_env) # lockBinding(generic, pkg_env) # } # ) } #' @importFrom utils methods .onLoad <- function(...) { overwrite_s3_generic("ggplot2", "autolayer") register_s3_method("ggplot2", "autolayer", "ts") register_s3_method("ggplot2", "autolayer", "mts") register_s3_method("ggplot2", "autolayer", "msts") register_s3_method("ggplot2", "autolayer", "forecast") register_s3_method("ggplot2", "autolayer", "mforecast") # methods <- strsplit(utils::.S3methods(forecast), ".", fixed = TRUE) # overwrite_s3_generic("fabletools", "forecast") # for(method in methods){ # register_s3_method("fabletools", method[1], method[2]) # } # methods <- strsplit(utils::.S3methods(accuracy), ".", fixed = TRUE) # overwrite_s3_generic("fabletools", "accuracy") # for(method in methods){ # register_s3_method("fabletools", method[1], method[2]) # } invisible() } forecast/R/data.R0000644000176200001440000000321014150370574013317 0ustar liggesusers #' Australian monthly gas production #' #' Australian monthly gas production: 1956--1995. #' #' #' @format Time series data #' @source Australian Bureau of Statistics. #' @keywords datasets #' @examples #' plot(gas) #' seasonplot(gas) #' tsdisplay(gas) #' "gas" #' Daily morning gold prices #' #' Daily morning gold prices in US dollars. 1 January 1985 -- 31 March 1989. #' #' #' @format Time series data #' @keywords datasets #' @examples #' tsdisplay(gold) #' "gold" #' Half-hourly electricity demand #' #' Half-hourly electricity demand in England and Wales from Monday 5 June 2000 #' to Sunday 27 August 2000. Discussed in Taylor (2003), and kindly provided by #' James W Taylor. Units: Megawatts #' #' #' @format Time series data #' @references Taylor, J.W. (2003) Short-term electricity demand forecasting #' using double seasonal exponential smoothing. \emph{Journal of the #' Operational Research Society}, \bold{54}, 799-805. #' @source James W Taylor #' @keywords datasets #' @examples #' plot(taylor) #' "taylor" #' Australian total wine sales #' #' Australian total wine sales by wine makers in bottles <= 1 litre. Jan 1980 #' -- Aug 1994. #' #' #' @format Time series data #' @source Time Series Data Library. 
\url{https://pkg.yangzhuoranyang.com/tsdl/} #' @keywords datasets #' @examples #' tsdisplay(wineind) #' "wineind" #' Quarterly production of woollen yarn in Australia #' #' Quarterly production of woollen yarn in Australia: tonnes. Mar 1965 -- Sep #' 1994. #' #' #' @format Time series data #' @source Time Series Data Library. \url{https://pkg.yangzhuoranyang.com/tsdl/} #' @keywords datasets #' @examples #' tsdisplay(woolyrnq) #' "woolyrnq" forecast/R/baggedModel.R0000644000176200001440000001706714323125536014615 0ustar liggesusers## #' Forecasting using a bagged model #' #' The bagged model forecasting method. #' #' This function implements the bagged model forecasting method described in #' Bergmeir et al. By default, the \code{\link{ets}} function is applied to all #' bootstrapped series. Base models other than \code{\link{ets}} can be given by the #' parameter \code{fn}. Using the default parameters, the function #' \code{\link{bld.mbb.bootstrap}} is used to calculate the bootstrapped series #' with the Box-Cox and Loess-based decomposition (BLD) bootstrap. The function #' \code{\link{forecast.baggedModel}} can then be used to calculate forecasts. #' #' \code{baggedETS} is a wrapper for \code{baggedModel}, setting \code{fn} to "ets". #' This function is included for backwards compatibility only, and may be #' deprecated in the future. #' #' @aliases print.baggedModel #' #' @param y A numeric vector or time series of class \code{ts}. #' @param bootstrapped_series bootstrapped versions of y. #' @param fn the forecast function to use. Default is \code{\link{ets}}. #' @param \dots Other arguments passed to the forecast function. #' @return Returns an object of class "\code{baggedModel}". #' #' The function \code{print} is used to obtain and print a summary of the #' results. #' #' \item{models}{A list containing the fitted ensemble models.} #' \item{method}{The function for producing a forecastable model.} #' \item{y}{The original time series.} #' \item{bootstrapped_series}{The bootstrapped series.} #' \item{modelargs}{The arguments passed through to \code{fn}.} #' \item{fitted}{Fitted values (one-step forecasts). The #' mean of the fitted values is calculated over the ensemble.} #' \item{residuals}{Original values minus fitted values.} #' @author Christoph Bergmeir, Fotios Petropoulos #' @references Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging #' Exponential Smoothing Methods using STL Decomposition and Box-Cox #' Transformation. International Journal of Forecasting 32, 303-312. #' @keywords ts #' @examples #' fit <- baggedModel(WWWusage) #' fcast <- forecast(fit) #' plot(fcast) #' #' @export baggedModel <- function(y, bootstrapped_series=bld.mbb.bootstrap(y, 100), fn=ets, ...) { # Add package info in case forecast not loaded if(!is.function(fn)){ warning(paste0("Using character specification for `fn` is deprecated. Please use `fn = ", match.arg(fn,c("ets", "auto.arima")), "`.")) fn <- utils::getFromNamespace(match.arg(fn,c("ets", "auto.arima")), "forecast") } mod_boot <- lapply(bootstrapped_series, function(x) { mod <- fn(x, ...) }) # Return results out <- list() out$y <- as.ts(y) out$bootstrapped_series <- bootstrapped_series out$models <- mod_boot out$modelargs <- list(...) 
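  # The ensemble fit is the pointwise mean of the member fits: fitted_boot
  # below is an n x length(models) matrix (one column per bootstrap model),
  # and its row means give the bagged fitted values; the residuals are the
  # original series minus that mean.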
fitted_boot <- lapply(out$models, fitted) fitted_boot <- as.matrix(as.data.frame(fitted_boot)) out$fitted <- ts(apply(fitted_boot, 1, mean)) tsp(out$fitted) <- tsp(out$y) out$residuals <- out$y - out$fitted out$series <- deparse(substitute(y)) out$method <- "baggedModel" out$call <- match.call() return(structure(out, class = c("baggedModel"))) } #' @rdname baggedModel #' @export baggedETS <- function(y, bootstrapped_series=bld.mbb.bootstrap(y, 100), ...) { out <- baggedModel(y, bootstrapped_series, fn = ets, ...) class(out) <- c("baggedETS", class(out)) out } #' Forecasting using a bagged model #' #' Returns forecasts and other information for bagged models. #' #' Intervals are calculated as min and max values over the point forecasts from #' the models in the ensemble. I.e., the intervals are not prediction #' intervals, but give an indication of how different the forecasts within the #' ensemble are. #' #' @param object An object of class "\code{baggedModel}" resulting from a call to #' \code{\link{baggedModel}}. #' @param h Number of periods for forecasting. #' @param ... Other arguments, passed on to the \code{\link{forecast}} function of the original method #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' An object of class "\code{forecast}" is a list containing at least the #' following elements: #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} #' \item{upper}{Upper limits for prediction intervals} #' \item{level}{The confidence values associated with the prediction intervals} #' \item{x}{The original time series (either \code{object} itself or the #' time series used to create the model stored as \code{object}).} #' \item{xreg}{The external regressors used in fitting (if given).} #' \item{residuals}{Residuals from the fitted model. That #' is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' @author Christoph Bergmeir, Fotios Petropoulos #' @seealso \code{\link{baggedModel}}. #' @references Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging #' Exponential Smoothing Methods using STL Decomposition and Box-Cox #' Transformation. International Journal of Forecasting 32, 303-312. #' @keywords ts #' @examples #' fit <- baggedModel(WWWusage) #' fcast <- forecast(fit) #' plot(fcast) #' #' \dontrun{ #' fit2 <- baggedModel(WWWusage, fn="auto.arima") #' fcast2 <- forecast(fit2) #' plot(fcast2) #' accuracy(fcast2)} #' #' @export forecast.baggedModel <- function(object, h=ifelse(frequency(object$y) > 1, 2 * frequency(object$y), 10), ...) 
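# The point forecast is the horizon-wise mean of the members' point
# forecasts; the lower/upper bounds are the ensemble min and max, a spread
# measure rather than true prediction intervals. An illustrative sketch,
# mirroring the documented example:
#   fc <- forecast(baggedModel(WWWusage), h = 10)
#   fc$upper - fc$lower # ensemble range at each horizon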
{ out <- list( model = object, series = object$series, x = object$y, method = object$method, fitted = object$fitted, residuals = object$residuals ) # out <- object tspx <- tsp(out$x) forecasts_boot <- lapply(out$model$models, function(mod) { if (inherits(mod, "ets")) { forecast(mod, PI = FALSE, h = h, ...)$mean } else { forecast(mod, h = h, ...)$mean } }) forecasts_boot <- as.matrix(as.data.frame(forecasts_boot)) colnames(forecasts_boot) <- NULL if (!is.null(tspx)) { start.f <- tspx[2] + 1 / frequency(out$x) } else { start.f <- length(out$x) + 1 } # out <- list() out$forecasts_boot <- forecasts_boot # browser() # out$model$models out$mean <- ts(apply(forecasts_boot, 1, mean), frequency = frequency(out$x), start = start.f) out$median <- ts(apply(forecasts_boot, 1, median)) out$lower <- ts(apply(forecasts_boot, 1, min)) out$upper <- ts(apply(forecasts_boot, 1, max)) out$level <- 100 tsp(out$median) <- tsp(out$lower) <- tsp(out$upper) <- tsp(out$mean) class(out) <- "forecast" out } # fitted.baggedModel <- function(object, h=1, accum_func=mean, ...){ # # fitted_boot <- lapply(object$models, fitted, h) # fitted_boot <- as.matrix(as.data.frame(fitted_boot)) # fitted_boot <- apply(fitted_boot, 2, accum_func) # fitted_boot # } # residuals.baggedModel <- function(object, h=1, ...){ # # residuals_boot <- lapply(object$models, residuals, h) # residuals_boot <- as.matrix(as.data.frame(residuals_boot)) # residuals_boot # # #Alternative implementation: # #object$x - fitted(object, h) # } #' @export print.baggedModel <- function(x, digits = max(3, getOption("digits") - 3), ...) { cat("Series:", x$series, "\n") cat("Model: ", x$method, "\n") cat("Call: ") print(x$call) # print(x$model) # cat("\nsigma^2 estimated as ", format(mean(residuals(x)^2,na.rm=TRUE), digits = digits), "\n", sep = "") invisible(x) } #' @rdname is.ets #' @export is.baggedModel <- function(x) { inherits(x, "baggedModel") } forecast/R/wrangle.R0000644000176200001440000000320114150370574014045 0ustar liggesuserstoMat <- function(x) { if (NCOL(x) > 1 && !is.matrix(x)) { x <- matrix(x, ncol = NCOL(x)) } return(x) } # Converts arguments into data.frame, whilst retaining mts/ts/matrix properties datamat <- function(..., flatten=TRUE, functions=TRUE) { vars <- list(...) 
if (length(vars) == 0) { return(data.frame()) } if (!is.null(names(vars))) { names(vars)[!nzchar(names(vars))] <- as.character(substitute(list(...))[-1])[!nzchar(names(vars))] } else { names(vars) <- as.character(substitute(list(...))[-1]) } if (flatten) { i <- 1 while (i <= length(vars)) { if (is.data.frame(vars[[i]])) { vars <- c(vars, c(vars[[i]])) # Append data.frame components vars[[i]] <- NULL # Remove data.frame } else if (is.matrix(vars[[i]])) { for (j in 1:NCOL(vars[[i]])) { vars[[length(vars) + 1]] <- vars[[i]][, j] names(vars)[length(vars)] <- make.names(colnames(vars[[i]])[j]) } i <- i + 1 } else { i <- i + 1 } } } class(vars) <- "data.frame" row.names(vars) <- 1:max(sapply(vars, NROW)) # if(is.ts(vars[,1])){ # if(NCOL(vars)>1){ # class(vars) <- c(class(vars),"mts") # } # class(vars) <- c(class(vars),"ts") # tspx <- unique(sapply(vars,tsp), MARGIN = 2) # if(length(tspx)==3){ # attr(vars, "tsp") <- tspx # } # } return(vars) } recoverTSP <- function(times.x) { freq <- sort(unique(round(times.x %% 1, digits = 6))) # The subset cannot increase frequency freq <- length(freq) return(c(min(times.x), min(times.x) + (length(times.x) - 1) / freq, freq)) } forecast/R/residuals.R0000644000176200001440000001377114150370574014416 0ustar liggesusers#' Residuals for various time series models #' #' Returns time series of residuals from a fitted model. #' #' Innovation residuals correspond to the white noise process that drives the #' evolution of the time series model. Response residuals are the difference #' between the observations and the fitted values (equivalent to \code{h}-step #' forecasts). For functions with no \code{h} argument, \code{h=1}. For #' homoscedastic models, the innovation residuals and the response residuals #' for \code{h=1} are identical. Regression residuals are available for #' regression models with ARIMA errors, and are equal to the original data #' minus the effect of the regression variables. If there are no regression #' variables, the errors will be identical to the original series (possibly #' adjusted to have zero mean). \code{arima.errors} is a deprecated function #' which is identical to \code{residuals.Arima(object, type="regression")}. #' For \code{nnetar} objects, when \code{type="innovations"} and \code{lambda} is used, a #' matrix of time-series consisting of the residuals from each of the fitted neural networks is returned. #' #' @param object An object containing a time series model of class \code{ar}, #' \code{Arima}, \code{bats}, \code{ets}, \code{arfima}, \code{nnetar} or #' \code{stlm}. #' If \code{object} is of class \code{forecast}, then the function will return #' \code{object$residuals} if it exists, otherwise it returns the differences between #' the observations and their fitted values. #' @param type Type of residual. #' @param h If \code{type='response'}, then the fitted values are computed for #' \code{h}-step forecasts. #' @param ... Other arguments not used. #' @return A \code{ts} object. #' @author Rob J Hyndman #' @seealso \code{\link{fitted.Arima}}, \code{\link{checkresiduals}}. #' @keywords ts #' #' @export residuals.forecast <- function(object, type=c("innovation", "response"), ...) { type <- match.arg(type) if (type == "innovation") { object$residuals } else { getResponse(object) - fitted(object) } } #' @rdname residuals.forecast #' @export residuals.ar <- function(object, type=c("innovation", "response"), ...) 
{ type <- match.arg(type) # innovation and response residuals are the same for AR models object$resid } #' @rdname residuals.forecast #' #' @aliases residuals.forecast_ARIMA #' @examples #' fit <- Arima(lynx,order=c(4,0,0), lambda=0.5) #' #' plot(residuals(fit)) #' plot(residuals(fit, type='response')) #' @export residuals.Arima <- function(object, type=c("innovation", "response", "regression"), h=1, ...) { type <- match.arg(type) if (type == "innovation") { object$residuals } else if (type == "response") { getResponse(object) - fitted(object, h = h) } else { x <- getResponse(object) if (!is.null(object$lambda)) { x <- BoxCox(x, object$lambda) } xreg <- getxreg(object) # Remove intercept if (is.element("intercept", names(object$coef))) { xreg <- cbind(rep(1, length(x)), xreg) } # Return errors if (is.null(xreg)) { return(x) } else { norder <- sum(object$arma[1:4]) return(ts( c(x - xreg %*% as.matrix(object$coef[(norder + 1):length(object$coef)])), frequency = frequency(x), start = start(x) )) } } } #' @export residuals.forecast_ARIMA <- residuals.Arima #' @rdname residuals.forecast #' @export residuals.bats <- function(object, type=c("innovation", "response"), h=1, ...) { type <- match.arg(type) if (type == "innovation") { object$errors } else { getResponse(object) - fitted(object, h = h) } } #' @rdname residuals.forecast #' @export residuals.tbats <- function(object, type=c("innovation", "response"), h=1, ...) { type <- match.arg(type) if (type == "innovation") { object$errors } else { getResponse(object) - fitted(object, h = h) } } #' @rdname residuals.forecast #' @export residuals.ets <- function(object, type=c("innovation", "response"), h=1, ...) { type <- match.arg(type) if (type == "innovation") { object$residuals } else { getResponse(object) - fitted(object, h = h) } } #' @rdname residuals.forecast #' @export residuals.ARFIMA <- function(object, type=c("innovation", "response"), ...) { type <- match.arg(type) if (type == "innovation") { if (!is.null(object$residuals)) { # Object produced by arfima() return(object$residuals) } else # Object produced by fracdiff() { if (is.element("x", names(object))) { x <- object$x } else { x <- eval.parent(parse(text = as.character(object$call)[2])) } if (!is.null(object$lambda)) { x <- BoxCox(x, object$lambda) } y <- fracdiff::diffseries(x - mean(x), d = object$d) fit <- arima(y, order = c(length(object$ar), 0, length(object$ma)), include.mean = FALSE, fixed = c(object$ar, -object$ma)) return(residuals(fit, type = "innovation")) } } else { getResponse(object) - fitted(object) } } #' @rdname residuals.forecast #' @export residuals.nnetar <- function(object, type=c("innovation", "response"), h=1, ...) { type <- match.arg(type) if (type == "innovation" && !is.null(object$lambda)) { res <- matrix(unlist(lapply(object$model, residuals)), ncol = length(object$model)) if (!is.null(object$scalex$scale)) { res <- res * object$scalex$scale } } else { res <- getResponse(object) - fitted(object, h = h) } tspx <- tsp(getResponse(object)) res <- ts(res, frequency = tspx[3L], end = tspx[2L]) return(res) } #' @rdname residuals.forecast #' @export residuals.stlm <- function(object, type=c("innovation", "response"), ...) { type <- match.arg(type) if (type == "innovation") { object$residuals } else { getResponse(object) - fitted(object) } } #' @rdname residuals.forecast #' @export residuals.tslm <- function(object, type=c("innovation", "response", "deviance"), ...) 
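# For models fitted by tslm(), the "deviance" residuals equal the ordinary
# (innovation) residuals, so both branches return object$residuals, while
# "response" recomputes them as the observations minus the fitted values.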
{ type <- match.arg(type) if (type == "innovation" || type == "deviance") { object$residuals } else { getResponse(object) - fitted(object) } } forecast/R/dshw.r0000644000176200001440000002212714207263356013425 0ustar liggesusers#################################################################### ## Double Seasonal Holt Winters method as per Taylor (2003) ## Periods must be nested. ## y can be an msts object, or periods can be passed explicitly. #################################################################### #' Double-Seasonal Holt-Winters Forecasting #' #' Returns forecasts using Taylor's (2003) Double-Seasonal Holt-Winters method. #' #' Taylor's (2003) double-seasonal Holt-Winters method uses additive trend and #' multiplicative seasonality, where there are two seasonal components which #' are multiplied together. For example, with a series of half-hourly data, one #' would set \code{period1=48} for the daily period and \code{period2=336} for #' the weekly period. The smoothing parameter notation used here is different #' from that in Taylor (2003); instead it matches that used in Hyndman et al #' (2008) and that used for the \code{\link{ets}} function. #' #' @param y Either an \code{\link{msts}} object with two seasonal periods or a #' numeric vector. #' @param period1 Period of the shorter seasonal period. Only used if \code{y} #' is not an \code{\link{msts}} object. #' @param period2 Period of the longer seasonal period. Only used if \code{y} #' is not an \code{\link{msts}} object. #' @param h Number of periods for forecasting. #' @param alpha Smoothing parameter for the level. If \code{NULL}, the #' parameter is estimated using least squares. #' @param beta Smoothing parameter for the slope. If \code{NULL}, the parameter #' is estimated using least squares. #' @param gamma Smoothing parameter for the first seasonal period. If #' \code{NULL}, the parameter is estimated using least squares. #' @param omega Smoothing parameter for the second seasonal period. If #' \code{NULL}, the parameter is estimated using least squares. #' @param phi Autoregressive parameter. If \code{NULL}, the parameter is #' estimated using least squares. #' @param armethod If TRUE, the forecasts are adjusted using an AR(1) model for #' the errors. #' @param model If it's specified, an existing model is applied to a new data #' set. #' @inheritParams forecast.ts #' @inheritParams BoxCox #' #' @return An object of class "\code{forecast}" which is a list that includes the #' following elements: #' \item{model}{A list containing information about the fitted model} #' \item{method}{The name of the forecasting method as a character string} #' \item{mean}{Point forecasts as a time series} #' \item{x}{The original time series.} #' \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} #' \item{fitted}{Fitted values (one-step forecasts)} #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{dshw}. #' #' @author Rob J Hyndman #' @seealso \code{\link[stats]{HoltWinters}}, \code{\link{ets}}. #' @references Taylor, J.W. (2003) Short-term electricity demand forecasting #' using double seasonal exponential smoothing. \emph{Journal of the #' Operational Research Society}, \bold{54}, 799-805. #' #' Hyndman, R.J., Koehler, A.B., Ord, J.K., and Snyder, R.D. 
(2008) #' \emph{Forecasting with exponential smoothing: the state space approach}, #' Springer-Verlag. \url{http://www.exponentialsmoothing.net}. #' @keywords ts #' @examples #' #' \dontrun{ #' fcast <- dshw(taylor) #' plot(fcast) #' #' t <- seq(0,5,by=1/20) #' x <- exp(sin(2*pi*t) + cos(2*pi*t*4) + rnorm(length(t),0,.1)) #' fit <- dshw(x,20,5) #' plot(fit) #' } #' #' @export dshw <- function(y, period1=NULL, period2=NULL, h=2 * max(period1, period2), alpha=NULL, beta=NULL, gamma=NULL, omega=NULL, phi=NULL, lambda=NULL, biasadj=FALSE, armethod=TRUE, model = NULL) { if (min(y, na.rm = TRUE) <= 0) { stop("dshw not suitable when data contain zeros or negative numbers") } seriesname <- deparse(substitute(y)) if (!is.null(model) && model$method == "DSHW") { period1 <- model$period1 period2 <- model$period2 } else if (inherits(y, "msts") && (length(attr(y, "msts")) == 2)) { period1 <- as.integer(sort(attr(y, "msts"))[1]) period2 <- as.integer(sort(attr(y, "msts"))[2]) } else if (is.null(period1) || is.null(period2)) { stop("Error in dshw(): y must either be an msts object with two seasonal periods OR the seasonal periods should be specified with period1= and period2=") } else { if (period1 > period2) { tmp <- period2 period2 <- period1 period1 <- tmp } } if (any(class(y) != "msts")) { y <- msts(y, c(period1, period2)) } if (length(y) < 2 * max(period2)) { stop("Insufficient data to estimate model") } if (!armethod) { phi <- 0 } if (period1 < 1 || period1 == period2) { stop("Inappropriate periods") } ratio <- period2 / period1 if (ratio - trunc(ratio) > 1e-10) { stop("Seasonal periods are not nested") } if (!is.null(model)) { lambda <- model$model$lambda } if (!is.null(lambda)) { origy <- y y <- BoxCox(y, lambda) lambda <- attr(y, "lambda") } if (!is.null(model)) { pars <- model$model alpha <- pars$alpha beta <- pars$beta gamma <- pars$gamma omega <- pars$omega phi <- pars$phi } else { pars <- rep(NA, 5) if (!is.null(alpha)) { pars[1] <- alpha } if (!is.null(beta)) { pars[2] <- beta } if (!is.null(gamma)) { pars[3] <- gamma } if (!is.null(omega)) { pars[4] <- omega } if (!is.null(phi)) { pars[5] <- phi } } # Estimate parameters if (sum(is.na(pars)) > 0) { pars <- par_dshw(y, period1, period2, pars) alpha <- pars[1] beta <- pars[2] gamma <- pars[3] omega <- pars[4] phi <- pars[5] } ## Allocate space n <- length(y) yhat <- numeric(n) ## Starting values I <- seasindex(y, period1) wstart <- seasindex(y, period2) wstart <- wstart / rep(I, ratio) w <- wstart x <- c(0, diff(y[1:period2])) t <- t.start <- mean(((y[1:period2] - y[(period2 + 1):(2 * period2)]) / period2) + x) / 2 s <- s.start <- (mean(y[1:(2 * period2)]) - (period2 + 0.5) * t) ## In-sample fit for (i in 1:n) { yhat[i] <- (s + t) * I[i] * w[i] snew <- alpha * (y[i] / (I[i] * w[i])) + (1 - alpha) * (s + t) tnew <- beta * (snew - s) + (1 - beta) * t I[i + period1] <- gamma * (y[i] / (snew * w[i])) + (1 - gamma) * I[i] w[i + period2] <- omega * (y[i] / (snew * I[i])) + (1 - omega) * w[i] s <- snew t <- tnew } # Forecasts fcast <- (s + (1:h) * t) * rep(I[n + (1:period1)], h / period1 + 1)[1:h] * rep(w[n + (1:period2)], h / period2 + 1)[1:h] fcast <- msts(fcast, c(period1, period2), start = tsp(y)[2] + 1 / tsp(y)[3]) # Calculate MSE and MAPE yhat <- ts(yhat) tsp(yhat) <- tsp(y) yhat <- msts(yhat, c(period1, period2)) e <- y - yhat e <- msts(e, c(period1, period2)) if (armethod) { yhat <- yhat + phi * c(0, e[-n]) fcast <- fcast + phi ^ (1:h) * e[n] e <- y - yhat } mse <- mean(e ^ 2) mape <- mean(abs(e) / y) * 100 end.y <- end(y) if (end.y[2] == 
frequency(y)) { end.y[1] <- end.y[1] + 1 end.y[2] <- 1 } else { end.y[2] <- end.y[2] + 1 } fcast <- msts(fcast, c(period1, period2)) if (!is.null(lambda)) { y <- origy fcast <- InvBoxCox(fcast, lambda, biasadj, var(e)) attr(lambda, "biasadj") <- biasadj # Does this also need a biasadj backtransform? yhat <- InvBoxCox(yhat, lambda) } return(structure(list( mean = fcast, method = "DSHW", x = y, residuals = e, fitted = yhat, series = seriesname, model = list( mape = mape, mse = mse, alpha = alpha, beta = beta, gamma = gamma, omega = omega, phi = phi, lambda = lambda, l0 = s.start, b0 = t.start, s10 = wstart, s20 = I ), period1 = period1, period2 = period2 ), class = "forecast")) } ### Double Seasonal Holt-Winters smoothing parameter optimization par_dshw <- function(y, period1, period2, pars) { start <- c(0.1, 0.01, 0.001, 0.001, 0.0)[is.na(pars)] out <- optim(start, dshw.mse, y = y, period1 = period1, period2 = period2, pars = pars) pars[is.na(pars)] <- out$par return(pars) } dshw.mse <- function(par, y, period1, period2, pars) { pars[is.na(pars)] <- par if (max(pars) > 0.99 | min(pars) < 0 | pars[5] > .9) { return(Inf) } else { return(dshw(y, period1, period2, h = 1, pars[1], pars[2], pars[3], pars[4], pars[5], armethod = (abs(pars[5]) > 1e-7))$model$mse) } } ### Calculating seasonal indexes seasindex <- function(y, p) { n <- length(y) n2 <- 2 * p shorty <- y[1:n2] average <- numeric(n) simplema <- zoo::rollmean.default(shorty, p) if (identical(p %% 2, 0)) # Even order { centeredma <- zoo::rollmean.default(simplema[1:(n2 - p + 1)], 2) average[p / 2 + 1:p] <- shorty[p / 2 + 1:p] / centeredma[1:p] si <- average[c(p + (1:(p / 2)), (1 + p / 2):p)] } else # Odd order { average[(p - 1) / 2 + 1:p] <- shorty[(p - 1) / 2 + 1:p] / simplema[1:p] si <- average[c(p + (1:((p - 1) / 2)), (1 + (p - 1) / 2):p)] } return(si) } forecast/R/makeMatrices.R0000644000176200001440000002253014323125536015017 0ustar liggesusers# These functions make the w, F, x and g matrices # # # Author: srazbash ############################################################################### makeTBATSFMatrix <- function(alpha, beta=NULL, small.phi=NULL, seasonal.periods=NULL, k.vector=NULL, gamma.bold.matrix=NULL, ar.coefs=NULL, ma.coefs=NULL) { # 1. Alpha Row F <- matrix(1, nrow = 1, ncol = 1) if (!is.null(beta)) { F <- cbind(F, matrix(small.phi, nrow = 1, ncol = 1)) } if (!is.null(seasonal.periods)) { tau <- sum(k.vector) * 2 zero.tau <- matrix(0, nrow = 1, ncol = tau) F <- cbind(F, zero.tau) } if (!is.null(ar.coefs)) { p <- length(ar.coefs) ar.coefs <- matrix(ar.coefs, nrow = 1, ncol = p) alpha.phi <- alpha * ar.coefs F <- cbind(F, alpha.phi) } if (!is.null(ma.coefs)) { q <- length(ma.coefs) ma.coefs <- matrix(ma.coefs, nrow = 1, ncol = q) alpha.theta <- alpha * ma.coefs F <- cbind(F, alpha.theta) } # 2. Beta Row if (!is.null(beta)) { beta.row <- matrix(c(0, small.phi), nrow = 1, ncol = 2) if (!is.null(seasonal.periods)) { beta.row <- cbind(beta.row, zero.tau) } if (!is.null(ar.coefs)) { beta.phi <- beta * ar.coefs beta.row <- cbind(beta.row, beta.phi) } if (!is.null(ma.coefs)) { beta.theta <- beta * ma.coefs beta.row <- cbind(beta.row, beta.theta) } F <- rbind(F, beta.row) } # 3. 
Seasonal Row if (!is.null(seasonal.periods)) { seasonal.row <- t(zero.tau) if (!is.null(beta)) { seasonal.row <- cbind(seasonal.row, seasonal.row) } # Make the A matrix A <- matrix(0, tau, tau) last.pos <- 0 for (i in 1:length(k.vector)) { if (seasonal.periods[i] != 2) { C <- .Call("makeCIMatrix", k_s = as.integer(k.vector[i]), m_s = as.double(seasonal.periods[i]), PACKAGE = "forecast") } else { C <- matrix(0, 1, 1) } S <- .Call("makeSIMatrix", k_s = as.integer(k.vector[i]), m_s = as.double(seasonal.periods[i]), PACKAGE = "forecast") # C <- matrix(0,k.vector[i],k.vector[i]) # for(j in 1:k.vector[i]) { # l <- round((2*pi*j/seasonal.periods[i]), digits=15) # C[j,j] <- cos(l) # } # S <- matrix(0,k.vector[i],k.vector[i]) # for(j in 1:k.vector[i]) { # S[j,j] <- sin(2*pi*j/seasonal.periods[i]) # } # print(C) # print(S) Ai <- .Call("makeAIMatrix", C_s = C, S_s = S, k_s = as.integer(k.vector[i]), PACKAGE = "forecast") A[(last.pos + 1):(last.pos + (2 * k.vector[i])), (last.pos + 1):(last.pos + (2 * k.vector[i]))] <- Ai last.pos <- last.pos + (2 * k.vector[i]) } seasonal.row <- cbind(seasonal.row, A) if (!is.null(ar.coefs)) { B <- t(gamma.bold.matrix) %*% ar.coefs seasonal.row <- cbind(seasonal.row, B) } if (!is.null(ma.coefs)) { C <- t(gamma.bold.matrix) %*% ma.coefs seasonal.row <- cbind(seasonal.row, C) } F <- rbind(F, seasonal.row) } # 4. AR() Rows if (!is.null(ar.coefs)) { # p <- length(ar.coefs) ar.rows <- matrix(0, nrow = p, ncol = 1) if (!is.null(beta)) { ar.rows <- cbind(ar.rows, ar.rows) } if (!is.null(seasonal.periods)) { ar.seasonal.zeros <- matrix(0, nrow = p, ncol = tau) ar.rows <- cbind(ar.rows, ar.seasonal.zeros) } ident <- diag((p - 1)) ident <- cbind(ident, matrix(0, nrow = (p - 1), ncol = 1)) ar.part <- rbind(ar.coefs, ident) ar.rows <- cbind(ar.rows, ar.part) if (!is.null(ma.coefs)) { ma.in.ar <- matrix(0, nrow = p, ncol = q) ma.in.ar[1, ] <- ma.coefs ar.rows <- cbind(ar.rows, ma.in.ar) } F <- rbind(F, ar.rows) } # 5. MA() Rows if (!is.null(ma.coefs)) { ma.rows <- matrix(0, nrow = q, ncol = 1) if (!is.null(beta)) { ma.rows <- cbind(ma.rows, ma.rows) } if (!is.null(seasonal.periods)) { ma.seasonal <- matrix(0, nrow = q, ncol = tau) ma.rows <- cbind(ma.rows, ma.seasonal) } if (!is.null(ar.coefs)) { ar.in.ma <- matrix(0, nrow = q, ncol = p) ma.rows <- cbind(ma.rows, ar.in.ma) } ident <- diag((q - 1)) ident <- cbind(ident, matrix(0, nrow = (q - 1), ncol = 1)) ma.part <- rbind(matrix(0, nrow = 1, ncol = q), ident) ma.rows <- cbind(ma.rows, ma.part) F <- rbind(F, ma.rows) } return(F) } # makeWMatrix <- function(small.phi=NULL, seasonal.periods=NULL, ar.coefs=NULL, ma.coefs=NULL) { # # the.list <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = as.integer(seasonal.periods), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") # # # return(the.list) # # } # makeGMatrix <- function(alpha, beta=NULL, gamma.vector=NULL, seasonal.periods=NULL, p=0, q=0) { # li <- .Call("makeBATSGMatrix", alpha, beta, gamma.vector, as.integer(seasonal.periods), as.integer(p), as.integer(q), PACKAGE="forecast") # # return(li) # } makeFMatrix <- function(alpha, beta=NULL, small.phi=NULL, seasonal.periods=NULL, gamma.bold.matrix=NULL, ar.coefs=NULL, ma.coefs=NULL) { # 1. 
Alpha Row F <- matrix(1, nrow = 1, ncol = 1) if (!is.null(beta)) { F <- cbind(F, matrix(small.phi, nrow = 1, ncol = 1)) } if (!is.null(seasonal.periods)) { tau <- sum(seasonal.periods) zero.tau <- matrix(0, nrow = 1, ncol = tau) F <- cbind(F, zero.tau) } if (!is.null(ar.coefs)) { p <- length(ar.coefs) ar.coefs <- matrix(ar.coefs, nrow = 1, ncol = p) alpha.phi <- alpha * ar.coefs F <- cbind(F, alpha.phi) } if (!is.null(ma.coefs)) { q <- length(ma.coefs) ma.coefs <- matrix(ma.coefs, nrow = 1, ncol = q) alpha.theta <- alpha * ma.coefs F <- cbind(F, alpha.theta) } # 2. Beta Row if (!is.null(beta)) { beta.row <- matrix(c(0, small.phi), nrow = 1, ncol = 2) if (!is.null(seasonal.periods)) { beta.row <- cbind(beta.row, zero.tau) } if (!is.null(ar.coefs)) { beta.phi <- beta * ar.coefs beta.row <- cbind(beta.row, beta.phi) } if (!is.null(ma.coefs)) { beta.theta <- beta * ma.coefs beta.row <- cbind(beta.row, beta.theta) } F <- rbind(F, beta.row) } # 3. Seasonal Row if (!is.null(seasonal.periods)) { seasonal.row <- t(zero.tau) if (!is.null(beta)) { seasonal.row <- cbind(seasonal.row, seasonal.row) } # Make the A matrix for (i in seasonal.periods) { if (i == seasonal.periods[1]) { a.row.one <- matrix(0, nrow = 1, ncol = i) a.row.one[i] <- 1 a.row.two <- cbind(diag((i - 1)), matrix(0, nrow = (i - 1), ncol = 1)) A <- rbind(a.row.one, a.row.two) } else { old.A.rows <- dim(A)[1] old.A.columns <- dim(A)[2] a.row.one <- matrix(0, nrow = 1, ncol = i) a.row.one[i] <- 1 a.row.two <- cbind(diag((i - 1)), matrix(0, nrow = (i - 1), ncol = 1)) Ai <- rbind(a.row.one, a.row.two) A <- rbind(A, matrix(0, nrow = dim(Ai)[1], ncol = old.A.columns)) A <- cbind(A, matrix(0, nrow = dim(A)[1], ncol = dim(Ai)[2])) A[((old.A.rows + 1):(old.A.rows + dim(Ai)[1])), ((old.A.columns + 1):(old.A.columns + dim(Ai)[2]))] <- Ai } } seasonal.row <- cbind(seasonal.row, A) if (!is.null(ar.coefs)) { B <- t(gamma.bold.matrix) %*% ar.coefs seasonal.row <- cbind(seasonal.row, B) } if (!is.null(ma.coefs)) { C <- t(gamma.bold.matrix) %*% ma.coefs seasonal.row <- cbind(seasonal.row, C) } F <- rbind(F, seasonal.row) } # 4. AR() Rows if (!is.null(ar.coefs)) { # p <- length(ar.coefs) ar.rows <- matrix(0, nrow = p, ncol = 1) if (!is.null(beta)) { ar.rows <- cbind(ar.rows, ar.rows) } if (!is.null(seasonal.periods)) { ar.seasonal.zeros <- matrix(0, nrow = p, ncol = tau) ar.rows <- cbind(ar.rows, ar.seasonal.zeros) } ident <- diag((p - 1)) ident <- cbind(ident, matrix(0, nrow = (p - 1), ncol = 1)) ar.part <- rbind(ar.coefs, ident) ar.rows <- cbind(ar.rows, ar.part) if (!is.null(ma.coefs)) { ma.in.ar <- matrix(0, nrow = p, ncol = q) ma.in.ar[1, ] <- ma.coefs ar.rows <- cbind(ar.rows, ma.in.ar) } F <- rbind(F, ar.rows) } # 5. 
MA() Rows if (!is.null(ma.coefs)) { ma.rows <- matrix(0, nrow = q, ncol = 1) if (!is.null(beta)) { ma.rows <- cbind(ma.rows, ma.rows) } if (!is.null(seasonal.periods)) { ma.seasonal <- matrix(0, nrow = q, ncol = tau) ma.rows <- cbind(ma.rows, ma.seasonal) } if (!is.null(ar.coefs)) { ar.in.ma <- matrix(0, nrow = q, ncol = p) ma.rows <- cbind(ma.rows, ar.in.ma) } ident <- diag((q - 1)) ident <- cbind(ident, matrix(0, nrow = (q - 1), ncol = 1)) ma.part <- rbind(matrix(0, nrow = 1, ncol = q), ident) ma.rows <- cbind(ma.rows, ma.part) F <- rbind(F, ma.rows) } return(F) } makeXMatrix <- function(l, b=NULL, s.vector=NULL, d.vector=NULL, epsilon.vector=NULL) { x.transpose <- matrix(l, nrow = 1, ncol = 1) if (!is.null(b)) { x.transpose <- cbind(x.transpose, matrix(b, nrow = 1, ncol = 1)) } if (!is.null(s.vector)) { x.transpose <- cbind(x.transpose, matrix(s.vector, nrow = 1, ncol = length(s.vector))) } if (!is.null(d.vector)) { x.transpose <- cbind(x.transpose, matrix(d.vector, nrow = 1, ncol = length(d.vector))) } if (!is.null(epsilon.vector)) { x.transpose <- cbind(x.transpose, matrix(epsilon.vector, nrow = 1, ncol = length(epsilon.vector))) } x <- t(x.transpose) return(list(x = x, x.transpose = x.transpose)) } forecast/R/clean.R0000644000176200001440000001634214323125536013500 0ustar liggesusers# Functions to remove outliers and fill missing values in a time series # Nothing for multiple seasonality yet. # na.interp fills in missing values # Uses linear interpolation for non-seasonal series # Adds seasonality based on a periodic stl decomposition with seasonal series # Argument lambda allows for Box-Cox transformation #' Interpolate missing values in a time series #' #' By default, uses linear interpolation for non-seasonal series. For seasonal series, a #' robust STL decomposition is first computed. Then a linear interpolation is applied to the #' seasonally adjusted data, and the seasonal component is added back. #' #' A more general and flexible approach is available using \code{na.approx} in #' the \code{zoo} package. #' #' @param x time series #' @param linear Should a linear interpolation be used. 
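#'   By default, linear interpolation is used for non-seasonal series and for series with at most two seasonal periods' worth of non-missing values; otherwise, the robust STL-based approach described above is applied.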
#' @inheritParams forecast.ts #' @return Time series #' @author Rob J Hyndman #' @seealso \code{\link[forecast]{tsoutliers}} #' @keywords ts #' @examples #' #' data(gold) #' plot(na.interp(gold)) #' #' @export na.interp <- function(x, lambda=NULL, linear=(frequency(x) <= 1 | sum(!is.na(x)) <= 2 * frequency(x))) { missng <- is.na(x) # Do nothing if no missing values if (sum(missng) == 0L) { return(x) } origx <- x rangex <- range(x, na.rm=TRUE) drangex <- rangex[2L] - rangex[1L] # Convert to ts if (is.null(tsp(x))) { x <- ts(x) } if (length(dim(x)) > 1) { if (NCOL(x) == 1) { x <- x[, 1] } else { stop("The time series is not univariate.") } } # Transform if requested if (!is.null(lambda)) { x <- BoxCox(x, lambda = lambda) lambda <- attr(x, "lambda") } freq <- frequency(x) tspx <- tsp(x) n <- length(x) tt <- 1:n idx <- tt[!missng] if (linear) { # Use linear interpolation x <- ts(approx(idx, x[idx], tt, rule = 2)$y) } else { # Otherwise estimate seasonal component robustly # Then add to linear interpolation of seasonally adjusted series # Fit Fourier series for seasonality and a polynomial for the trend, # just to get something reasonable to start with if ("msts" %in% class(x)) { K <- pmin(trunc(attributes(x)$msts / 2), 20L) } else { K <- min(trunc(freq / 2), 5) } X <- cbind(fourier(x, K), poly(tt, degree = pmin(pmax(trunc(n / 10), 1), 6L))) fit <- lm(x ~ X, na.action = na.exclude) pred <- predict(fit, newdata = data.frame(X)) x[missng] <- pred[missng] # Now re-do it with stl to get better results fit <- mstl(x, robust = TRUE) # Interpolate seasonally adjusted values sa <- seasadj(fit) sa <- approx(idx, sa[idx], 1:n, rule = 2)$y # Replace original missing values seas <- seasonal(fit) if (NCOL(seas) > 1) { seas <- rowSums(seas) } x[missng] <- sa[missng] + seas[missng] } # Backtransform if required if (!is.null(lambda)) { x <- InvBoxCox(x, lambda = lambda) } # Ensure time series characteristics not lost tsp(x) <- tspx # Check stability and use linear interpolation if there is a problem if(!linear & (max(x) > rangex[2L]+0.5*drangex | min(x) < rangex[1L]-0.5*drangex)) return(na.interp(origx, lambda=lambda, linear=TRUE)) else return(x) } # Function to identify outliers and replace them with better values # Missing values replaced as well if replace.missing=TRUE #' Identify and replace outliers and missing values in a time series #' #' Uses supsmu for non-seasonal series and a robust STL decomposition for #' seasonal series. To estimate missing values and outlier replacements, #' linear interpolation is used on the (possibly seasonally adjusted) series #' #' @param x time series #' @param replace.missing If TRUE, it not only replaces outliers, but also #' interpolates missing values #' @param iterate the number of iterations required #' @inheritParams forecast.ts #' @return Time series #' @author Rob J Hyndman #' @references Hyndman (2021) "Detecting time series outliers" \url{https://robjhyndman.com/hyndsight/tsoutliers/}. 
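#' @details \code{tsclean} first replaces the outliers identified by \code{\link[forecast]{tsoutliers}} with their suggested replacements, then, when \code{replace.missing} is \code{TRUE}, interpolates any remaining missing values using \code{\link[forecast]{na.interp}}.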
#' @seealso \code{\link[forecast]{na.interp}}, #' \code{\link[forecast]{tsoutliers}}, \code{\link[stats]{supsmu}} #' @keywords ts #' @examples #' #' cleangold <- tsclean(gold) #' #' @export tsclean <- function(x, replace.missing=TRUE, iterate=2, lambda = NULL) { outliers <- tsoutliers(x, iterate = iterate, lambda = lambda) x[outliers$index] <- outliers$replacements if (replace.missing) { x <- na.interp(x, lambda = lambda) } return(x) } # Function to identify time series outliers #' Identify and replace outliers in a time series #' #' Uses supsmu for non-seasonal series and a periodic stl decomposition for #' seasonal series to identify outliers and estimate their replacements. #' #' #' @param x time series #' @param iterate the number of iterations required #' @inheritParams forecast.ts #' @return \item{index}{Indexes of the identified outlier(s)} #' \item{replacements}{Suggested numeric values to replace identified outliers} #' @author Rob J Hyndman #' @seealso \code{\link[forecast]{na.interp}}, \code{\link[forecast]{tsclean}} #' @references Hyndman (2021) "Detecting time series outliers" \url{https://robjhyndman.com/hyndsight/tsoutliers/}. #' @keywords ts #' @examples #' #' data(gold) #' tsoutliers(gold) #' #' @export tsoutliers <- function(x, iterate=2, lambda=NULL) { n <- length(x) freq <- frequency(x) # Identify and fill missing values missng <- is.na(x) nmiss <- sum(missng) if (nmiss > 0L) { xx <- na.interp(x, lambda = lambda) } else { xx <- x } # Check if constant if (is.constant(xx)) { return(list(index = integer(0), replacements = numeric(0))) } # Transform if requested if (!is.null(lambda)) { xx <- BoxCox(xx, lambda = lambda) lambda <- attr(xx, "lambda") } # Seasonally adjust data if necessary if (freq > 1 && n > 2 * freq) { fit <- mstl(xx, robust=TRUE) # Check if seasonality is sufficient to warrant adjustment rem <- remainder(fit) detrend <- xx - trendcycle(fit) strength <- 1 - var(rem) / var(detrend) if (strength >= 0.6) { xx <- seasadj(fit) } } # Use super-smoother on the (seasonally adjusted) data tt <- 1:n mod <- supsmu(tt, xx) resid <- xx - mod$y # Make sure missing values are not interpreted as outliers if (nmiss > 0L) { resid[missng] <- NA } # Limits of acceptable residuals resid.q <- quantile(resid, probs = c(0.25, 0.75), na.rm = TRUE) iqr <- diff(resid.q) limits <- resid.q + 3 * iqr * c(-1, 1) # Find residuals outside limits if ((limits[2] - limits[1]) > 1e-14) { outliers <- which((resid < limits[1]) | (resid > limits[2])) } else { outliers <- numeric(0) } # Replace all missing values including outliers x[outliers] <- NA x <- na.interp(x, lambda = lambda) # Do no more than 2 iterations regardless of the value of iterate if (iterate > 1) { tmp <- tsoutliers(x, iterate = 1, lambda = lambda) if (length(tmp$index) > 0) # Found some more { outliers <- sort(unique(c(outliers, tmp$index))) x[outliers] <- NA if(sum(!is.na(x)) == 1L) { # Only one non-missing value x[is.na(x)] <- x[!is.na(x)] } else x <- na.interp(x, lambda = lambda) } } # Return outlier indexes and replacements return(list(index = outliers, replacements = x[outliers])) } forecast/R/unitRoot.R0000644000176200001440000004056414150370574014246 0ustar liggesusers#' Number of differences required for a stationary series #' #' Functions to estimate the number of differences required to make a given #' time series stationary. \code{ndiffs} estimates the number of first #' differences necessary.
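#' #' The chosen test is applied sequentially: whenever it indicates that differencing is needed, the series is differenced once and the test is repeated, up to a maximum of \code{max.d} differences.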
#' #' \code{ndiffs} uses a unit root test to determine the number of differences #' required for time series \code{x} to be made stationary. If #' \code{test="kpss"}, the KPSS test is used with the null hypothesis that #' \code{x} has a stationary root against a unit-root alternative. Then the #' test returns the least number of differences required to pass the test at #' the level \code{alpha}. If \code{test="adf"}, the Augmented Dickey-Fuller #' test is used and if \code{test="pp"} the Phillips-Perron test is used. In #' both of these cases, the null hypothesis is that \code{x} has a unit root #' against a stationary root alternative. Then the test returns the least #' number of differences required to fail the test at the level \code{alpha}. #' #' @param x A univariate time series #' @param alpha Level of the test, possible values range from 0.01 to 0.1. #' @param test Type of unit root test to use #' @param type Specification of the deterministic component in the regression #' @param max.d Maximum number of non-seasonal differences allowed #' @param ... Additional arguments to be passed on to the unit root test #' @return An integer indicating the number of differences required for stationarity. #' @author Rob J Hyndman, Slava Razbash & Mitchell O'Hara-Wild #' @seealso \code{\link{auto.arima}} and \code{\link{ndiffs}} #' @references #' Dickey DA and Fuller WA (1979), "Distribution of the Estimators for #' Autoregressive Time Series with a Unit Root", \emph{Journal of the American #' Statistical Association} \bold{74}:427-431. #' #' Kwiatkowski D, Phillips PCB, Schmidt P and Shin Y (1992) "Testing the Null #' Hypothesis of Stationarity against the Alternative of a Unit Root", #' \emph{Journal of Econometrics} \bold{54}:159-178. #' #' Osborn, D.R. (1990) "A survey of seasonality in UK macroeconomic variables", #' \emph{International Journal of Forecasting}, \bold{6}:327-336. #' #' Phillips, P.C.B. and Perron, P. (1988) "Testing for a unit root in time series regression", #' \emph{Biometrika}, \bold{72}(2), 335-346. #' #' Said E and Dickey DA (1984), "Testing for Unit Roots in Autoregressive #' Moving Average Models of Unknown Order", \emph{Biometrika} #' \bold{71}:599-607. #' @keywords ts #' @examples #' ndiffs(WWWusage) #' ndiffs(diff(log(AirPassengers), 12)) #' @importFrom urca ur.kpss ur.df ur.pp #' @export ndiffs <- function(x, alpha = 0.05, test = c("kpss", "adf", "pp"), type = c("level", "trend"), max.d = 2, ...) { test <- match.arg(test) type <- match(match.arg(type), c("level", "trend")) x <- c(na.omit(c(x))) d <- 0 if (alpha < 0.01) { warning("Specified alpha value is less than the minimum, setting alpha=0.01") alpha <- 0.01 } else if (alpha > 0.1) { warning("Specified alpha value is larger than the maximum, setting alpha=0.1") alpha <- 0.1 } if (is.constant(x)) { return(d) } urca_pval <- function(urca_test) { approx(urca_test@cval[1, ], as.numeric(sub("pct", "", colnames(urca_test@cval))) / 100, xout = urca_test@teststat[1], rule = 2)$y } kpss_wrap <- function(..., use.lag = trunc(3 * sqrt(length(x)) / 13)) { ur.kpss(..., use.lag = use.lag) } runTests <- function(x, test, alpha) { tryCatch( { suppressWarnings( diff <- switch(test, kpss = urca_pval(kpss_wrap(x, type = c("mu", "tau")[type], ...)) < alpha, adf = urca_pval(ur.df(x, type = c("drift", "trend")[type], ...)) > alpha, pp = urca_pval(ur.pp(x, type = "Z-tau", model = c("constant", "trend")[type], ...)) > alpha, stop("This shouldn't happen") ) ) diff }, error = function(e) { warning( call. 
= FALSE, sprintf( "The chosen unit root test encountered an error when testing for the %s difference. From %s(): %s %i differences will be used. Consider using a different unit root test.", switch(as.character(d), `0` = "first", `1` = "second", `2` = "third", paste0(d + 1, "th")), deparse(e$call[[1]]), e$message, d ) ) FALSE } ) } dodiff <- runTests(x, test, alpha) if (is.na(dodiff)) { return(d) } while (dodiff && d < max.d) { d <- d + 1 x <- diff(x) if (is.constant(x)) { return(d) } dodiff <- runTests(x, test, alpha) if (is.na(dodiff)) { return(d - 1) } } return(d) } # Number of seasonal differences #' Number of differences required for a seasonally stationary series #' #' Functions to estimate the number of differences required to make a given #' time series stationary. \code{nsdiffs} estimates the number of seasonal differences #' necessary. #' #' \code{nsdiffs} uses seasonal unit root tests to determine the number of #' seasonal differences required for time series \code{x} to be made stationary #' (possibly with some lag-one differencing as well). #' #' Several different tests are available: #' * If \code{test="seas"} (default), a measure of seasonal strength is used, where differencing is #' selected if the seasonal strength (Wang, Smith & Hyndman, 2006) exceeds 0.64 #' (based on minimizing MASE when forecasting using auto.arima on M3 and M4 data). #' * If \code{test="ch"}, the Canova-Hansen (1995) test is used #' (with null hypothesis of deterministic seasonality) #' * If \code{test="hegy"}, the Hylleberg, Engle, Granger & Yoo (1990) test is used. #' * If \code{test="ocsb"}, the Osborn-Chui-Smith-Birchenhall #' (1988) test is used (with null hypothesis that a seasonal unit root exists). #' #' @md #' #' @inheritParams ndiffs #' @param x A univariate time series #' @param alpha Level of the test, possible values range from 0.01 to 0.1. #' @param test Type of unit root test to use #' @param m Deprecated. Length of seasonal period #' @param max.D Maximum number of seasonal differences allowed #' #' @return An integer indicating the number of differences required for stationarity. #' #' @references #' #' Wang, X, Smith, KA, Hyndman, RJ (2006) "Characteristic-based clustering #' for time series data", \emph{Data Mining and Knowledge Discovery}, #' \bold{13}(3), 335-364. #' #' Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the #' order of integration for consumption", \emph{Oxford Bulletin of Economics #' and Statistics} \bold{50}(4):361-377. #' #' Canova F and Hansen BE (1995) "Are Seasonal Patterns Constant #' over Time? A Test for Seasonal Stability", \emph{Journal of Business and #' Economic Statistics} \bold{13}(3):237-252. #' #' Hylleberg S, Engle R, Granger C and Yoo B (1990) "Seasonal integration #' and cointegration.", \emph{Journal of Econometrics} \bold{44}(1), pp. 215-238. #' #' @author Rob J Hyndman, Slava Razbash and Mitchell O'Hara-Wild #' #' @seealso \code{\link{auto.arima}}, \code{\link{ndiffs}}, \code{\link{ocsb.test}}, \code{\link[uroot]{hegy.test}}, and \code{\link[uroot]{ch.test}} #' #' @examples #' nsdiffs(AirPassengers) #' @export nsdiffs <- function(x, alpha = 0.05, m = frequency(x), test = c("seas", "ocsb", "hegy", "ch"), max.D = 1, ...) 
{ test <- match.arg(test) D <- 0 if (alpha < 0.01) { warning("Specified alpha value is less than the minimum, setting alpha=0.01") alpha <- 0.01 } else if (alpha > 0.1) { warning("Specified alpha value is larger than the maximum, setting alpha=0.1") alpha <- 0.1 } if (test == "ocsb" && alpha != 0.05) { warning("Significance levels other than 5% are not currently supported by test='ocsb', defaulting to alpha = 0.05.") alpha <- 0.05 } if (test %in% c("hegy", "ch")) { if (!requireNamespace("uroot", quietly = TRUE)) { stop(paste0("Using a ", test, ' test requires the uroot package. Please install it using `install.packages("uroot")`')) } } if (is.constant(x)) { return(D) } if (!missing(m)) { warning("argument m is deprecated; please set the frequency in the ts object.", call. = FALSE ) x <- ts(x, frequency = m) } if (frequency(x) == 1) { stop("Non seasonal data") } else if (frequency(x) < 1) { warning("I can't handle data with frequency less than 1. Seasonality will be ignored.") return(0) } if (frequency(x) >= length(x)) { return(0) } # Can't take differences runTests <- function(x, test, alpha) { tryCatch( { suppressWarnings( diff <- switch(test, seas = seas.heuristic(x, ...) > 0.64, # Threshold chosen based on seasonal M3 auto.arima accuracy. ocsb = with(ocsb.test(x, maxlag = 3, lag.method = "AIC", ...), statistics > critical), hegy = tail(uroot::hegy.test(x, deterministic = c(1, 1, 0), maxlag = 3, lag.method = "AIC", ...)$pvalues, 2)[-2] > alpha, ch = uroot::ch.test(x, type = "trig", ...)$pvalues["joint"] < alpha ) ) stopifnot(diff %in% c(0, 1)) diff }, error = function(e) { warning( call. = FALSE, sprintf( "The chosen seasonal unit root test encountered an error when testing for the %s difference. From %s(): %s %i seasonal differences will be used. Consider using a different unit root test.", switch(as.character(D), `0` = "first", `1` = "second", `2` = "third", paste0(D + 1, "th")), deparse(e$call[[1]]), e$message, D ) ) 0 } ) } dodiff <- runTests(x, test, alpha) if (dodiff && frequency(x) %% 1 != 0) { warning("The time series frequency has been rounded to support seasonal differencing.", call. = FALSE) x <- ts(x, frequency = round(frequency(x))) } while (dodiff && D < max.D) { D <- D + 1 x <- diff(x, lag = frequency(x)) if (is.constant(x)) { return(D) } if (length(x) >= 2 * frequency(x) & D < max.D) { dodiff <- runTests(x, test, alpha) } else { dodiff <- FALSE } } return(D) } # Adjusted from robjhyndman/tsfeatures seas.heuristic <- function(x) { if ("msts" %in% class(x)) { msts <- attributes(x)$msts nperiods <- length(msts) } else if ("ts" %in% class(x)) { msts <- frequency(x) nperiods <- msts > 1 season <- 0 } else { stop("The object provided must be a time-series object (`msts` or `ts`)") } season <- NA stlfit <- mstl(x) remainder <- stlfit[, "Remainder"] seasonal <- stlfit[, grep("Season", colnames(stlfit)), drop = FALSE] vare <- var(remainder, na.rm = TRUE) nseas <- NCOL(seasonal) if (nseas > 0) { season <- numeric(nseas) for (i in seq(nseas)) { season[i] <- max(0, min(1, 1-vare/var(remainder+seasonal[, i], na.rm = TRUE))) } } return(season) } # Model specification from Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the order of integration for consumption", Oxford Bulletin of Economics and Statistics 50(4):361-377. 
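# In the implementation below, fitOCSB() regresses the doubly-differenced series on its own lags and on the filtered regressors Z4 (lagged one period) and Z5 (lagged one seasonal period); the t statistic on the Z5 coefficient is the reported test statistic, which ocsb.test() compares against a simulated 5% critical value from calcOCSBCritVal().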
# # $\Delta\Delta_m X_t = \beta_1Z_{4,t-1} + \beta_2Z_{5,t-m} + \alpha_1\Delta\Delta_mX_{t-1} + \ldots + \alpha_p\Delta\Delta_mX_{t-p}$ # Where $Z_{4,t} = \hat{\lambda}(B)\Delta_mX_t$, $Z_{5,t} = \hat{\lambda}(B)\Delta X_t$, and $\hat{\lambda}(B)$ is an AR(p) lag operator with coefficients from an estimated AR(p) process of $\Delta\Delta_m X_t$. #' Osborn, Chui, Smith, and Birchenhall Test for Seasonal Unit Roots #' #' An implementation of the Osborn, Chui, Smith, and Birchenhall (OCSB) test. #' #' @inheritParams uroot::hegy.test #' @aliases print.OCSBtest #' @details #' The regression equation may include lags of the dependent variable. When lag.method = "fixed", the lag order is fixed to maxlag; otherwise, maxlag is the maximum number of lags considered in a lag selection procedure that minimises the lag.method criterion, which can be AIC, BIC or the corrected AIC (AICc), obtained as AIC + (2k(k+1))/(n-k-1), where k is the number of parameters and n is the number of available observations in the model. #' #' Critical values for the test are based on simulations, which have been smoothed to produce critical values for all seasonal periods. #' #' @return #' ocsb.test returns a list of class "OCSBtest" with the following components: #' * statistics: the value of the test statistic. #' * critical: the simulated 5% critical value against which the statistic is compared. #' * method: a character string describing the type of test. #' * lag.method, lag.order: the lag selection method and the selected lag order. #' * data.name: a character string giving the name of the data. #' * fitted.model: the fitted regression model. #' #' @references #' Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the #' order of integration for consumption", \emph{Oxford Bulletin of Economics #' and Statistics} \bold{50}(4):361-377. #' #' @seealso \code{\link{nsdiffs}} #' #' @examples #' ocsb.test(AirPassengers) #' @importFrom stats AIC BIC #' #' @export ocsb.test <- function(x, lag.method = c("fixed", "AIC", "BIC", "AICc"), maxlag = 0) { lag.method <- match.arg(lag.method) sname <- deparse(substitute(x)) period <- round(frequency(x)) # Avoid non-integer seasonal period if (period == 1) { stop("Data must be seasonal to use `ocsb.test`.
Check your ts frequency.") } genLags <- function(y, maxlag) { if (maxlag == 0) { return(ts(numeric(NROW(y)), start = start(y), frequency = frequency(y))) } out <- do.call(cbind, lapply(seq_len(maxlag), function(k) stats::lag(y, -k))) if (NCOL(out) > 1) { colnames(out) <- paste0("lag_", seq_len(maxlag)) } return(out) } fitOCSB <- function(x, lag, maxlag) { period <- round(frequency(x)) # Avoid non-integer seasonal period # Compute (1-B)(1-B^m)y_t y <- diff(diff(x, period)) ylag <- genLags(y, lag) if (maxlag > 0) { # Ensure models are fitted on same length for lag order selection via lag.method y <- tail(y, -maxlag) } mf <- na.omit(cbind(y = y, x = ylag)) # Estimate lambda(B) coefficients ar.fit <- lm(y ~ 0 + ., data = mf) # Compute lambda(B)(1-B^m)y_{t-1} Z4_frame <- na.omit(cbind(y = diff(x, period), x = genLags(diff(x, period), lag))) Z4 <- Z4_frame[, "y"] - suppressWarnings(predict(ar.fit, Z4_frame)) # Compute lambda(B)(1-B)y_{t-m} Z5_frame <- na.omit(cbind(y = diff(x), x = genLags(diff(x), lag))) Z5 <- Z5_frame[, "y"] - suppressWarnings(predict(ar.fit, Z5_frame)) # Combine regressors data <- na.omit(cbind(mf, Z4 = stats::lag(Z4, -1), Z5 = stats::lag(Z5, -period))) y <- data[, 1] xreg <- data[, -1] lm(y ~ 0 + xreg) } # Estimate maxlag if (maxlag > 0) { if (lag.method != "fixed") { tmp <- vector("list", maxlag + 1) fits <- lapply(seq_len(maxlag), function(lag) fitOCSB(x, lag, maxlag)) icvals <- unlist(switch(lag.method, AIC = lapply(fits, AIC), BIC = lapply(fits, BIC), AICc = lapply( fits, function(x) { k <- x$rank + 1 -2 * logLik(x) + 2 * k + (2 * k * (k + 1)) / (length(residuals(x)) - k - 1) } ) )) id <- which.min(icvals) maxlag <- id - 1 } } regression <- fitOCSB(x, maxlag, maxlag) # if(any(is.na(regression$coefficients))) # stop("Model did not reach a solution. Check the time series data.") stat <- summary(regression)$coefficients[c("xregZ4", "xregZ5"), "t value"] if (any(is.na(stat))) { stop("Model did not reach a solution. Consider using a longer series or a different test.") } structure(list( statistics = stat[2], critical = calcOCSBCritVal(period), method = "OCSB test", lag.method = lag.method, lag.order = maxlag, fitted.model = regression, data.name = sname ), class = "OCSBtest" ) } # Return critical values for OCSB test at 5% level # Approximation based on extensive simulations. calcOCSBCritVal <- function(seasonal.period) { log.m <- log(seasonal.period) return(-0.2937411 * exp(-0.2850853 * (log.m - 0.7656451) + (-0.05983644) * ((log.m - 0.7656451)^2)) - 1.652202) } #' @export print.OCSBtest <- function(x, ...) { cat("\n") cat(strwrap(x$method, prefix = "\t"), sep = "\n") cat("\n") cat("data: ", x$data.name, "\n\n", sep = "") cat(paste0("Test statistic: ", round(x$statistics, 4), ", 5% critical value: ", round(x$critical, 4))) cat("\n") cat("alternative hypothesis: stationary") cat("\n\n") cat(paste0("Lag order ", x$lag.order, " was selected using ", x$lag.method)) } forecast/R/season.R0000644000176200001440000002531314150370574013706 0ustar liggesusers### Functions to handle seasonality #' Number of days in each season #' #' Returns number of days in each month or quarter of the observed time period. 
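#' #' Leap years are taken into account: in each leap year, February gains a day for monthly series and the first quarter gains a day for quarterly series.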
#' #' Useful for month length adjustments #' #' @param x time series #' @return Time series #' @author Rob J Hyndman #' @seealso \code{\link[forecast]{bizdays}} #' @keywords ts #' @examples #' #' par(mfrow=c(2,1)) #' plot(ldeaths,xlab="Year",ylab="deaths", #' main="Monthly deaths from lung disease (UK)") #' ldeaths.adj <- ldeaths/monthdays(ldeaths)*365.25/12 #' plot(ldeaths.adj,xlab="Year",ylab="deaths", #' main="Adjusted monthly deaths from lung disease (UK)") #' #' @export monthdays <- function(x) { if (!is.ts(x)) { stop("Not a time series") } f <- frequency(x) if (f == 12) { days <- c(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31) } else if (f == 4) { days <- c(90, 91, 92, 92) } else { stop("Not monthly or quarterly data") } nyears <- round(length(x) / f + 1) + 1 years <- (1:nyears) + (start(x)[1] - 1) leap.years <- ((years %% 4 == 0) & !(years %% 100 == 0 & years %% 400 != 0))[1:nyears] dummy <- t(matrix(rep(days, nyears), nrow = f)) if (f == 12) { dummy[leap.years, 2] <- 29 } else { dummy[leap.years, 1] <- 91 } xx <- c(t(dummy))[start(x)[2] - 1 + (1:length(x))] return(ts(xx, start = start(x), frequency = f)) } #' Forecast seasonal index #' #' Returns a vector containing the seasonal index for \code{h} future periods. If #' the seasonal index is non-periodic, it uses the last values of the index. #' #' #' @param object Output from \code{\link[stats]{decompose}} or #' \code{\link[stats]{stl}}. #' @param h Number of periods ahead to forecast #' @return Time series #' @author Rob J Hyndman #' @keywords ts #' @examples #' uk.stl <- stl(UKDriverDeaths,"periodic") #' uk.sa <- seasadj(uk.stl) #' uk.fcast <- holt(uk.sa,36) #' seasf <- sindexf(uk.stl,36) #' uk.fcast$mean <- uk.fcast$mean + seasf #' uk.fcast$lower <- uk.fcast$lower + cbind(seasf,seasf) #' uk.fcast$upper <- uk.fcast$upper + cbind(seasf,seasf) #' uk.fcast$x <- UKDriverDeaths #' plot(uk.fcast,main="Forecasts from Holt's method with seasonal adjustment") #' #' @export sindexf <- function(object, h) { if ("stl" %in% class(object)) { ss <- object$time.series[, 1] m <- frequency(ss) ss <- ss[length(ss) - (m:1) + 1] tsp.x <- tsp(object$time.series) } else if ("decomposed.ts" %in% class(object)) { ss <- object$figure m <- frequency(object$seasonal) n <- length(object$trend) ss <- rep(ss, n / m + 1)[1:n] ss <- ss[n - (m:1) + 1] tsp.x <- tsp(object$seasonal) } else { stop("Object of unknown class") } out <- ts(rep(ss, h / m + 1)[1:h], frequency = m, start = tsp.x[2] + 1 / m) return(out) } #' Seasonal dummy variables #' #' \code{seasonaldummy} returns a matrix of dummy variables suitable for use in #' \code{\link{Arima}}, \code{\link{auto.arima}} or \code{\link{tslm}}. The #' last season is omitted and used as the control. #' #' \code{seasonaldummyf} is deprecated, instead use the \code{h} argument in #' \code{seasonaldummy}. #' #' The number of dummy variables is determined from the time series #' characteristics of \code{x}. When \code{h} is missing, the length of #' \code{x} also determines the number of rows for the matrix returned by #' \code{seasonaldummy}. Otherwise, the value of \code{h} determines the number of rows #' for the matrix returned by \code{seasonaldummy}, typically used for #' forecasting. The values within \code{x} are not used. #' #' @param x Seasonal time series: a \code{ts} or a \code{msts} object #' @param h Number of periods ahead to forecast (optional) #' @return Numerical matrix.
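#' The matrix has \code{frequency(x) - 1} columns, one 0/1 indicator per season with the final season dropped; for monthly data the columns are labelled Jan, ..., Nov, and for quarterly data Q1, Q2, Q3.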
#' @author Rob J Hyndman #' @seealso \code{\link{fourier}} #' @keywords ts #' @examples #' #' plot(ldeaths) #' #' # Using seasonal dummy variables #' month <- seasonaldummy(ldeaths) #' deaths.lm <- tslm(ldeaths ~ month) #' tsdisplay(residuals(deaths.lm)) #' ldeaths.fcast <- forecast(deaths.lm, #' data.frame(month=I(seasonaldummy(ldeaths,36)))) #' plot(ldeaths.fcast) #' #' # A simpler approach to seasonal dummy variables #' deaths.lm <- tslm(ldeaths ~ season) #' ldeaths.fcast <- forecast(deaths.lm, h=36) #' plot(ldeaths.fcast) #' #' @export seasonaldummy <- function(x, h=NULL) { if (!is.ts(x)) { stop("Not a time series") } else { fr.x <- frequency(x) } if (is.null(h)) { if (fr.x == 1) { stop("Non-seasonal time series") } dummy <- as.factor(cycle(x)) dummy.mat <- matrix(0, ncol = frequency(x) - 1, nrow = length(x)) nrow <- 1:length(x) for (i in 1:(frequency(x) - 1)) dummy.mat[dummy == paste(i), i] <- 1 colnames(dummy.mat) <- if (fr.x == 12) { month.abb[1:11] } else if (fr.x == 4) { c("Q1", "Q2", "Q3") } else { paste("S", 1:(fr.x - 1), sep = "") } return(dummy.mat) } else { return(seasonaldummy(ts(rep(0, h), start = tsp(x)[2] + 1 / fr.x, frequency = fr.x))) } } #' @rdname seasonaldummy #' @export seasonaldummyf <- function(x, h) { warning("seasonaldummyf() is deprecated, please use seasonaldummy()") if (!is.ts(x)) { stop("Not a time series") } f <- frequency(x) return(seasonaldummy(ts(rep(0, h), start = tsp(x)[2] + 1 / f, frequency = f))) } #' Fourier terms for modelling seasonality #' #' \code{fourier} returns a matrix containing terms from a Fourier series, up #' to order \code{K}, suitable for use in \code{\link{Arima}}, #' \code{\link{auto.arima}}, or \code{\link{tslm}}. #' #' \code{fourierf} is deprecated, instead use the \code{h} argument in #' \code{fourier}. #' #' The period of the Fourier terms is determined from the time series #' characteristics of \code{x}. When \code{h} is missing, the length of #' \code{x} also determines the number of rows for the matrix returned by #' \code{fourier}. Otherwise, the value of \code{h} determines the number of #' rows for the matrix returned by \code{fourier}, typically used for #' forecasting. The values within \code{x} are not used. #' #' Typical use would omit \code{h} when generating Fourier terms for training a model #' and include \code{h} when generating Fourier terms for forecasting. #' #' When \code{x} is a \code{ts} object, the value of \code{K} should be an #' integer and specifies the number of sine and cosine terms to return. Thus, #' the matrix returned has \code{2*K} columns. #' #' When \code{x} is a \code{msts} object, then \code{K} should be a vector of #' integers specifying the number of sine and cosine terms for each of the #' seasonal periods. Then the matrix returned will have \code{2*sum(K)} #' columns. #' #' @param x Seasonal time series: a \code{ts} or a \code{msts} object #' @param K Maximum order(s) of Fourier terms #' @param h Number of periods ahead to forecast (optional) #' @return Numerical matrix. 
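#' Columns come in sine/cosine pairs labelled by harmonic and period; for example, \code{fourier(USAccDeaths, K = 2)} returns columns S1-12, C1-12, S2-12 and C2-12. Sine columns that are identically zero (which occurs when \code{2 * K[j]} equals the period) are dropped.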
#' @author Rob J Hyndman #' @seealso \code{\link{seasonaldummy}} #' @keywords ts #' @examples #' #' library(ggplot2) #' #' # Using Fourier series for a "ts" object #' # K is chosen to minimize the AICc #' deaths.model <- auto.arima(USAccDeaths, xreg=fourier(USAccDeaths,K=5), seasonal=FALSE) #' deaths.fcast <- forecast(deaths.model, xreg=fourier(USAccDeaths, K=5, h=36)) #' autoplot(deaths.fcast) + xlab("Year") #' #' # Using Fourier series for a "msts" object #' taylor.lm <- tslm(taylor ~ fourier(taylor, K = c(3, 3))) #' taylor.fcast <- forecast(taylor.lm, #' data.frame(fourier(taylor, K = c(3, 3), h = 270))) #' autoplot(taylor.fcast) #' #' @export fourier <- function(x, K, h=NULL) { if (is.null(h)) { return(...fourier(x, K, 1:NROW(x))) } else { return(...fourier(x, K, NROW(x) + (1:h))) } } #' @rdname fourier #' @export fourierf <- function(x, K, h) { warning("fourierf() is deprecated, please use fourier()") return(...fourier(x, K, length(x) + (1:h))) } # Function to do the work. ...fourier <- function(x, K, times) { if (any(class(x) == "msts")) { period <- attr(x, "msts") } else { period <- frequency(x) } # Patch for older versions of R that do not have sinpi and cospi functions. if (!exists("sinpi")) { sinpi <- function(x) { sin(pi * x) } cospi <- function(x) { cos(pi * x) } } if (length(period) != length(K)) { stop("Number of periods does not match number of orders") } if (any(2 * K > period)) { stop("K must not be greater than period/2") } # Compute periods of all Fourier terms p <- numeric(0) labels <- character(0) for (j in seq_along(period)) { if (K[j] > 0) { p <- c(p, (1:K[j]) / period[j]) labels <- c(labels, paste( paste0(c("S", "C"), rep(1:K[j], rep(2, K[j]))), round(period[j]), sep = "-" )) } } # Remove equivalent seasonal periods due to multiple seasonality k <- duplicated(p) p <- p[!k] labels <- labels[!rep(k, rep(2, length(k)))] # Remove columns where sinpi=0 k <- abs(2 * p - round(2 * p)) > .Machine$double.eps # Compute matrix of Fourier terms X <- matrix(NA_real_, nrow = length(times), ncol = 2L * length(p)) for (j in seq_along(p)) { if (k[j]) { X[, 2L * j - 1L] <- sinpi(2 * p[j] * times) } X[, 2L * j] <- cospi(2 * p[j] * times) } colnames(X) <- labels # Remove missing columns X <- X[, !is.na(colSums(X)), drop = FALSE] return(X) } #' Moving-average smoothing #' #' \code{ma} computes a simple moving average smoother of a given time series. #' #' The moving average smoother averages the nearest \code{order} periods of #' each observation. As neighbouring observations of a time series are likely #' to be similar in value, averaging eliminates some of the randomness in the #' data, leaving a smooth trend-cycle component. \deqn{\hat{T}_{t} = #' \frac{1}{m} \sum_{j=-k}^k #' y_{t+j}}{T[t]=1/m(y[t-k]+y[t-k+1]+\ldots+y[t]+\ldots+y[t+k-1]+y[t+k])} where #' \eqn{k=\frac{m-1}{2}}{k=(m-1)/2} #' #' When an even \code{order} is specified, the observations averaged will #' include one more observation from the future than the past (k is rounded #' up). If centre is TRUE, the values from two moving averages (where k is #' rounded up and down respectively) are averaged, centering the moving #' average. #' #' @param x Univariate time series #' @param order Order of moving average smoother #' @param centre If TRUE, then the moving average is centred for even orders. #' @return Numerical time series object containing the simple moving average #' smoothed values.
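#' For example, a centred moving average of order 4 uses the weights \code{c(0.5, 1, 1, 1, 0.5) / 4}; the ends of the returned series are \code{NA} wherever the averaging window is incomplete.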
#' @author Rob J Hyndman #' @seealso \code{\link[stats]{decompose}} #' @keywords ts #' @examples #' #' plot(wineind) #' sm <- ma(wineind,order=12) #' lines(sm,col="red") #' #' @export ma <- function(x, order, centre=TRUE) { if (abs(order - round(order)) > 1e-8) { stop("order must be an integer") } if (order %% 2 == 0 && centre) { # centred and even w <- c(0.5, rep(1, order - 1), 0.5) / order } else { # odd or not centred w <- rep(1, order) / order } return(filter(x, w)) } forecast/R/fitBATS.R0000644000176200001440000005332114323125536013650 0ustar liggesusers# TODO: # # Author: srazbash ############################################################################### fitPreviousBATSModel <- function(y, model, biasadj=FALSE) { seasonal.periods <- model$seasonal.periods if (is.null(seasonal.periods) == FALSE) { seasonal.periods <- as.integer(sort(seasonal.periods)) } paramz <- unParameterise(model$parameters$vect, model$parameters$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.v <- paramz$gamma.v ar.coefs <- paramz$ar.coefs ma.coefs <- paramz$ma.coefs p <- length(ar.coefs) q <- length(ma.coefs) ## Calculate the variance: # 1. Re-set up the matrices w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.v, seasonal.periods, as.integer(p), as.integer(q), PACKAGE = "forecast") F <- makeFMatrix(alpha = alpha, beta = beta.v, small.phi = small.phi, seasonal.periods = seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = ar.coefs, ma.coefs = ma.coefs) # 2. Calculate! y.touse <- y if (!is.null(lambda)) { y.touse <- BoxCox(y, lambda = lambda) lambda <- attr(y.touse, "lambda") } fitted.values.and.errors <- calcModel(y.touse, model$seed.states, F, g$g, w) e <- fitted.values.and.errors$e fitted.values <- fitted.values.and.errors$y.hat variance <- sum((e * e)) / length(y) if (!is.null(lambda)) { fitted.values <- InvBoxCox(fitted.values, lambda = lambda, biasadj, variance) } model.for.output <- model model.for.output$variance <- variance model.for.output$fitted.values <- c(fitted.values) model.for.output$errors <- c(e) model.for.output$x <- fitted.values.and.errors$x model.for.output$y <- y attributes(model.for.output$fitted.values) <- attributes(model.for.output$errors) <- attributes(y) return(model.for.output) } fitSpecificBATS <- function(y, use.box.cox, use.beta, use.damping, seasonal.periods=NULL, starting.params=NULL, x.nought=NULL, ar.coefs=NULL, ma.coefs=NULL, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE) { if (!is.null(seasonal.periods)) { seasonal.periods <- as.integer(sort(seasonal.periods)) } ## Meaning/purpose of the first if() statement: If this is the first pass, then use default starting values. Otherwise, on the second pass, use the values from the first pass as starting values.
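## First-pass defaults used below: alpha is set very small (1e-6) when the seasonal periods sum to more than 16 and to 0.09 otherwise; beta, the damping parameter small.phi and the gamma values start near their boundary values; and, if a Box-Cox transformation is requested, the initial lambda is taken from init.box.cox when supplied, or estimated with BoxCox.lambda().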
if (is.null(starting.params)) { ## Check for the existence of ARMA() coefficients if (!is.null(ar.coefs)) { p <- length(ar.coefs) } else { p <- 0 } if (!is.null(ma.coefs)) { q <- length(ma.coefs) } else { q <- 0 } # Calculate starting values: if (sum(seasonal.periods) > 16) { alpha <- (1e-6) } else { alpha <- .09 } if (use.beta) { if (sum(seasonal.periods) > 16) { beta.v <- (5e-7) } else { beta.v <- .05 } b <- 0.00 if (use.damping) { small.phi <- .999 } else { small.phi <- 1 } } else { beta.v <- NULL b <- NULL small.phi <- NULL use.damping <- FALSE } if (!is.null(seasonal.periods)) { gamma.v <- rep(.001, length(seasonal.periods)) s.vector <- numeric(sum(seasonal.periods)) # for(s in seasonal.periods) { # s.vector <- cbind(s.vector, numeric(s)) # } } else { gamma.v <- NULL s.vector <- NULL } if (use.box.cox) { if (!is.null(init.box.cox)) { lambda <- init.box.cox } else { lambda <- BoxCox.lambda(y, lower = 0, upper = 1.5) } y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") } else { # the "else" is not needed at the moment lambda <- NULL } } else { paramz <- unParameterise(starting.params$vect, starting.params$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta b <- 0 small.phi <- paramz$small.phi gamma.v <- paramz$gamma.v if (!is.null(seasonal.periods)) { s.vector <- numeric(sum(seasonal.periods)) } else { s.vector <- NULL } # ar.coefs <- paramz$ar.coefs # ma.coefs <- paramz$ma.coefs ## Check for the existence of ARMA() coefficients if (!is.null(ar.coefs)) { p <- length(ar.coefs) } else { p <- 0 } if (!is.null(ma.coefs)) { q <- length(ma.coefs) } else { q <- 0 } } if (is.null(x.nought)) { # Start with the seed states equal to zero if (!is.null(ar.coefs)) { d.vector <- numeric(length(ar.coefs)) } else { d.vector <- NULL } if (!is.null(ma.coefs)) { epsilon.vector <- numeric(length(ma.coefs)) } else { epsilon.vector <- NULL } x.nought <- makeXMatrix(l = 0, b = b, s.vector = s.vector, d.vector = d.vector, epsilon.vector = epsilon.vector)$x } ## Optimise the starting values: # Make the parameter vector parameterise param.vector <- parameterise(alpha = alpha, beta.v = beta.v, small.phi = small.phi, gamma.v = gamma.v, lambda = lambda, ar.coefs = ar.coefs, ma.coefs = ma.coefs) par.scale <- makeParscaleBATS(param.vector$control) # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta.v, gamma.vector=gamma, seasonal.periods=seasonal.periods, p=p, q=q) g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.v, seasonal.periods, as.integer(p), as.integer(q), PACKAGE = "forecast") F <- makeFMatrix(alpha = alpha, beta = beta.v, small.phi = small.phi, seasonal.periods = seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = ar.coefs, ma.coefs = ma.coefs) D <- F - g$g %*% w$w.transpose ## Set up matrices to find the seed states if (use.box.cox) { y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") # x.nought <- BoxCox(x.nought, lambda=lambda) y.tilda <- calcModel(y.transformed, x.nought, F, g$g, w)$e } else { y.tilda <- calcModel(y, x.nought, F, g$g, w)$e } w.tilda.transpose <- matrix(0, nrow = length(y), ncol = ncol(w$w.transpose)) w.tilda.transpose[1, ] <- w$w.transpose # for(i in 2:length(y)) { # w.tilda.transpose[i,] <- w.tilda.transpose[(i-1),] 
%*% D # } w.tilda.transpose <- .Call( "calcWTilda", wTildaTransposes = w.tilda.transpose, Ds = D, PACKAGE = "forecast" ) ## If there is a seasonal component in the model, then the follow adjustment need to be made so that the seed states can be found if (!is.null(seasonal.periods)) { # drop the lines from w.tilda.transpose that correspond to the last seasonal value of each seasonal period list.cut.w <- cutW(use.beta = use.beta, w.tilda.transpose = w.tilda.transpose, seasonal.periods = seasonal.periods, p = p, q = q) w.tilda.transpose <- list.cut.w$matrix mask.vector <- list.cut.w$mask.vector ## Run the regression to find the SEED STATES coefs <- lm(t(y.tilda) ~ w.tilda.transpose - 1)$coefficients ## Find the ACTUAL SEASONAL seed states x.nought <- calcSeasonalSeeds(use.beta = use.beta, coefs = coefs, seasonal.periods = seasonal.periods, mask.vector = mask.vector, p = p, q = q) } else { # Remove the AR() and MA() bits if they exist if ((p != 0) | (q != 0)) { end.cut <- ncol(w.tilda.transpose) start.cut <- end.cut - (p + q) + 1 w.tilda.transpose <- w.tilda.transpose[, -c(start.cut:end.cut)] } x.nought <- lm(t(y.tilda) ~ w.tilda.transpose - 1)$coefficients x.nought <- matrix(x.nought, nrow = length(x.nought), ncol = 1) ## Replace the AR() and MA() bits if they exist if ((p != 0) | (q != 0)) { arma.seed.states <- numeric((p + q)) arma.seed.states <- matrix(arma.seed.states, nrow = length(arma.seed.states), ncol = 1) x.nought <- rbind(x.nought, arma.seed.states) } } #### # Set up environment opt.env <- new.env() assign("F", F, envir = opt.env) assign("w.transpose", w$w.transpose, envir = opt.env) assign("g", g$g, envir = opt.env) assign("gamma.bold.matrix", g$gamma.bold.matrix, envir = opt.env) assign("y", matrix(y, nrow = 1, ncol = length(y)), envir = opt.env) assign("y.hat", matrix(0, nrow = 1, ncol = length(y)), envir = opt.env) assign("e", matrix(0, nrow = 1, ncol = length(y)), envir = opt.env) assign("x", matrix(0, nrow = length(x.nought), ncol = length(y)), envir = opt.env) if (!is.null(seasonal.periods)) { tau <- sum(seasonal.periods) } else { tau <- 0 } ## Second pass of optimisation if (use.box.cox) { # Un-transform the seed states # x.nought.untransformed <- InvBoxCox(x.nought, lambda=lambda) assign("x.nought.untransformed", InvBoxCox(x.nought, lambda = lambda), envir = opt.env) # Optimise the likelihood function optim.like <- optim(par = param.vector$vect, fn = calcLikelihood, method = "Nelder-Mead", opt.env = opt.env, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, p = p, q = q, tau = tau, bc.lower = bc.lower, bc.upper = bc.upper, control = list(maxit = (100 * length(param.vector$vect) ^ 2), parscale = par.scale)) # Get the parameters out of the param.vector paramz <- unParameterise(optim.like$par, param.vector$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.v <- paramz$gamma.v ar.coefs <- paramz$ar.coefs ma.coefs <- paramz$ma.coefs # Transform the seed states x.nought <- BoxCox(opt.env$x.nought.untransformed, lambda = lambda) lambda <- attr(x.nought, "lambda") ## Calculate the variance: # 1. 
Re-set up the matrices # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta.v, gamma.vector=gamma, seasonal.periods=seasonal.periods, p=p, q=q) g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.v, seasonal.periods, as.integer(p), as.integer(q), PACKAGE = "forecast") F <- makeFMatrix(alpha = alpha, beta = beta.v, small.phi = small.phi, seasonal.periods = seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = ar.coefs, ma.coefs = ma.coefs) # 2. Calculate! y.transformed <- BoxCox(y, lambda = lambda) lambda <- attr(y.transformed, "lambda") fitted.values.and.errors <- calcModel(y.transformed, x.nought, F, g$g, w) e <- fitted.values.and.errors$e variance <- sum((e * e)) / length(y) fitted.values <- InvBoxCox(fitted.values.and.errors$y.hat, lambda = lambda, biasadj, variance) attr(lambda, "biasadj") <- biasadj # e <- InvBoxCox(e, lambda=lambda) # ee <- y-fitted.values } else { # else if we are not using the Box-Cox transformation # Optimise the likelihood function if (length(param.vector$vect) > 1) { optim.like <- optim(par = param.vector$vect, fn = calcLikelihoodNOTransformed, method = "Nelder-Mead", opt.env = opt.env, x.nought = x.nought, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, p = p, q = q, tau = tau, control = list(maxit = (100 * length(param.vector$vect) ^ 2), parscale = par.scale)) } else { optim.like <- optim(par = param.vector$vect, fn = calcLikelihoodNOTransformed, method = "BFGS", opt.env = opt.env, x.nought = x.nought, use.beta = use.beta, use.small.phi = use.damping, seasonal.periods = seasonal.periods, p = p, q = q, tau = tau, control = list(parscale = par.scale)) } # Get the parameters out of the param.vector paramz <- unParameterise(optim.like$par, param.vector$control) lambda <- paramz$lambda alpha <- paramz$alpha beta.v <- paramz$beta small.phi <- paramz$small.phi gamma.v <- paramz$gamma.v ar.coefs <- paramz$ar.coefs ma.coefs <- paramz$ma.coefs ## Calculate the variance: # 1. Re-set up the matrices # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta.v, gamma.vector=gamma, seasonal.periods=seasonal.periods, p=p, q=q) g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.v, seasonal.periods, as.integer(p), as.integer(q), PACKAGE = "forecast") F <- makeFMatrix(alpha = alpha, beta = beta.v, small.phi <- small.phi, seasonal.periods = seasonal.periods, gamma.bold.matrix = g$gamma.bold.matrix, ar.coefs = ar.coefs, ma.coefs = ma.coefs) # 2. Calculate! 
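# calcModel() filters the series through the state space recursions (via the C routine calcBATS), returning the one-step-ahead fitted values, errors and states; the variance estimate below is the mean of the squared one-step errors.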
fitted.values.and.errors <- calcModel(y, x.nought, F, g$g, w) e <- fitted.values.and.errors$e fitted.values <- fitted.values.and.errors$y.hat variance <- sum((e * e)) / length(y) } # Get the likelihood likelihood <- optim.like$value # Calculate the AIC aic <- likelihood + 2 * (length(param.vector$vect) + nrow(x.nought)) # Make a list object model.for.output <- list(lambda = lambda, alpha = alpha, beta = beta.v, damping.parameter = small.phi, gamma.values = gamma.v, ar.coefficients = ar.coefs, ma.coefficients = ma.coefs, likelihood = likelihood, optim.return.code = optim.like$convergence, variance = variance, AIC = aic, parameters = list(vect = optim.like$par, control = param.vector$control), seed.states = x.nought, fitted.values = c(fitted.values), errors = c(e), x = fitted.values.and.errors$x, seasonal.periods = seasonal.periods, y = y) class(model.for.output) <- "bats" #### return(model.for.output) } calcModel <- function(y, x.nought, F, g, w) { # w is passed as a list length.ts <- length(y) x <- matrix(0, nrow = length(x.nought), ncol = length.ts) y.hat <- matrix(0, nrow = 1, ncol = length.ts) e <- matrix(0, nrow = 1, ncol = length.ts) y.hat[, 1] <- w$w.transpose %*% x.nought e[, 1] <- y[1] - y.hat[, 1] x[, 1] <- F %*% x.nought + g %*% e[, 1] y <- matrix(y, nrow = 1, ncol = length.ts) loop <- .Call("calcBATS", ys = y, yHats = y.hat, wTransposes = w$w.transpose, Fs = F, xs = x, gs = g, es = e, PACKAGE = "forecast") return(list(y.hat = loop$y.hat, e = loop$e, x = loop$x)) } calcLikelihood <- function(param.vector, opt.env, use.beta, use.small.phi, seasonal.periods, p=0, q=0, tau=0, bc.lower=0, bc.upper=1) { # param vector should be as follows: Box-Cox.parameter, alpha, beta, small.phi, gamma.vector, ar.coefs, ma.coefs # Put the components of the param.vector into meaningful individual variables box.cox.parameter <- param.vector[1] alpha <- param.vector[2] if (use.beta) { if (use.small.phi) { small.phi <- param.vector[3] beta.v <- param.vector[4] gamma.start <- 5 } else { small.phi <- 1 beta.v <- param.vector[3] gamma.start <- 4 } } else { small.phi <- NULL beta.v <- NULL gamma.start <- 3 } if (!is.null(seasonal.periods)) { gamma.vector <- param.vector[gamma.start:(gamma.start + length(seasonal.periods) - 1)] final.gamma.pos <- gamma.start + length(gamma.vector) - 1 } else { gamma.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (p != 0) { ar.coefs <- matrix(param.vector[(final.gamma.pos + 1):(final.gamma.pos + p)], nrow = 1, ncol = p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- matrix(param.vector[(final.gamma.pos + p + 1):length(param.vector)], nrow = 1, ncol = q) } else { ma.coefs <- NULL } x.nought <- BoxCox(opt.env$x.nought.untransformed, lambda = box.cox.parameter) lambda <- attr(x.nought, "lambda") # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) # w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") .Call("updateWtransposeMatrix", wTranspose_s = opt.env$w.transpose, smallPhi_s = small.phi, tau_s = as.integer(tau), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta, gamma.vector=gamma.vector, seasonal.periods=seasonal.periods, p=p, q=q) # g <- .Call("makeBATSGMatrix", as.numeric(alpha), beta.v, gamma.vector, seasonal.periods, as.integer(p), as.integer(q), PACKAGE="forecast") .Call("updateGMatrix", g_s = 
opt.env$g, gammaBold_s = opt.env$gamma.bold.matrix, alpha_s = alpha, beta_s = beta.v, gammaVector_s = gamma.vector, seasonalPeriods_s = seasonal.periods, PACKAGE = "forecast") # F <- makeFMatrix(alpha=alpha, beta=beta.v, small.phi=small.phi, seasonal.periods=seasonal.periods, gamma.bold.matrix=g$gamma.bold.matrix, ar.coefs=ar.coefs, ma.coefs=ma.coefs) .Call("updateFMatrix", opt.env$F, small.phi, alpha, beta.v, opt.env$gamma.bold.matrix, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") mat.transformed.y <- BoxCox(opt.env$y, box.cox.parameter) lambda <- attr(mat.transformed.y, "lambda") n <- ncol(opt.env$y) .Call("calcBATSFaster", ys = mat.transformed.y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, sPeriods_s = seasonal.periods, betaV = beta.v, tau_s = as.integer(tau), p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") log.likelihood <- n * log(sum(opt.env$e ^ 2)) - 2 * (box.cox.parameter - 1) * sum(log(opt.env$y)) assign("D", (opt.env$F - opt.env$g %*% opt.env$w.transpose), envir = opt.env) if (checkAdmissibility(opt.env, box.cox = box.cox.parameter, small.phi = small.phi, ar.coefs = ar.coefs, ma.coefs = ma.coefs, tau = tau, bc.lower = bc.lower, bc.upper = bc.upper)) { return(log.likelihood) } else { return(10 ^ 20) } } calcLikelihoodNOTransformed <- function(param.vector, opt.env, x.nought, use.beta, use.small.phi, seasonal.periods, p=0, q=0, tau=0) { # The likelihood function without the Box-Cox Transformation # param vector should be as follows: alpha, beta, small.phi, gamma.vector, ar.coefs, ma.coefs # Put the components of the param.vector into meaningful individual variables alpha <- param.vector[1] if (use.beta) { if (use.small.phi) { small.phi <- param.vector[2] beta.v <- param.vector[3] gamma.start <- 4 } else { small.phi <- 1 beta.v <- param.vector[2] gamma.start <- 3 } } else { small.phi <- NULL beta.v <- NULL gamma.start <- 2 } if (!is.null(seasonal.periods)) { gamma.vector <- param.vector[gamma.start:(gamma.start + length(seasonal.periods) - 1)] final.gamma.pos <- gamma.start + length(gamma.vector) - 1 } else { gamma.vector <- NULL final.gamma.pos <- gamma.start - 1 } if (p != 0) { ar.coefs <- matrix(param.vector[(final.gamma.pos + 1):(final.gamma.pos + p)], nrow = 1, ncol = p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- matrix(param.vector[(final.gamma.pos + p + 1):length(param.vector)], nrow = 1, ncol = q) } else { ma.coefs <- NULL } # w <- makeWMatrix(small.phi=small.phi, seasonal.periods=seasonal.periods, ar.coefs=ar.coefs, ma.coefs=ma.coefs) # w <- .Call("makeBATSWMatrix", smallPhi_s = small.phi, sPeriods_s = seasonal.periods, arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, PACKAGE = "forecast") .Call("updateWtransposeMatrix", wTranspose_s = opt.env$w.transpose, smallPhi_s = small.phi, tau_s = as.integer(tau), arCoefs_s = ar.coefs, maCoefs_s = ma.coefs, p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") # g <- makeGMatrix(alpha=alpha, beta=beta, gamma.vector=gamma.vector, seasonal.periods=seasonal.periods, p=p, q=q) # g <- .Call("makeBATSGMatrix", alpha, beta.v, gamma.vector, seasonal.periods, as.integer(p), as.integer(q), PACKAGE="forecast") .Call("updateGMatrix", g_s = opt.env$g, gammaBold_s = opt.env$gamma.bold.matrix, alpha_s = alpha, beta_s = beta.v, gammaVector_s = gamma.vector, seasonalPeriods_s = seasonal.periods, PACKAGE = "forecast") # F <- makeFMatrix(alpha=alpha, beta=beta.v, small.phi=small.phi, seasonal.periods=seasonal.periods, 
gamma.bold.matrix=g$gamma.bold.matrix, ar.coefs=ar.coefs, ma.coefs=ma.coefs) .Call("updateFMatrix", opt.env$F, small.phi, alpha, beta.v, opt.env$gamma.bold.matrix, ar.coefs, ma.coefs, tau, PACKAGE = "forecast") n <- ncol(opt.env$y) ######################################################################################### # e <- calcModel(y=y, x.nought=x.nought, F=F, g=g$g, w=w)$e ###################### #### calcModel() code: ## # x <- matrix(0, nrow=length(x.nought), ncol=n) # y.hat <- matrix(0,nrow=1, ncol=n) # e <- matrix(0, nrow=1, ncol=n) # opt.env$y.hat[,1] <- w$w.transpose %*% x.nought # opt.env$e[,1] <- opt.env$y[,1]-opt.env$y.hat[,1] # opt.env$x[,1] <- opt.env$F %*% x.nought + g$g %*% opt.env$e[,1] # mat.y <- matrix(opt.env$y, nrow=1, ncol=n) .Call("calcBATSFaster", ys = opt.env$y, yHats = opt.env$y.hat, wTransposes = opt.env$w.transpose, Fs = opt.env$F, xs = opt.env$x, gs = opt.env$g, es = opt.env$e, xNought_s = x.nought, sPeriods_s = seasonal.periods, betaV = beta.v, tau_s = as.integer(tau), p_s = as.integer(p), q_s = as.integer(q), PACKAGE = "forecast") ## #### #################################################################### log.likelihood <- n * log(sum(opt.env$e * opt.env$e)) # D <- opt.env$F - g$g %*% w$w.transpose assign("D", (opt.env$F - opt.env$g %*% opt.env$w.transpose), envir = opt.env) if (checkAdmissibility(opt.env = opt.env, box.cox = NULL, small.phi = small.phi, ar.coefs = ar.coefs, ma.coefs = ma.coefs, tau = tau)) { return(log.likelihood) } else { return(10 ^ 20) } } forecast/R/tbats.R0000644000176200001440000006305014323125536013531 0ustar liggesusers# Author: srazbash ############################################################################### #' TBATS model (Exponential smoothing state space model with Box-Cox #' transformation, ARMA errors, Trend and Seasonal components) #' #' Fits a TBATS model applied to \code{y}, as described in De Livera, Hyndman & #' Snyder (2011). Parallel processing is used by default to speed up the #' computations. #' #' @aliases as.character.tbats print.tbats #' #' @param y The time series to be forecast. Can be \code{numeric}, \code{msts} #' or \code{ts}. Only univariate time series are supported. #' @param use.box.cox \code{TRUE/FALSE} indicates whether to use the Box-Cox #' transformation or not. If \code{NULL} then both are tried and the best fit #' is selected by AIC. #' @param use.trend \code{TRUE/FALSE} indicates whether to include a trend or #' not. If \code{NULL} then both are tried and the best fit is selected by AIC. #' @param use.damped.trend \code{TRUE/FALSE} indicates whether to include a #' damping parameter in the trend or not. If \code{NULL} then both are tried #' and the best fit is selected by AIC. #' @param seasonal.periods If \code{y} is \code{numeric} then seasonal periods #' can be specified with this parameter. #' @param use.arma.errors \code{TRUE/FALSE} indicates whether to include ARMA #' errors or not. If \code{TRUE} the best fit is selected by AIC. If #' \code{FALSE} then the selection algorithm does not consider ARMA errors. #' @param use.parallel \code{TRUE/FALSE} indicates whether or not to use #' parallel processing. #' @param num.cores The number of parallel processes to be used if using #' parallel processing. If \code{NULL} then the number of logical cores is #' detected and all available cores are used. #' @param bc.lower The lower limit (inclusive) for the Box-Cox transformation. #' @param bc.upper The upper limit (inclusive) for the Box-Cox transformation. 
#' @param biasadj Use adjusted back-transformed mean for Box-Cox #' transformations. If TRUE, point forecasts and fitted values are mean #' forecast. Otherwise, these points can be considered the median of the #' forecast densities. #' @param model Output from a previous call to \code{tbats}. If model is #' passed, this same model is fitted to \code{y} without re-estimating any #' parameters. #' @param ... Additional arguments to be passed to \code{auto.arima} when #' choose an ARMA(p, q) model for the errors. (Note that xreg will be ignored, #' as will any arguments concerning seasonality and differencing, but arguments #' controlling the values of p and q will be used.) #' @return An object with class \code{c("tbats", "bats")}. The generic accessor #' functions \code{fitted.values} and \code{residuals} extract useful features #' of the value returned by \code{bats} and associated functions. The fitted #' model is designated TBATS(omega, p,q, phi, ,...,) where omega #' is the Box-Cox parameter and phi is the damping parameter; the error is #' modelled as an ARMA(p,q) process and m1,...,mJ list the seasonal periods #' used in the model and k1,...,kJ are the corresponding number of Fourier #' terms used for each seasonality. #' @author Slava Razbash and Rob J Hyndman #' @seealso \code{\link{tbats.components}}. #' @references De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), #' Forecasting time series with complex seasonal patterns using exponential #' smoothing, \emph{Journal of the American Statistical Association}, #' \bold{106}(496), 1513-1527. #' @keywords ts #' @examples #' #' \dontrun{ #' fit <- tbats(USAccDeaths) #' plot(forecast(fit)) #' #' taylor.fit <- tbats(taylor) #' plot(forecast(taylor.fit))} #' #' @export tbats <- function(y, use.box.cox=NULL, use.trend=NULL, use.damped.trend=NULL, seasonal.periods=NULL, use.arma.errors=TRUE, use.parallel=length(y) > 1000, num.cores=2, bc.lower=0, bc.upper=1, biasadj=FALSE, model=NULL, ...) { if (!is.numeric(y) || NCOL(y) > 1) { stop("y should be a univariate time series") } seriesname <- deparse(substitute(y)) origy <- y attr_y <- attributes(origy) # Get seasonal periods if (is.null(seasonal.periods)) { if ("msts" %in% class(y)) { seasonal.periods <- sort(attr(y, "msts")) } else if ("ts" %in% class(y)) { seasonal.periods <- frequency(y) } else { y <- as.ts(y) seasonal.periods <- 1 } } else { # Add ts attributes if (!("ts" %in% class(y))) { y <- msts(y, seasonal.periods) } } seasonal.periods <- unique(pmax(seasonal.periods, 1)) if (all(seasonal.periods == 1)) { seasonal.periods <- NULL } ny <- length(y) y <- na.contiguous(y) if (ny != length(y)) { warning("Missing values encountered. 
Using longest contiguous portion of time series") if (!is.null(attr_y$tsp)) { attr_y$tsp[1:2] <- range(time(y)) } } # Refit model if available if (!is.null(model)) { if (is.element("tbats", class(model))) { refitModel <- try(fitPreviousTBATSModel(y, model = model), silent = TRUE) } else if (is.element("bats", class(model))) { refitModel <- bats(origy, model = model) } return(refitModel) } # Return constant model if required if (is.constant(y)) { fit <- list( y = y, x = matrix(y, nrow = 1, ncol = ny), errors = y * 0, fitted.values = y, seed.states = matrix(y[1]), AIC = -Inf, likelihood = -Inf, variance = 0, alpha = 0.9999, method = "TBATS", call = match.call() ) return(structure(fit, class = "bats")) } # Check for observations are positive if (any((y <= 0))) { use.box.cox <- FALSE } # Fit non-seasonal model as a benchmark non.seasonal.model <- bats( as.numeric(y), use.box.cox = use.box.cox, use.trend = use.trend, use.damped.trend = use.damped.trend, use.arma.errors = use.arma.errors, use.parallel = use.parallel, num.cores = num.cores, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ... ) # If non-seasonal data, return the non-seasonal model if (is.null(seasonal.periods)) { non.seasonal.model$call <- match.call() attributes(non.seasonal.model$fitted.values) <- attributes(non.seasonal.model$errors) <- attributes(origy) non.seasonal.model$y <- origy return(non.seasonal.model) } else { seasonal.mask <- (seasonal.periods == 1) seasonal.periods <- seasonal.periods[!seasonal.mask] } if (is.null(use.box.cox)) { use.box.cox <- c(FALSE, TRUE) } if (any(use.box.cox)) { init.box.cox <- BoxCox.lambda(y, lower = bc.lower, upper = bc.upper) } else { init.box.cox <- NULL } if (is.null(use.trend)) { use.trend <- c(FALSE, TRUE) } else if (use.trend == FALSE) { use.damped.trend <- FALSE } if (is.null(use.damped.trend)) { use.damped.trend <- c(FALSE, TRUE) } # Set a vector of model params for later comparison model.params <- logical(length = 3) model.params[1] <- any(use.box.cox) model.params[2] <- any(use.trend) model.params[3] <- any(use.damped.trend) y <- as.numeric(y) n <- length(y) k.vector <- rep(1, length(seasonal.periods)) if (use.parallel) { if (is.null(num.cores)) { num.cores <- detectCores(all.tests = FALSE, logical = TRUE) } clus <- makeCluster(num.cores) } best.model <- try(fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE) if (is.element("try-error", class(best.model))) { best.model <- list(AIC = Inf) } for (i in 1:length(seasonal.periods)) { if (seasonal.periods[i] == 2) { next } max.k <- floor(((seasonal.periods[i] - 1) / 2)) if (i != 1) { current.k <- 2 while (current.k <= max.k) { if (seasonal.periods[i] %% current.k != 0) { current.k <- current.k + 1 next } latter <- seasonal.periods[i] / current.k if (any(((seasonal.periods[1:(i - 1)] %% latter) == 0))) { max.k <- current.k - 1 break } else { current.k <- current.k + 1 } } } if (max.k == 1) { next } if (max.k <= 6) { k.vector[i] <- max.k best.model$AIC <- Inf repeat { # old.k <- k.vector[i] # k.vector[i] <- k.vector[i]-1 new.model <- try( fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = 
TRUE ) if (is.element("try-error", class(new.model))) { new.model <- list(AIC = Inf) } if (new.model$AIC > best.model$AIC) { k.vector[i] <- k.vector[i] + 1 break } else { if (k.vector[i] == 1) { break } k.vector[i] <- k.vector[i] - 1 best.model <- new.model } } next } else { # Three different k vectors step.up.k <- k.vector step.down.k <- k.vector step.up.k[i] <- 7 step.down.k[i] <- 5 k.vector[i] <- 6 # Fit three different models ### if(use.parallel) then do parallel if (use.parallel) { k.control.array <- rbind(step.up.k, step.down.k, k.vector) models.list <- clusterApplyLB( clus, c(1:3), parFitSpecificTBATS, y = y, box.cox = model.params[1], trend = model.params[2], damping = model.params[3], seasonal.periods = seasonal.periods, k.control.matrix = k.control.array, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ) up.model <- models.list[[1]] level.model <- models.list[[3]] down.model <- models.list[[2]] } else { up.model <- try( fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = step.up.k, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE ) if (is.element("try-error", class(up.model))) { up.model <- list(AIC = Inf) } level.model <- try( fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE ) if (is.element("try-error", class(level.model))) { level.model <- list(AIC = Inf) } down.model <- try( fitSpecificTBATS( y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = step.down.k, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE ) if (is.element("try-error", class(down.model))) { down.model <- list(AIC = Inf) } } # Decide the best model of the three and then follow that direction to find the optimal k aic.vector <- c(up.model$AIC, level.model$AIC, down.model$AIC) ## If shifting down if (min(aic.vector) == down.model$AIC) { best.model <- down.model k.vector[i] <- 5 repeat{ k.vector[i] <- k.vector[i] - 1 down.model <- try( fitSpecificTBATS( y = y, use.box.cox = model.params[1], use.beta = model.params[2], use.damping = model.params[3], seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj ), silent = TRUE ) if (is.element("try-error", class(down.model))) { down.model <- list(AIC = Inf) } if (down.model$AIC > best.model$AIC) { k.vector[i] <- k.vector[i] + 1 break } else { best.model <- down.model } if (k.vector[i] == 1) { break } } ## If staying level } else if (min(aic.vector) == level.model$AIC) { best.model <- level.model next ## If shifting up } else { best.model <- up.model k.vector[i] <- 7 repeat { k.vector[i] <- k.vector[i] + 1 up.model <- try( fitSpecificTBATS(y, model.params[1], model.params[2], model.params[3], seasonal.periods, k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) if (is.element("try-error", class(up.model))) { up.model <- list(AIC = Inf) } if (up.model$AIC > best.model$AIC) { k.vector[i] <- k.vector[i] - 1 break } else { best.model <- up.model } if 
(k.vector[i] == max.k) { break } } } } } aux.model <- best.model if (non.seasonal.model$AIC < best.model$AIC) { best.model <- non.seasonal.model } if ((length(use.box.cox) == 1) && use.trend[1] && (length(use.trend) == 1) && (length(use.damped.trend) == 1) && (use.parallel)) { # In this case, there is only one alternative. use.parallel <- FALSE stopCluster(clus) } else if ((length(use.box.cox) == 1) && !use.trend[1] && (length(use.trend) == 1) && (use.parallel)) { # As above, in this case, there is only one alternative. use.parallel <- FALSE stopCluster(clus) } if (use.parallel) { # Set up the control array control.array <- NULL for (box.cox in use.box.cox) { for (trend in use.trend) { for (damping in use.damped.trend) { if (!trend && damping) { next } control.line <- c(box.cox, trend, damping) if (!is.null(control.array)) { control.array <- rbind(control.array, control.line) } else { control.array <- control.line } } } } models.list <- clusterApplyLB(clus, c(1:nrow(control.array)), parFilterTBATSSpecifics, y = y, control.array = control.array, model.params = model.params, seasonal.periods = seasonal.periods, k.vector = k.vector, use.arma.errors = use.arma.errors, aux.model = aux.model, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ...) stopCluster(clus) ## Choose the best model #### Get the AICs aics <- numeric(nrow(control.array)) for (i in 1:nrow(control.array)) { aics[i] <- models.list[[i]]$AIC } best.number <- which.min(aics) best.seasonal.model <- models.list[[best.number]] if (best.seasonal.model$AIC < best.model$AIC) { best.model <- best.seasonal.model } } else { for (box.cox in use.box.cox) { for (trend in use.trend) { for (damping in use.damped.trend) { if (all((model.params == c(box.cox, trend, damping)))) { new.model <- filterTBATSSpecifics(y, box.cox, trend, damping, seasonal.periods, k.vector, use.arma.errors, aux.model = aux.model, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ...) } else if (trend || !damping) { new.model <- filterTBATSSpecifics(y, box.cox, trend, damping, seasonal.periods, k.vector, use.arma.errors, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj, ...) } if (new.model$AIC < best.model$AIC) { best.model <- new.model } } } } } best.model$call <- match.call() attributes(best.model$fitted.values) <- attributes(best.model$errors) <- attr_y best.model$y <- origy best.model$series <- seriesname best.model$method <- "TBATS" return(best.model) } ###################################################################################################################################### parFilterTBATSSpecifics <- function(control.number, y, control.array, model.params, seasonal.periods, k.vector, use.arma.errors, aux.model=NULL, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE, ...)
{ box.cox <- control.array[control.number, 1] trend <- control.array[control.number, 2] damping <- control.array[control.number, 3] if (!all((model.params == c(box.cox, trend, damping)))) { first.model <- try( fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) } else { first.model <- aux.model } if (is.element("try-error", class(first.model))) { first.model <- list(AIC = Inf) } if (use.arma.errors) { suppressWarnings(arma <- try(auto.arima(as.numeric(first.model$errors), d = 0, ...), silent = TRUE)) if (!is.element("try-error", class(arma))) { p <- arma$arma[1] q <- arma$arma[2] if ((p != 0) || (q != 0)) { # Did auto.arima() find any AR() or MA() coefficients? if (p != 0) { ar.coefs <- numeric(p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- numeric(q) } else { ma.coefs <- NULL } starting.params <- first.model$parameters second.model <- try( fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, ar.coefs = ar.coefs, ma.coefs = ma.coefs, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) if (is.element("try-error", class(second.model))) { second.model <- list(AIC = Inf) } if (second.model$AIC < first.model$AIC) { return(second.model) } else { return(first.model) } } else { # Else auto.arima() did not find any AR() or MA()coefficients return(first.model) } } else { return(first.model) } } else { return(first.model) } } ################################################################################################# parFitSpecificTBATS <- function(control.number, y, box.cox, trend, damping, seasonal.periods, k.control.matrix, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE) { k.vector <- k.control.matrix[control.number, ] model <- try( fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) if (is.element("try-error", class(model))) { model <- list(AIC = Inf) } return(model) } filterTBATSSpecifics <- function(y, box.cox, trend, damping, seasonal.periods, k.vector, use.arma.errors, aux.model=NULL, init.box.cox=NULL, bc.lower=0, bc.upper=1, biasadj=FALSE, ...) { if (is.null(aux.model)) { first.model <- try( fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) } else { first.model <- aux.model } if (is.element("try-error", class(first.model))) { first.model <- list(AIC = Inf) } if (use.arma.errors) { suppressWarnings(arma <- try(auto.arima(as.numeric(first.model$errors), d = 0, ...), silent = TRUE)) if (!is.element("try-error", class(arma))) { p <- arma$arma[1] q <- arma$arma[2] if ((p != 0) || (q != 0)) { # Did auto.arima() find any AR() or MA() coefficients? 
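# If so, refit the model with ARMA(p, q) errors (the AR and MA coefficients
# are seeded at zero) and keep whichever of the two fits has the lower AIC.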
if (p != 0) { ar.coefs <- numeric(p) } else { ar.coefs <- NULL } if (q != 0) { ma.coefs <- numeric(q) } else { ma.coefs <- NULL } starting.params <- first.model$parameters second.model <- try( fitSpecificTBATS(y, use.box.cox = box.cox, use.beta = trend, use.damping = damping, seasonal.periods = seasonal.periods, k.vector = k.vector, ar.coefs = ar.coefs, ma.coefs = ma.coefs, init.box.cox = init.box.cox, bc.lower = bc.lower, bc.upper = bc.upper, biasadj = biasadj), silent = TRUE ) if (is.element("try-error", class(second.model))) { second.model <- list(AIC = Inf) } if (second.model$AIC < first.model$AIC) { return(second.model) } else { return(first.model) } } else { # Else auto.arima() did not find any AR() or MA()coefficients return(first.model) } } else { return(first.model) } } else { return(first.model) } } makeSingleFourier <- function(j, m, T) { frier <- matrix(0, nrow = T, ncol = 2) for (t in 1:T) { frier[t, 1] <- cos((2 * pi * j) / m) frier[t, 2] <- sin((2 * pi * j) / m) } return(frier) } calcFTest <- function(r.sse, ur.sse, num.restrictions, num.u.params, num.observations) { f.stat <- ((r.sse - ur.sse) / num.restrictions) / (r.sse / (num.observations - num.u.params)) p.value <- pf(f.stat, num.restrictions, (num.observations - num.u.params), lower.tail = FALSE) return(p.value) } #' @rdname fitted.Arima #' @export fitted.tbats <- function(object, h=1, ...) { if (h == 1) { return(object$fitted.values) } else { return(hfitted(object = object, h = h, FUN = "tbats", ...)) } } #' @export print.tbats <- function(x, ...) { cat(as.character(x)) cat("\n") cat("\nCall: ") print(x$call) cat("\nParameters") if (!is.null(x$lambda)) { cat("\n Lambda: ") cat(round(x$lambda, 6)) } cat("\n Alpha: ") cat(x$alpha) if (!is.null(x$beta)) { cat("\n Beta: ") cat(x$beta) cat("\n Damping Parameter: ") cat(round(x$damping.parameter, 6)) } if (!is.null(x$gamma.one.values)) { cat("\n Gamma-1 Values: ") cat(x$gamma.one.values) } if (!is.null(x$gamma.two.values)) { cat("\n Gamma-2 Values: ") cat(x$gamma.two.values) } if (!is.null(x$ar.coefficients)) { cat("\n AR coefficients: ") cat(round(x$ar.coefficients, 6)) } if (!is.null(x$ma.coefficients)) { cat("\n MA coefficients: ") cat(round(x$ma.coefficients, 6)) } cat("\n") cat("\nSeed States:\n") print(x$seed.states) cat("\nSigma: ") cat(sqrt(x$variance)) cat("\nAIC: ") cat(x$AIC) cat("\n") } #' @rdname plot.bats #' #' @examples #' #' \dontrun{ #' fit <- tbats(USAccDeaths) #' plot(fit) #' autoplot(fit, range.bars = TRUE)} #' #' @export plot.tbats <- function(x, main="Decomposition by TBATS model", ...) { out <- tbats.components(x) plot.ts(out, main = main, nc = 1, ...) } #' Extract components of a TBATS model #' #' Extract the level, slope and seasonal components of a TBATS model. The extracted components are Box-Cox transformed using the estimated transformation parameter. #' #' #' @param x A tbats object created by \code{\link{tbats}}. #' @return A multiple time series (\code{mts}) object. The first series is the observed time series. The second series is the trend component of the fitted model. Series three onwards are the seasonal components of the fitted model with one time series for each of the seasonal components. All components are transformed using estimated Box-Cox parameter. #' @author Slava Razbash and Rob J Hyndman #' @seealso \code{\link{tbats}}. #' @references De Livera, A.M., Hyndman, R.J., & Snyder, R. D. 
(2011), #' Forecasting time series with complex seasonal patterns using exponential #' smoothing, \emph{Journal of the American Statistical Association}, #' \bold{106}(496), 1513-1527. #' @keywords ts #' @examples #' #' \dontrun{ #' fit <- tbats(USAccDeaths, use.parallel=FALSE) #' components <- tbats.components(fit) #' plot(components)} #' #' @export tbats.components <- function(x) { # Get original data, transform if necessary if (!is.null(x$lambda)) { y <- BoxCox(x$y, x$lambda) lambda <- attr(y, "lambda") } else { y <- x$y } # Compute matrices tau <- ifelse(!is.null(x$k.vector), 2 * sum(x$k.vector), 0) w <- .Call( "makeTBATSWMatrix", smallPhi_s = x$damping.parameter, kVector_s = as.integer(x$k.vector), arCoefs_s = x$ar.coefficients, maCoefs_s = x$ma.coefficients, tau_s = as.integer(tau), PACKAGE = "forecast" ) out <- cbind(observed = c(y), level = x$x[1, ]) if (!is.null(x$beta)) { out <- cbind(out, slope = x$x[2, ]) } # Add seasonal components if they exist if (tau > 0) { nonseas <- 2 + !is.null(x$beta) # No. non-seasonal columns in out nseas <- length(x$seasonal.periods) # No. seasonal periods seas.states <- cbind(x$seed.states, x$x)[-(1:(1 + !is.null(x$beta))), ] seas.states <- seas.states[, -ncol(seas.states)] w <- w$w.transpose[, -(1:(1 + !is.null(x$beta))), drop = FALSE] w <- w[, 1:tau, drop = FALSE] j <- cumsum(c(1, 2 * x$k.vector)) for (i in 1:nseas) out <- cbind(out, season = c(w[, j[i]:(j[i + 1] - 1), drop = FALSE] %*% seas.states[j[i]:(j[i + 1] - 1), ])) if (nseas > 1) { colnames(out)[nonseas + 1:nseas] <- paste("season", 1:nseas, sep = "") } } # Add time series characteristics out <- ts(out) tsp(out) <- tsp(y) return(out) } forecast/R/forecast2.R0000644000176200001440000005077714332530471014316 0ustar liggesusers# Mean forecast #' Mean Forecast #' #' Returns forecasts and prediction intervals for an iid model applied to y. #' #' The iid model is \deqn{Y_t=\mu + Z_t}{Y[t]=mu + Z[t]} where \eqn{Z_t}{Z[t]} #' is a normal iid error. Forecasts are given by \deqn{Y_n(h)=\mu}{Y[n+h]=mu} #' where \eqn{\mu}{mu} is estimated by the sample mean. #' #' @param y a numeric vector or time series of class \code{ts} #' @param h Number of periods for forecasting #' @param level Confidence levels for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param bootstrap If TRUE, use a bootstrap method to compute prediction intervals. #' Otherwise, assume a normal distribution. #' @param npaths Number of bootstrapped sample paths to use if \code{bootstrap==TRUE}. #' @param x Deprecated. Included for backwards compatibility. #' @inheritParams forecast.ts #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{meanf}. 
#' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values (one-step #' forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link{rwf}} #' @keywords ts #' @examples #' nile.fcast <- meanf(Nile, h=10) #' plot(nile.fcast) #' #' @export meanf <- function(y, h=10, level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=FALSE, bootstrap=FALSE, npaths=5000, x=y) { n <- length(x) if (!is.null(lambda)) { origx <- x x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } meanx <- mean(x, na.rm = TRUE) fits <- rep(meanx, length(x)) res <- x - fits f <- rep(meanx, h) if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nconf <- length(level) s <- sd(x, na.rm = TRUE) if (bootstrap) { e <- na.omit(res) - mean(res, na.rm = TRUE) sim <- matrix(sample(e, size = npaths * h, replace = TRUE), ncol = npaths, nrow = h) sim <- sweep(sim, 1, f, "+") lower <- t(apply(sim, 1, quantile, prob = .5 - level / 200)) upper <- t(apply(sim, 1, quantile, prob = .5 + level / 200)) } else { lower <- upper <- matrix(NA, nrow = h, ncol = nconf) for (i in 1:nconf) { if (n > 1) { tfrac <- qt(0.5 - level[i] / 200, n - 1) } else { tfrac <- -Inf } w <- -tfrac * s * sqrt(1 + 1 / n) lower[, i] <- f - w upper[, i] <- f + w } } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") if (is.ts(x)) { fits <- copy_msts(x, fits) res <- copy_msts(x, res) f <- future_msts(x, f) lower <- future_msts(x, lower) upper <- future_msts(x, upper) } if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda) x <- origx f <- InvBoxCox(f, lambda, biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } out <- list( method = "Mean", level = level, x = x, series = deparse(substitute(y)), mean = f, lower = lower, upper = upper, model = structure(list(mu = f[1], mu.se = s / sqrt(length(x)), sd = s, bootstrap = bootstrap), class = "meanf"), lambda = lambda, fitted = fits, residuals = res ) out$model$call <- match.call() return(structure(out, class = "forecast")) } #' Box Cox Transformation #' #' BoxCox() returns a transformation of the input variable using a Box-Cox #' transformation. InvBoxCox() reverses the transformation. #' #' The Box-Cox transformation (as given by Bickel & Doksum 1981) is given by \deqn{f_\lambda(x) =(sign(x)|x|^\lambda - #' 1)/\lambda}{f(x;lambda)=(sign(x)|x|^lambda - 1)/lambda} if \eqn{\lambda\ne0}{lambda #' is not equal to 0}. For \eqn{\lambda=0}{lambda=0}, #' \deqn{f_0(x)=\log(x)}{f(x;0)=log(x)}. #' #' @param x a numeric vector or time series of class \code{ts}. #' @param lambda transformation parameter. 
If \code{lambda = "auto"}, then #' the transformation parameter lambda is chosen using BoxCox.lambda (with a lower bound of -0.9) #' @param biasadj Use adjusted back-transformed mean for Box-Cox #' transformations. If transformed data is used to produce forecasts and fitted values, #' a regular back transformation will result in median forecasts. If biasadj is TRUE, #' an adjustment will be made to produce mean forecasts and fitted values. #' @param fvar Optional parameter required if biasadj=TRUE. Can either be the #' forecast variance, or a list containing the interval \code{level}, and the #' corresponding \code{upper} and \code{lower} intervals. #' @return a numeric vector of the same length as x. #' @author Rob J Hyndman & Mitchell O'Hara-Wild #' @seealso \code{\link{BoxCox.lambda}} #' @references Box, G. E. P. and Cox, D. R. (1964) An analysis of #' transformations. \emph{JRSS B} \bold{26} 211--246. #' Bickel, P. J. and Doksum K. A. (1981) An Analysis of Transformations Revisited. \emph{JASA} \bold{76} 296-311. #' @keywords ts #' @examples #' #' lambda <- BoxCox.lambda(lynx) #' lynx.fit <- ar(BoxCox(lynx,lambda)) #' plot(forecast(lynx.fit,h=20,lambda=lambda)) #' #' @export BoxCox <- function(x, lambda) { if (lambda == "auto") { lambda <- BoxCox.lambda(x, lower = -0.9) } if (lambda < 0) { x[x < 0] <- NA } if (lambda == 0) { out <- log(x) } else { out <- (sign(x) * abs(x) ^ lambda - 1) / lambda } if (!is.null(colnames(x))) { colnames(out) <- colnames(x) } attr(out, "lambda") <- lambda return(out) } #' @rdname BoxCox #' @export InvBoxCox <- function(x, lambda, biasadj=FALSE, fvar=NULL) { if (lambda < 0) { x[x > -1 / lambda] <- NA } if (lambda == 0) { out <- exp(x) } else { xx <- x * lambda + 1 out <- sign(xx) * abs(xx) ^ (1 / lambda) } if (!is.null(colnames(x))) { colnames(out) <- colnames(x) } if (is.null(biasadj)) { biasadj <- attr(lambda, "biasadj") } if (!is.logical(biasadj)) { warning("biasadj information not found, defaulting to FALSE.") biasadj <- FALSE } if (biasadj) { if (is.null(fvar)) { stop("fvar must be provided when biasadj=TRUE") } if (is.list(fvar)) { # Create fvar from forecast interval level <- max(fvar$level) if (NCOL(fvar$upper) > 1 && NCOL(fvar$lower)) { i <- match(level, fvar$level) fvar$upper <- fvar$upper[, i] fvar$lower <- fvar$lower[, i] } if (level > 1) { level <- level / 100 } level <- mean(c(level, 1)) # Note: Use BoxCox transformed upper and lower values fvar <- as.numeric((fvar$upper - fvar$lower) / stats::qnorm(level) / 2) ^ 2 } if (NCOL(fvar) > 1) { fvar <- diag(fvar) } out <- out * (1 + 0.5 * as.numeric(fvar) * (1 - lambda) / (out) ^ (2 * lambda)) } return(out) } # Deprecated InvBoxCoxf <- function(x=NULL, fvar=NULL, lambda=NULL) { message("Deprecated, use InvBoxCox instead") if (is.null(lambda)) { stop("Must specify lambda using lambda=numeric(1)") } if (is.null(fvar)) { level <- max(x$level) if (NCOL(x$upper) > 1 && NCOL(x$lower)) { i <- match(level, x$level) x$upper <- x$upper[, i] x$lower <- x$lower[, i] } if (level > 1) { level <- level / 100 } level <- mean(c(level, 1)) # Note: Use BoxCox transformed upper and lower values fvar <- ((x$upper - x$lower) / stats::qnorm(level) / 2) ^ 2 } else { x <- list(mean = x) } if ("matrix" %in% class(fvar)) { fvar <- diag(fvar) } return(x$mean * (1 + 0.5 * fvar * (1 - lambda) / (x$mean) ^ (2 * lambda))) } #' Forecasting using Structural Time Series models #' #' Returns forecasts and other information for univariate structural time #' series models. 
#' #' This function calls \code{predict.StructTS} and constructs an object of #' class "\code{forecast}" from the results. #' #' @param object An object of class "\code{StructTS}". Usually the result of a #' call to \code{\link[stats]{StructTS}}. #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param ... Other arguments. #' @inheritParams forecast.ts #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{forecast.StructTS}. #' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted model. #' That is x minus fitted values.} \item{fitted}{Fitted values (one-step #' forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link[stats]{StructTS}}. #' @keywords ts #' @examples #' fit <- StructTS(WWWusage,"level") #' plot(forecast(fit)) #' #' @export forecast.StructTS <- function(object, h=ifelse(object$coef["epsilon"] > 1e-10, 2 * object$xtsp[3], 10), level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=NULL, ...) 
{ x <- object$data pred <- predict(object, n.ahead = h) if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nint <- length(level) upper <- lower <- matrix(NA, ncol = nint, nrow = length(pred$pred)) for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- pred$pred - qq * pred$se upper[, i] <- pred$pred + qq * pred$se } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") if (is.element("seas", names(object$coef))) { method <- "Basic structural model" } else if (is.element("slope", names(object$coef))) { method <- "Local linear structural model" } else { method <- "Local level structural model" } # Compute fitted values and residuals sigma2 <- c(predict(object, n.ahead=1)$se) res <- residuals(object) * sigma2 fits <- x - res if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda) x <- InvBoxCox(x, lambda) pred$pred <- InvBoxCox(pred$pred, lambda, biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } mean <- future_msts(x, pred$pred) lower <- future_msts(x, lower) upper <- future_msts(x, upper) fits <- copy_msts(x, fits) res <- copy_msts(x, res) return(structure( list( method = method, model = object, level = level, mean = pred$pred, lower = lower, upper = upper, x = x, series = object$series, fitted = fits, residuals = res ), class = "forecast" )) } #' Forecasting using Holt-Winters objects #' #' Returns forecasts and other information for univariate Holt-Winters time #' series models. #' #' This function calls \code{\link[stats]{predict.HoltWinters}} and constructs #' an object of class "\code{forecast}" from the results. #' #' It is included for completeness, but the \code{\link{ets}} is recommended #' for use instead of \code{\link[stats]{HoltWinters}}. #' #' @param object An object of class "\code{HoltWinters}". Usually the result of #' a call to \code{\link[stats]{HoltWinters}}. #' @param h Number of periods for forecasting #' @param level Confidence level for prediction intervals. #' @param fan If TRUE, level is set to seq(51,99,by=3). This is suitable for #' fan plots. #' @param ... Other arguments. #' @inheritParams forecast.ts #' #' @return An object of class "\code{forecast}". #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts and #' prediction intervals. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by #' \code{forecast.HoltWinters}. 
#' #' An object of class \code{"forecast"} is a list containing at least the #' following elements: \item{model}{A list containing information about the #' fitted model} \item{method}{The name of the forecasting method as a #' character string} \item{mean}{Point forecasts as a time series} #' \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper #' limits for prediction intervals} \item{level}{The confidence values #' associated with the prediction intervals} \item{x}{The original time series #' (either \code{object} itself or the time series used to create the model #' stored as \code{object}).} \item{residuals}{Residuals from the fitted #' model.} \item{fitted}{Fitted values (one-step forecasts)} #' @author Rob J Hyndman #' @seealso \code{\link[stats]{predict.HoltWinters}}, #' \code{\link[stats]{HoltWinters}}. #' @keywords ts #' @examples #' fit <- HoltWinters(WWWusage,gamma=FALSE) #' plot(forecast(fit)) #' #' @export forecast.HoltWinters <- function(object, h=ifelse(frequency(object$x) > 1, 2 * frequency(object$x), 10), level=c(80, 95), fan=FALSE, lambda=NULL, biasadj=NULL, ...) { x <- object$x if (!is.null(object$exponential)) { if (object$exponential) { stop("Forecasting for exponential trend not yet implemented.") } } if (fan) { level <- seq(51, 99, by = 3) } else { if (min(level) > 0 && max(level) < 1) { level <- 100 * level } else if (min(level) < 0 || max(level) > 99.99) { stop("Confidence limit out of range") } } nint <- length(level) pred <- predict(object, n.ahead = h, prediction.interval = TRUE, level = level[1] / 100) pmean <- pred[, 1] upper <- lower <- matrix(NA, ncol = nint, nrow = length(pred[, 1])) se <- (pred[, 2] - pred[, 3]) / (2 * qnorm(0.5 * (1 + level[1] / 100))) for (i in 1:nint) { qq <- qnorm(0.5 * (1 + level[i] / 100)) lower[, i] <- pmean - qq * se upper[, i] <- pmean + qq * se } colnames(lower) <- colnames(upper) <- paste(level, "%", sep = "") if (!is.null(lambda)) { fitted <- InvBoxCox(object$fitted[, 1], lambda) x <- InvBoxCox(x, lambda) pmean <- InvBoxCox(pmean, lambda, biasadj, list(level = level, upper = upper, lower = lower)) lower <- InvBoxCox(lower, lambda) upper <- InvBoxCox(upper, lambda) } else { fitted <- object$fitted[, 1] } # Pad fitted values with NAs nf <- length(fitted) n <- length(x) fitted <- ts(c(rep(NA, n - nf), fitted)) fitted <- copy_msts(object$x, fitted) pmean <- future_msts(object$x, pmean) lower <- future_msts(object$x, lower) upper <- future_msts(object$x, upper) return(structure( list( method = "HoltWinters", model = object, level = level, mean = pmean, lower = lower, upper = upper, x = x, series = deparse(object$call$x), fitted = fitted, residuals = x - fitted ), class = "forecast" )) } ## CROSTON #' Forecasts for intermittent demand using Croston's method #' #' Returns forecasts and other information for Croston's forecasts applied to #' y. #' #' Based on Croston's (1972) method for intermittent demand forecasting, also #' described in Shenstone and Hyndman (2005). Croston's method involves using #' simple exponential smoothing (SES) on the non-zero elements of the time #' series and a separate application of SES to the times between non-zero #' elements of the time series. The smoothing parameters of the two #' applications of SES are assumed to be equal and are denoted by \code{alpha}. #' #' Note that prediction intervals are not computed as Croston's method has no #' underlying stochastic model. #' #' @param y a numeric vector or time series of class \code{ts} #' @param h Number of periods for forecasting. 
#' @param alpha Value of alpha. Default value is 0.1. #' @param x Deprecated. Included for backwards compatibility. #' @return An object of class \code{"forecast"} is a list containing at least #' the following elements: \item{model}{A list containing information about the #' fitted model. The first element gives the model used for non-zero demands. #' The second element gives the model used for times between non-zero demands. #' Both elements are of class \code{forecast}.} \item{method}{The name of the #' forecasting method as a character string} \item{mean}{Point forecasts as a #' time series} \item{x}{The original time series (either \code{object} itself #' or the time series used to create the model stored as \code{object}).} #' \item{residuals}{Residuals from the fitted model. That is y minus fitted #' values.} \item{fitted}{Fitted values (one-step forecasts)} #' #' The function \code{summary} is used to obtain and print a summary of the #' results, while the function \code{plot} produces a plot of the forecasts. #' #' The generic accessor functions \code{fitted.values} and \code{residuals} #' extract useful features of the value returned by \code{croston} and #' associated functions. #' @author Rob J Hyndman #' @seealso \code{\link{ses}}. #' @references Croston, J. (1972) "Forecasting and stock control for #' intermittent demands", \emph{Operational Research Quarterly}, \bold{23}(3), #' 289-303. #' #' Shenstone, L., and Hyndman, R.J. (2005) "Stochastic models underlying #' Croston's method for intermittent demand forecasting". \emph{Journal of #' Forecasting}, \bold{24}, 389-402. #' @keywords ts #' @examples #' y <- rpois(20,lambda=.3) #' fcast <- croston(y) #' plot(fcast) #' #' @export croston <- function(y, h=10, alpha=0.1, x=y) { if (sum(x < 0) > 0) { stop("Series should not contain negative values") } out <- croston2(x, h, alpha) out$x <- x if (!is.null(out$fitted)) { out$residuals <- x - out$fitted } out$method <- "Croston's method" out$series <- deparse(substitute(y)) return(structure(out, class = "forecast")) } croston2 <- function(x, h=10, alpha=0.1, nofits=FALSE) { x <- as.ts(x) y <- x[x > 0] tsp.x <- tsp(x) freq.x <- tsp.x[3] start.f <- tsp.x[2] + 1 / freq.x if (length(y) == 0) # All historical values are equal to zero { fc <- ts(rep(0, h), start = start.f, frequency = freq.x) if (nofits) { return(fc) } else { return(list(mean = fc, fitted = ts(x * 0, start = tsp.x[1], frequency = freq.x))) } } tt <- diff(c(0, (1:length(x))[x > 0])) # Times between non-zero observations if (length(y) == 1 && length(tt) == 1) # Only one non-zero observation { y.f <- list(mean = ts(rep(y, h), start = start.f, frequency = freq.x)) p.f <- list(mean = ts(rep(tt, h), start = start.f, frequency = freq.x)) } else if (length(y) <= 1 || length(tt) <= 1) { # length(tt)==0 but length(y)>0. How does that happen? 
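# In practice tt and y always have the same length at this point (tt holds one
# inter-demand interval per non-zero observation), so this branch appears to be
# purely defensive: it returns NA forecasts rather than failing.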
return(list(mean = ts(rep(NA, h), start = start.f, frequency = freq.x))) } else { y.f <- ses(y, alpha = alpha, initial = "simple", h = h, PI = FALSE) p.f <- ses(tt, alpha = alpha, initial = "simple", h = h, PI = FALSE) } ratio <- ts(y.f$mean / p.f$mean, start = start.f, frequency = freq.x) if (nofits) { return(ratio) } else { n <- length(x) fits <- x * NA if (n > 1) { for (i in 1:(n - 1)) fits[i + 1] <- croston2(x[1:i], h = 1, alpha = alpha, nofits = TRUE) } ratio <- future_msts(x, ratio) fits <- copy_msts(x, fits) return(list(mean = ratio, fitted = fits, model = list(demand = y.f, period = p.f))) } } forecast/R/adjustSeasonalSeeds.R0000644000176200001440000001231414323125536016355 0ustar liggesusers############################################################################### # TBATS code cutWTBATS <- function(use.beta, w.tilda.transpose, seasonal.periods, p=0, q=0) { mask.vector <- numeric(length(seasonal.periods)) i <- length(seasonal.periods) while (i > 1) { for (j in 1:(i - 1)) { if ((seasonal.periods[i] %% seasonal.periods[j]) == 0) { mask.vector[j] <- 1 } } i <- i - 1 } w.pos.counter <- 1 w.pos <- 1 if (use.beta) { w.pos <- w.pos + 1 } for (s in seasonal.periods) { if (mask.vector[w.pos.counter] == 1) { w.tilda.transpose <- w.tilda.transpose[, -((w.pos + 1):(w.pos + s))] } else if (mask.vector[w.pos.counter] < 0) { # Cut more than one off w.pos <- w.pos + s w.tilda.transpose <- w.tilda.transpose[, -c((w.pos + mask.vector[w.pos.counter] + 1):w.pos)] w.pos <- w.pos + mask.vector[w.pos.counter] } else { w.pos <- w.pos + s w.tilda.transpose <- w.tilda.transpose[, -w.pos] w.pos <- w.pos - 1 } w.pos.counter <- w.pos.counter + 1 } if ((p != 0) | (q != 0)) { end.cut <- ncol(w.tilda.transpose) start.cut <- end.cut - (p + q) + 1 w.tilda.transpose <- w.tilda.transpose[, -c(start.cut:end.cut)] } return(list(matrix = w.tilda.transpose, mask.vector = mask.vector)) } # BATS code below ######### cutW <- function(use.beta, w.tilda.transpose, seasonal.periods, p=0, q=0) { mask.vector <- numeric(length(seasonal.periods)) i <- length(seasonal.periods) while (i > 1) { for (j in 1:(i - 1)) { if ((seasonal.periods[i] %% seasonal.periods[j]) == 0) { mask.vector[j] <- 1 } } i <- i - 1 } if (length(seasonal.periods) > 1) { for (s in length(seasonal.periods):2) { for (j in (s - 1):1) { hcf <- findGCD(seasonal.periods[s], seasonal.periods[j]) if (hcf != 1) { if ((mask.vector[s] != 1) && (mask.vector[j] != 1)) { mask.vector[s] <- hcf * -1 } } } } } w.pos.counter <- 1 w.pos <- 1 if (use.beta) { w.pos <- w.pos + 1 } for (s in seasonal.periods) { if (mask.vector[w.pos.counter] == 1) { w.tilda.transpose <- w.tilda.transpose[, -((w.pos + 1):(w.pos + s))] } else if (mask.vector[w.pos.counter] < 0) { # Cut more than one off w.pos <- w.pos + s w.tilda.transpose <- w.tilda.transpose[, -c((w.pos + mask.vector[w.pos.counter] + 1):w.pos)] w.pos <- w.pos + mask.vector[w.pos.counter] } else { w.pos <- w.pos + s w.tilda.transpose <- w.tilda.transpose[, -w.pos] w.pos <- w.pos - 1 } w.pos.counter <- w.pos.counter + 1 } if ((p != 0) | (q != 0)) { end.cut <- ncol(w.tilda.transpose) start.cut <- end.cut - (p + q) + 1 w.tilda.transpose <- w.tilda.transpose[, -c(start.cut:end.cut)] } return(list(matrix = w.tilda.transpose, mask.vector = mask.vector)) } calcSeasonalSeeds <- function(use.beta, coefs, seasonal.periods, mask.vector, p=0, q=0) { x.pos.counter <- 1 sum.k <- 0 if (use.beta) { x.pos <- 2 new.x.nought <- matrix(coefs[1:2], nrow = 2, ncol = 1) } else { x.pos <- 1 new.x.nought <- matrix(coefs[1], nrow = 1, ncol = 1) } 
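# Walk through the seed coefficients one seasonal period at a time. Each block
# is recentred so that the seasonal states sum to zero over a full cycle (the
# omitted element is reconstructed as -k/s); periods flagged as redundant in
# mask.vector get a vector of zero states instead.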
x.pos.counter <- 1 for (s in seasonal.periods) { if (mask.vector[x.pos.counter] == 1) { # Make a vector of zeros season <- matrix(0, nrow = s, ncol = 1) new.x.nought <- rbind(new.x.nought, season) } else if (mask.vector[x.pos.counter] < 0) { extract <- coefs[(x.pos + 1):(x.pos + s + mask.vector[x.pos.counter])] # print("extract:") # print(extract) # Find k k <- sum(extract) # update sum.k sum.k <- sum.k + k / s # create the current.periodicity vector current.periodicity <- extract - k / s current.periodicity <- matrix(current.periodicity, nrow = length(current.periodicity), ncol = 1) additional <- matrix(-k / s, nrow = (-1 * mask.vector[x.pos.counter]), ncol = 1) current.periodicity <- rbind(current.periodicity, additional) new.x.nought <- rbind(new.x.nought, current.periodicity) x.pos <- x.pos + s + mask.vector[x.pos.counter] } else { # Find k k <- sum(coefs[(x.pos + 1):(x.pos + s - 1)]) # update sum.k sum.k <- sum.k + k / s # create the current.periodicity vector current.periodicity <- coefs[(x.pos + 1):(x.pos + s - 1)] - k / s current.periodicity <- c(current.periodicity, -k / s) current.periodicity <- matrix(current.periodicity, nrow = length(current.periodicity), ncol = 1) new.x.nought <- rbind(new.x.nought, current.periodicity) x.pos <- x.pos + s - 1 } # Adjust L(t) x.pos.counter <- x.pos.counter + 1 } # print(new.x.nought) # Lastly, get the arma error seed states, if they exist. if ((p != 0) | (q != 0)) { arma.seed.states <- numeric((p + q)) arma.seed.states <- matrix(arma.seed.states, nrow = length(arma.seed.states), ncol = 1) # Final value of x.nought x.nought <- rbind(new.x.nought, arma.seed.states) } else { x.nought <- new.x.nought } return(x.nought) } findGCD <- function(larger, smaller) { remainder <- larger %% smaller if (remainder != 0) { return(findGCD(smaller, remainder)) } else { return(smaller) } } forecast/R/checkresiduals.R0000644000176200001440000001147214633662406015414 0ustar liggesusers#' Check that residuals from a time series model look like white noise #' #' If \code{plot=TRUE}, produces a time plot of the residuals, the #' corresponding ACF, and a histogram. If \code{test} is not \code{FALSE}, #' the output from either a Ljung-Box test or Breusch-Godfrey test is printed. #' #' @param object Either a time series model, a forecast object, or a time #' series (assumed to be residuals). #' @param lag Number of lags to use in the Ljung-Box or Breusch-Godfrey test. #' If missing, it is set to \code{min(10,n/5)} for non-seasonal data, and #' \code{min(2m, n/5)} for seasonal data, where \code{n} is the length of the series, #' and \code{m} is the seasonal period of the data. It is further constrained to be #' at least \code{df+3} where \code{df} is the degrees of freedom of the model. This #' ensures there are at least 3 degrees of freedom used in the chi-squared test. #' @param test Test to use for serial correlation. By default, if \code{object} #' is of class \code{lm}, then \code{test="BG"}. Otherwise, \code{test="LB"}. #' Setting \code{test=FALSE} will prevent the test results being printed. #' @param plot Logical. If \code{TRUE}, will produce the plot. #' @param ... Other arguments are passed to \code{\link{ggtsdisplay}}. #' @return None #' @author Rob J Hyndman #' @seealso \code{\link{ggtsdisplay}}, \code{\link[stats]{Box.test}}, #' \code{\link[lmtest]{bgtest}} #' @examples #' #' fit <- ets(WWWusage) #' checkresiduals(fit) #' #' @export checkresiduals <- function(object, lag, test, plot = TRUE, ...) 
{ showtest <- TRUE if (missing(test)) { if (is.element("lm", class(object))) { test <- "BG" } else { test <- "LB" } showtest <- TRUE } else if (test != FALSE) { test <- match.arg(test, c("LB", "BG")) showtest <- TRUE } else { showtest <- FALSE } # Extract residuals if (is.element("ts", class(object)) | is.element("numeric", class(object))) { residuals <- object object <- list(method = "Missing") } else { residuals <- residuals(object) } if (length(residuals) == 0L) { stop("No residuals found") } if ("ar" %in% class(object)) { method <- paste("AR(", object$order, ")", sep = "") } else if (!is.null(object$method)) { method <- object$method } else if ("HoltWinters" %in% class(object)) { method <- "HoltWinters" } else if ("StructTS" %in% class(object)) { method <- "StructTS" } else { method <- try(as.character(object), silent = TRUE) if ("try-error" %in% class(method)) { method <- "Missing" } else if (length(method) > 1 | base::nchar(method[1]) > 50) { method <- "Missing" } } if (method == "Missing") { main <- "Residuals" } else { main <- paste("Residuals from", method) } if (plot) { suppressWarnings(ggtsdisplay(residuals, plot.type = "histogram", main = main, ...)) } # Check if we have the model if (is.element("forecast", class(object))) { object <- object$model } if (is.null(object) | !showtest) { return(invisible()) } # Seasonality of data freq <- frequency(residuals) # Find model df #if (grepl("STL \\+ ", method)) { # warning("The fitted degrees of freedom is based on the model used for the seasonally adjusted data.") #} if (inherits(object, "Arima") | test == "BG") { df <- modeldf(object) } else { df <- 0 } if (missing(lag)) { lag <- ifelse(freq > 1, 2 * freq, 10) lag <- min(lag, round(length(residuals) / 5)) lag <- max(df + 3, lag) } if (test == "BG") { # Do Breusch-Godfrey test BGtest <- lmtest::bgtest(object, order = lag) BGtest$data.name <- main # print(BGtest) return(BGtest) } else { # Do Ljung-Box test LBtest <- Box.test(zoo::na.approx(residuals), fitdf = df, lag = lag, type = "Ljung") LBtest$method <- "Ljung-Box test" LBtest$data.name <- main names(LBtest$statistic) <- "Q*" print(LBtest) cat(paste("Model df: ", df, ". Total lags used: ", lag, "\n\n", sep = "")) return(invisible(LBtest)) } } #' Compute model degrees of freedom #' #' @param object A time series model #' @param ... Other arguments currently ignored #' @export modeldf <- function(object, ...) { UseMethod("modeldf") } #' @export modeldf.default <- function(object, ...) { warning("Could not find appropriate degrees of freedom for this model.") NULL } #' @export modeldf.ets <- function(object, ...) { length(object$par) } #' @export modeldf.Arima <- function(object, ...) { sum(arimaorder(object)[c("p", "q", "P", "Q")], na.rm = TRUE) } #' @export modeldf.bats <- function(object, ...) { length(object$parameters$vect) } #' @export modeldf.lm <- function(object, ...) { length(object$coefficients) } #' @export modeldf.lagwalk <- function(object, ...) { as.numeric(object$par$includedrift) } #' @export modeldf.meanf <- function(object, ...) { 1 } forecast/R/mstl.R0000644000176200001440000005322714323125536013400 0ustar liggesusers#' Multiple seasonal decomposition #' #' Decompose a time series into seasonal, trend and remainder components. #' Seasonal components are estimated iteratively using STL. Multiple seasonal periods are #' allowed. The trend component is computed for the last iteration of STL. #' Non-seasonal time series are decomposed into trend and remainder only. 
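# A minimal sketch of how these pieces fit together (illustrative only, kept
# as a comment so it is not run as package code): modeldf() supplies the fitdf
# correction for the Ljung-Box test inside checkresiduals(). For a
# non-seasonal ARIMA fit it counts the p + q ARMA coefficients:
#   fit <- Arima(WWWusage, order = c(2, 1, 1))
#   modeldf(fit)        # 3 = p + q; seasonal models also count P + Q
#   checkresiduals(fit) # Ljung-Box test run with fitdf = modeldf(fit)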
#' In this case, \code{\link[stats]{supsmu}} is used to estimate the trend. #' Optionally, the time series may be Box-Cox transformed before decomposition. #' Unlike \code{\link[stats]{stl}}, \code{mstl} is completely automated. #' @param x Univariate time series of class \code{msts} or \code{ts}. #' @param iterate Number of iterations to use to refine the seasonal component. #' @param s.window Seasonal windows to be used in the decompositions. If scalar, #' the same value is used for all seasonal components. Otherwise, it should be a vector #' of the same length as the number of seasonal components (or longer). #' @param ... Other arguments are passed to \code{\link[stats]{stl}}. #' @inheritParams forecast.ts #' #' @seealso \code{\link[stats]{stl}}, \code{\link[stats]{supsmu}} #' @examples #' library(ggplot2) #' mstl(taylor) %>% autoplot() #' mstl(AirPassengers, lambda = "auto") %>% autoplot() #' @export mstl <- function(x, lambda = NULL, iterate = 2, s.window = 7+4*seq(6), ...) { # What is x? origx <- x n <- length(x) if ("msts" %in% class(x)) { msts <- attributes(x)$msts if (any(msts >= n / 2)) { warning("Dropping seasonal components with fewer than two full periods.") msts <- msts[msts < n / 2] x <- msts(x, seasonal.periods = msts) } msts <- sort(msts, decreasing = FALSE) } else if ("ts" %in% class(x)) { msts <- frequency(x) iterate <- 1L } else { x <- as.ts(x) msts <- 1L } # Check dimension if (!is.null(dim(x))) { if (NCOL(x) == 1L) { x <- x[, 1] } } # Replace missing values if necessary if (anyNA(x)) { x <- na.interp(x, lambda = lambda) } # Transform if necessary if (!is.null(lambda)) { x <- BoxCox(x, lambda = lambda) lambda <- attr(x, "lambda") } tt <- seq_len(n) # Now fit stl models with only one type of seasonality at a time if (msts[1L] > 1) { seas <- as.list(rep(0, length(msts))) deseas <- x if (length(s.window) == 1L) { s.window <- rep(s.window, length(msts)) } iterate <- pmax(1L, iterate) for (j in seq_len(iterate)) { for (i in seq_along(msts)) { deseas <- deseas + seas[[i]] fit <- stl(ts(deseas, frequency = msts[i]), s.window = s.window[i], ...) seas[[i]] <- msts(seasonal(fit), seasonal.periods = msts) attributes(seas[[i]]) <- attributes(x) deseas <- deseas - seas[[i]] } } trend <- msts(trendcycle(fit), seasonal.periods = msts) } else { msts <- NULL deseas <- x trend <- ts(stats::supsmu(seq_len(n), x)$y) } attributes(trend) <- attributes(x) # Put back NAs deseas[is.na(origx)] <- NA # Estimate remainder remainder <- deseas - trend # Package into matrix output <- cbind(c(origx), c(trend)) if (!is.null(msts)) { for (i in seq_along(msts)) { output <- cbind(output, c(seas[[i]])) } } output <- cbind(output, c(remainder)) colnames(output) <- paste0("V",seq(NCOL(output))) colnames(output)[1L:2L] <- c("Data", "Trend") if (!is.null(msts)) { colnames(output)[2L + seq_along(msts)] <- paste0("Seasonal", round(msts, 2)) } colnames(output)[NCOL(output)] <- "Remainder" output <- copy_msts(origx, output) class(output) <- c("mstl", class(output)) return(output) } #' @rdname autoplot.seas #' @export autoplot.mstl <- function(object, ...) { autoplot.mts(object, facets = TRUE, ylab = "", ...) } #' Forecasting using stl objects #' #' Forecasts of STL objects are obtained by applying a non-seasonal forecasting #' method to the seasonally adjusted data and re-seasonalizing using the last #' year of the seasonal component. 
#' #' \code{stlm} takes a time series \code{y}, applies an STL decomposition, and #' models the seasonally adjusted data using the model passed as #' \code{modelfunction} or specified using \code{method}. It returns an object #' that includes the original STL decomposition and a time series model fitted #' to the seasonally adjusted data. This object can be passed to the #' \code{forecast.stlm} for forecasting. #' #' \code{forecast.stlm} forecasts the seasonally adjusted data, then #' re-seasonalizes the results by adding back the last year of the estimated #' seasonal component. #' #' \code{stlf} combines \code{stlm} and \code{forecast.stlm}. It takes a #' \code{ts} argument, applies an STL decomposition, models the seasonally #' adjusted data, reseasonalizes, and returns the forecasts. However, it allows #' more general forecasting methods to be specified via #' \code{forecastfunction}. #' #' \code{forecast.stl} is similar to \code{stlf} except that it takes the STL #' decomposition as the first argument, instead of the time series. #' #' Note that the prediction intervals ignore the uncertainty associated with #' the seasonal component. They are computed using the prediction intervals #' from the seasonally adjusted series, which are then reseasonalized using the #' last year of the seasonal component. The uncertainty in the seasonal #' component is ignored. #' #' The time series model for the seasonally adjusted data can be specified in #' \code{stlm} using either \code{method} or \code{modelfunction}. The #' \code{method} argument provides a shorthand way of specifying #' \code{modelfunction} for a few special cases. More generally, #' \code{modelfunction} can be any function with first argument a \code{ts} #' object, that returns an object that can be passed to \code{\link{forecast}}. #' For example, \code{forecastfunction=ar} uses the \code{\link{ar}} function #' for modelling the seasonally adjusted series. #' #' The forecasting method for the seasonally adjusted data can be specified in #' \code{stlf} and \code{forecast.stl} using either \code{method} or #' \code{forecastfunction}. The \code{method} argument provides a shorthand way #' of specifying \code{forecastfunction} for a few special cases. More #' generally, \code{forecastfunction} can be any function with first argument a #' \code{ts} object, and other \code{h} and \code{level}, which returns an #' object of class \code{\link{forecast}}. For example, #' \code{forecastfunction=thetaf} uses the \code{\link{thetaf}} function for #' forecasting the seasonally adjusted series. #' #' @param y A univariate numeric time series of class \code{ts} #' @param object An object of class \code{stl} or \code{stlm}. Usually the #' result of a call to \code{\link[stats]{stl}} or \code{stlm}. #' @param method Method to use for forecasting the seasonally adjusted series. #' @param modelfunction An alternative way of specifying the function for #' modelling the seasonally adjusted series. If \code{modelfunction} is not #' \code{NULL}, then \code{method} is ignored. Otherwise \code{method} is used #' to specify the time series model to be used. #' @param model Output from a previous call to \code{stlm}. If a \code{stlm} #' model is passed, this same model is fitted to y without re-estimating any #' parameters. #' @param forecastfunction An alternative way of specifying the function for #' forecasting the seasonally adjusted series. If \code{forecastfunction} is #' not \code{NULL}, then \code{method} is ignored. 
Otherwise \code{method} is #' used to specify the forecasting method to be used. #' @param etsmodel The ets model specification passed to #' \code{\link[forecast]{ets}}. By default it allows any non-seasonal model. If #' \code{method!="ets"}, this argument is ignored. #' @param xreg Historical regressors to be used in #' \code{\link[forecast]{auto.arima}()} when \code{method=="arima"}. #' @param newxreg Future regressors to be used in #' \code{\link[forecast]{forecast.Arima}()}. #' @param h Number of periods for forecasting. #' @param level Confidence level for prediction intervals. #' @param fan If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable #' for fan plots. #' @param s.window Either the character string ``periodic'' or the span (in #' lags) of the loess window for seasonal extraction. #' @param t.window A number to control the smoothness of the trend. See #' \code{\link[stats]{stl}} for details. #' @param robust If \code{TRUE}, robust fitting will used in the loess #' procedure within \code{\link[stats]{stl}}. #' @param allow.multiplicative.trend If TRUE, then ETS models with #' multiplicative trends are allowed. Otherwise, only additive or no trend ETS #' models are permitted. #' @param x Deprecated. Included for backwards compatibility. #' @param ... Other arguments passed to \code{forecast.stl}, #' \code{modelfunction} or \code{forecastfunction}. #' @inheritParams forecast.ts #' #' @return \code{stlm} returns an object of class \code{stlm}. The other #' functions return objects of class \code{forecast}. #' #' There are many methods for working with \code{\link{forecast}} objects #' including \code{summary} to obtain and print a summary of the results, while #' \code{plot} produces a plot of the forecasts and prediction intervals. The #' generic accessor functions \code{fitted.values} and \code{residuals} extract #' useful features. #' @author Rob J Hyndman #' @seealso \code{\link[stats]{stl}}, \code{\link{forecast.ets}}, #' \code{\link{forecast.Arima}}. #' @keywords ts #' @examples #' #' tsmod <- stlm(USAccDeaths, modelfunction = ar) #' plot(forecast(tsmod, h = 36)) #' #' decomp <- stl(USAccDeaths, s.window = "periodic") #' plot(forecast(decomp)) #' @export forecast.stl <- function(object, method = c("ets", "arima", "naive", "rwdrift"), etsmodel = "ZZN", forecastfunction = NULL, h = frequency(object$time.series) * 2, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = NULL, xreg = NULL, newxreg = NULL, allow.multiplicative.trend = FALSE, ...) { method <- match.arg(method) if (is.null(forecastfunction)) { if (method != "arima" && (!is.null(xreg) || !is.null(newxreg))) { stop("xreg and newxreg arguments can only be used with ARIMA models") } if (method == "ets") { # Ensure non-seasonal model if (substr(etsmodel, 3, 3) != "N") { warning("The ETS model must be non-seasonal. I'm ignoring the seasonal component specified.") substr(etsmodel, 3, 3) <- "N" } forecastfunction <- function(x, h, level, ...) { fit <- ets(na.interp(x), model = etsmodel, allow.multiplicative.trend = allow.multiplicative.trend, ...) return(forecast(fit, h = h, level = level)) } } else if (method == "arima") { forecastfunction <- function(x, h, level, ...) { fit <- auto.arima(x, xreg = xreg, seasonal = FALSE, ...) return(forecast(fit, h = h, level = level, xreg = newxreg)) } } else if (method == "naive") { forecastfunction <- function(x, h, level, ...) { rwf(x, drift = FALSE, h = h, level = level, ...) } } else if (method == "rwdrift") { forecastfunction <- function(x, h, level, ...) 
{ rwf(x, drift = TRUE, h = h, level = level, ...) } } } if (is.null(xreg) != is.null(newxreg)) { stop("xreg and newxreg arguments must both be supplied") } if (!is.null(newxreg)) { if (NROW(as.matrix(newxreg)) != h) { stop("newxreg should have the same number of rows as the forecast horizon h") } } if (fan) { level <- seq(51, 99, by = 3) } if ("mstl" %in% class(object)) { seasoncolumns <- which(grepl("Season", colnames(object))) nseasons <- length(seasoncolumns) seascomp <- matrix(0, ncol = nseasons, nrow = h) seasonal.periods <- as.numeric(sub("Seasonal","", colnames(object)[seasoncolumns])) n <- NROW(object) for (i in seq(nseasons)) { mp <- seasonal.periods[i] colname <- colnames(object)[seasoncolumns[i]] seascomp[, i] <- rep(object[n - rev(seq_len(mp)) + 1, colname], trunc(1 + (h - 1) / mp))[seq_len(h)] } lastseas <- rowSums(seascomp) xdata <- object[, "Data"] seascols <- grep("Seasonal", colnames(object)) allseas <- rowSumsTS(object[, seascols, drop = FALSE]) series <- NULL } else if ("stl" %in% class(object)) { m <- frequency(object$time.series) n <- NROW(object$time.series) lastseas <- rep(seasonal(object)[n - (m:1) + 1], trunc(1 + (h - 1) / m))[1:h] xdata <- ts(rowSums(object$time.series)) tsp(xdata) <- tsp(object$time.series) allseas <- seasonal(object) series <- deparse(object$call$x) } else { stop("Unknown object class") } # De-seasonalize x.sa <- seasadj(object) # Forecast fcast <- forecastfunction(x.sa, h = h, level = level, ...) # Reseasonalize fcast$mean <- future_msts(xdata, fcast$mean + lastseas) fcast$upper <- future_msts(xdata, fcast$upper + lastseas) fcast$lower <- future_msts(xdata, fcast$lower + lastseas) fcast$x <- xdata fcast$method <- paste("STL + ", fcast$method) fcast$series <- series fcast$fitted <- copy_msts(xdata, fitted(fcast) + allseas) fcast$residuals <- copy_msts(xdata, fcast$x - fcast$fitted) if (!is.null(lambda)) { fcast$x <- InvBoxCox(fcast$x, lambda) fcast$fitted <- InvBoxCox(fcast$fitted, lambda) fcast$mean <- InvBoxCox(fcast$mean, lambda, biasadj, fcast) fcast$lower <- InvBoxCox(fcast$lower, lambda) fcast$upper <- InvBoxCox(fcast$upper, lambda) attr(lambda, "biasadj") <- biasadj fcast$lambda <- lambda } return(fcast) } #' @export forecast.mstl <- function(object, method = c("ets", "arima", "naive", "rwdrift"), etsmodel = "ZZN", forecastfunction = NULL, h = frequency(object) * 2, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = NULL, xreg = NULL, newxreg = NULL, allow.multiplicative.trend = FALSE, ...) { forecast.stl( object, method = method, etsmodel = etsmodel, forecastfunction = forecastfunction, h = h, level = level, fan = fan, lambda = lambda, biasadj = biasadj, xreg = xreg, newxreg = newxreg, allow.multiplicative.trend = allow.multiplicative.trend, ... ) } # rowSums for mts objects # # Applies rowSums and returns ts with same tsp attributes as input. This # allows the result to be added to other time series with different lengths # but overlapping time indexes. # param mts a matrix or multivariate time series # return a vector of rowsums which is a ts if the \code{mts} is a ts rowSumsTS <- function (mts) { the_tsp <- tsp(mts) ret <- rowSums(mts) if (is.null(the_tsp)){ ret } else { tsp(ret) <- the_tsp as.ts(ret) } } # Function takes time series, does STL decomposition, and fits a model to seasonally adjusted series # But it does not forecast. Instead, the result can be passed to forecast(). 
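# Illustrative usage (a sketch, not executed as part of the package):
#   fit <- stlm(USAccDeaths, method = "ets")  # STL decomposition + ETS model for the seasonally adjusted data
#   fc <- forecast(fit, h = 24)               # forecasts are reseasonalized automatically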
#' @rdname forecast.stl #' @export stlm <- function(y, s.window = 7+4*seq(6), robust = FALSE, method = c("ets", "arima"), modelfunction = NULL, model = NULL, etsmodel = "ZZN", lambda = NULL, biasadj = FALSE, xreg = NULL, allow.multiplicative.trend = FALSE, x = y, ...) { method <- match.arg(method) # Check univariate if (NCOL(x) > 1L) { stop("y must be a univariate time series") } else { if (!is.null(ncol(x))) { if (ncol(x) == 1L) { # Probably redundant check x <- x[, 1L] } } } # Check x is a seasonal time series tspx <- tsp(x) if (is.null(tspx)) { stop("y is not a seasonal ts object") } else if (tspx[3] <= 1L) { stop("y is not a seasonal ts object") } # Transform data if necessary origx <- x if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } # Do STL decomposition stld <- mstl(x, s.window = s.window, robust = robust) if (!is.null(model)) { if (inherits(model$model, "ets")) { modelfunction <- function(x, ...) { return(ets(x, model = model$model, use.initial.values = TRUE, ...)) } } else if (inherits(model$model, "Arima")) { modelfunction <- function(x, ...) { return(Arima(x, model = model$model, xreg = xreg, ...)) } } else if (!is.null(model$modelfunction)) { if ("model" %in% names(formals(model$modelfunction))) { modelfunction <- function(x, ...) { return(model$modelfunction(x, model = model$model, ...)) } } } if (is.null(modelfunction)) { stop("Unknown model type") } } # Construct modelfunction if not passed as an argument else if (is.null(modelfunction)) { if (method != "arima" && !is.null(xreg)) { stop("xreg arguments can only be used with ARIMA models") } if (method == "ets") { # Ensure non-seasonal model if (substr(etsmodel, 3, 3) != "N") { warning("The ETS model must be non-seasonal. I'm ignoring the seasonal component specified.") substr(etsmodel, 3, 3) <- "N" } modelfunction <- function(x, ...) { return(ets( x, model = etsmodel, allow.multiplicative.trend = allow.multiplicative.trend, ... )) } } else if (method == "arima") { modelfunction <- function(x, ...) { return(auto.arima(x, xreg = xreg, seasonal = FALSE, ...)) } } } # De-seasonalize x.sa <- seasadj(stld) # Model seasonally adjusted data fit <- modelfunction(x.sa, ...) fit$x <- x.sa # Fitted values and residuals seascols <- grep("Seasonal", colnames(stld)) allseas <- rowSumsTS(stld[, seascols, drop = FALSE]) fits <- fitted(fit) + allseas res <- residuals(fit) if (!is.null(lambda)) { fits <- InvBoxCox(fits, lambda, biasadj, var(res)) attr(lambda, "biasadj") <- biasadj } return(structure(list( stl = stld, model = fit, modelfunction = modelfunction, lambda = lambda, x = origx, series = deparse(substitute(y)), m = frequency(origx), fitted = fits, residuals = res ), class = "stlm")) } #' @rdname forecast.stl #' @export forecast.stlm <- function(object, h = 2 * object$m, level = c(80, 95), fan = FALSE, lambda = object$lambda, biasadj = NULL, newxreg = NULL, allow.multiplicative.trend = FALSE, ...) { if (!is.null(newxreg)) { if (nrow(as.matrix(newxreg)) != h) { stop("newxreg should have the same number of rows as the forecast horizon h") } } if (fan) { level <- seq(51, 99, by = 3) } # Forecast seasonally adjusted series if (is.element("Arima", class(object$model)) && !is.null(newxreg)) { fcast <- forecast(object$model, h = h, level = level, xreg = newxreg, ...) } else if (is.element("ets", class(object$model))) { fcast <- forecast( object$model, h = h, level = level, allow.multiplicative.trend = allow.multiplicative.trend, ... ) } else { fcast <- forecast(object$model, h = h, level = level, ...) 
} # In-case forecast method uses different horizon length (such as using xregs) h <- NROW(fcast$mean) # Forecast seasonal series with seasonal naive seasonal.periods <- attributes(object$stl)$msts if (is.null(seasonal.periods)) { seasonal.periods <- frequency(object$stl) } seascomp <- matrix(0, ncol = length(seasonal.periods), nrow = h) for (i in seq_along(seasonal.periods)) { mp <- seasonal.periods[i] n <- NROW(object$stl) colname <- paste0("Seasonal", round(mp, 2)) seascomp[, i] <- rep(object$stl[n - rev(seq_len(mp)) + 1, colname], trunc(1 + (h - 1) / mp))[seq_len(h)] } lastseas <- rowSums(seascomp) xdata <- object$stl[, "Data"] seascols <- grep("Seasonal", colnames(object$stl)) allseas <- rowSumsTS(object$stl[, seascols, drop = FALSE]) series <- NULL # m <- frequency(object$stl$time.series) n <- NROW(xdata) # Reseasonalize fcast$mean <- fcast$mean + lastseas fcast$upper <- fcast$upper + lastseas fcast$lower <- fcast$lower + lastseas fcast$method <- paste("STL + ", fcast$method) fcast$series <- object$series # fcast$seasonal <- ts(lastseas[1:m],frequency=m,start=tsp(object$stl$time.series)[2]-1+1/m) # fcast$residuals <- residuals() fcast$fitted <- fitted(fcast) + allseas fcast$residuals <- residuals(fcast) if (!is.null(lambda)) { fcast$fitted <- InvBoxCox(fcast$fitted, lambda) fcast$mean <- InvBoxCox(fcast$mean, lambda, biasadj, fcast) fcast$lower <- InvBoxCox(fcast$lower, lambda) fcast$upper <- InvBoxCox(fcast$upper, lambda) attr(lambda, "biasadj") <- biasadj fcast$lambda <- lambda } fcast$x <- object$x return(fcast) } #' @rdname forecast.stl #' #' @examples #' #' plot(stlf(AirPassengers, lambda = 0)) #' @export stlf <- function(y, h = frequency(x) * 2, s.window = 7+4*seq(6), t.window = NULL, robust = FALSE, lambda = NULL, biasadj = FALSE, x = y, ...) { seriesname <- deparse(substitute(y)) # Check univariate if (NCOL(x) > 1L) { stop("y must be a univariate time series") } else { if (!is.null(ncol(x))) { if (ncol(x) == 1L) { # Probably redundant check x <- x[, 1L] } } } # Check x is a seasonal time series tspx <- tsp(x) if (is.null(tspx)) { stop("y is not a seasonal ts object") } else if (tspx[3] <= 1L) { stop("y is not a seasonal ts object") } if (!is.null(lambda)) { x <- BoxCox(x, lambda) lambda <- attr(x, "lambda") } fit <- mstl(x, s.window = s.window, t.window = t.window, robust = robust) fcast <- forecast(fit, h = h, lambda = lambda, biasadj = biasadj, ...) 
# if (!is.null(lambda)) # { # fcast$x <- origx # fcast$fitted <- InvBoxCox(fcast$fitted, lambda) # fcast$mean <- InvBoxCox(fcast$mean, lambda) # fcast$lower <- InvBoxCox(fcast$lower, lambda) # fcast$upper <- InvBoxCox(fcast$upper, lambda) # fcast$lambda <- lambda # } fcast$series <- seriesname return(fcast) } #' @rdname is.ets #' @export is.stlm <- function(x) { inherits(x, "stlm") } forecast/vignettes/0000755000176200001440000000000014634702027014075 5ustar liggesusersforecast/vignettes/JSS-paper.bib0000644000176200001440000005323414323125536016325 0ustar liggesusers@STRING{advap = {Advances in Applied Probability}} @STRING{amath = {Annals of Mathematics}} @STRING{ams = {The Annals of Mathematical Statistics}} @STRING{amstat = {The American Statistician}} @STRING{annalap = {The Annals of Applied Probability}} @STRING{annalp = {The Annals of Probability}} @STRING{annals = {The Annals of Statistics}} @STRING{anneug = {Annals of Eugenics}} @STRING{anzjs = {Australian \& New Zealand Journal of Statistics}} @STRING{appstat = {Applied Statistics}} @STRING{ausjstat = {Australian Journal of Statistics}} @STRING{bioc = {Biometrics}} @STRING{bioj = {Biometrical Journal}} @STRING{biok = {Biometrika}} @STRING{chance = {Chance}} @STRING{cjs = {The Canadian Journal of Statistics}} @STRING{comms = {Communications in Statistics}} @STRING{commscs = {Communications in Statistics: Computation \& Simulation}} @STRING{commstm = {Communications in Statistics: Theory \& Methods}} @STRING{compstat = {Computational Statistics}} @STRING{csda = {Computational Statistics \& Data Analysis}} @STRING{debs = {Department of Econometrics \& Business Statistics, Monash University}} @STRING{ejor = {European Journal of Operational Research}} @STRING{ijf = {International Journal of Forecasting}} @STRING{isr = {International Statistical Review}} @STRING{jap = {Journal of Applied Probability}} @STRING{jas = {Journal of Applied Statistics}} @STRING{jasa = {Journal of the American Statistical Association}} @STRING{jcgs = {Journal of Computational \& Graphical Statistics}} @STRING{je = {Journal of Econometrics}} @STRING{jes = {Journal of Educational Statistics}} @STRING{jf = {Journal of Forecasting}} @STRING{jma = {Journal of Multivariate Analysis}} @STRING{jors = {Journal of the Operational Research Society}} @STRING{jos = {Journal of Official Statistics}} @STRING{jrssa = {Journal of the Royal Statistical Society A}} @STRING{jrssb = {Journal of the Royal Statistical Society B}} @STRING{jscs = {Journal of Statistical Computation \& Simulation}} @STRING{jspi = {Journal of Statistical Planning \& Inference}} @STRING{jtp = {Journal of Theoretical Probability}} @STRING{jtsa = {Journal of Time Series Analysis}} @STRING{mansci = {Management Science}} @STRING{psyka = {Psychometrika}} @STRING{ptrf = {Probability Theory \& Related Fields}} @STRING{sankhya = {Sankhy\={a}}} @STRING{sasj = {South African Statistical Journal}} @STRING{scandjs = {Scandinavian Journal of Statistics: Theory \& Applications}} @STRING{siamjssc = {SIAM Journal of Scientific \& Statistical Computing}} @STRING{jss = {Journal of Statistical Software}} @STRING{spl = {Statistics \& Probability Letters}} @STRING{statmed = {Statistics in Medicine}} @STRING{statsci = {Statistical Science}} @STRING{statsin = {Statistica Sinica}} @STRING{survmeth = {Survey Methodology}} @STRING{tech = {Technometrics}} @STRING{toap = {to appear}} @STRING{tpaa = {Theory of Probability \& its Applications}} @STRING{tstat = {The Statistician}} @BOOK{AM79, title = {Optimal Filtering}, publisher = 
{Prentice-Hall}, year = {1979}, author = {B. D. O. Anderson and J. B. Moore}, address = {Englewood Cliffs}, } @BOOK{Aoki87, title = {State Space Modeling of Time Series}, publisher = {Springer-Verlag}, year = {1987}, author = {Masanao Aoki}, address = {Berlin}, } @ARTICLE{Archibald90, author = {Blyth C. Archibald}, title = {Parameter Space of the {H}olt-{W}inters' Model}, journal = ijf, year = {1990}, volume = {6}, pages = {199--209}, fileno = {1151}, keywords = {Exponential smoothing; seasonal; coefficient choice; stability; evaluation}, pdf = {Archibald90.pdf}, } @ARTICLE{AN00, author = {V. Assimakopoulos and K. Nikolopoulos}, title = {The Theta Model: A Decomposition Approach to Forecasting}, journal = ijf, year = {2000}, volume = {16}, pages = {521-530}, fileno = {1047}, keywords = {M3-Competition; Time series; Univariate forecasting method}, } @BOOK{BOK05, title = {Forecasting, Time Series and Regression: An Applied Approach}, publisher = {Thomson Brooks/Cole}, year = {2005}, author = {B. L. Bowerman and R. T. O'Connell and Anne B. Koehler}, address = {Belmont CA}, } @BOOK{BDbook91, title = {Time Series: Theory and Methods}, publisher = {Springer-Verlag}, year = {1991}, author = {P. J. Brockwell and R. A Davis}, address = {New York}, edition = {2nd}, } @BOOK{BDbook91a, title = {Introduction to Time Series and Forecasting}, publisher = {John Wiley \& Sons}, year = {2002}, edition = {2nd}, author = {P.J. Brockwell and R.A. Davis}, } @ARTICLE{CH95, author = {F. Canova and B. E. Hansen}, title = {Are Seasonal Patterns Constant Over Time? {A} Test for Seasonal Stability}, journal = {Journal of Business and Economic Statistics}, year = {1995}, volume = {13}, pages = {237-252}, file = {CH95.pdf:CH95.pdf:PDF}, pdf = {CH95.pdf}, } @ARTICLE{CY91, author = {Chris Chatfield and Mohammad Yar}, title = {Prediction Intervals for Multiplicative {H}olt-{W}inters}, journal = ijf, year = {1991}, volume = {7}, pages = {31-37}, keywords = {Holt-Winters; Prediction intervals; Exponential smoothing}, } @ARTICLE{Croston72, author = {J. D. Croston}, title = {Forecasting and Stock Control for Intermittent Demands}, journal = {Operational Research Quarterly}, year = {1972}, volume = {23}, pages = {289--304}, number = {3}, pdf = {Croston72.pdf}, } @ARTICLE{DF81, author = {D. A. Dickey and W. A. Fuller}, title = {Likelihood Ratio Statistics for Autoregressive Time Series with a Unit Root}, journal = {Econometrica}, year = {1981}, volume = {49}, pages = {1057-1071}, } @BOOK{DKbook01, title = {Time Series Analysis by State Space Methods}, publisher = {Oxford University Press}, year = {2001}, author = {J Durbin and Siem J Koopman}, address = {Oxford}, } @ARTICLE{Gardner85, author = {Gardner, Jr, Everette S.}, title = {Exponential Smoothing: The State of the Art}, journal = jf, year = {1985}, volume = {4}, pages = {1-28}, keywords = {Bibliography; exponential smoothing; comparative methods; ARIMA; exponential smoothing; control charts; CUSUM; evaluation-forecasting monitoring systems; exponential smoothing; adaptive exponential smoothing-adaptive; coefficient choice; higher-order; review; theory seasonality-estimation; harmonics; tracking signal-methodology; use-inventory control}, } @ARTICLE{GM85, author = {Gardner, Jr, Everette S. 
and Ed McKenzie}, title = {Forecasting Trends in Time Series}, journal = mansci, year = {1985}, volume = {31}, pages = {1237-1246}, number = {10}, keywords = {Forecasting; time series}, } @TECHREPORT{Gomez98, author = {Victor G\'{o}mez}, title = {Automatic Model Identification in the Presence of Missing Observations and Outliers}, institution = {Ministerio de Econom{\'\i}a y Hacienda, Direcci{\'o}n General de An{\'a}lisis y Programaci{\'o}n Presupuestaria}, year = {1998}, type = {Working paper}, number = {D-98009}, pdf = {Gomez98.pdf}, } @TECHREPORT{TRAMOSEATS98, author = {Victor G\'{o}mez and Agust\'{i}n Maravall}, title = {Programs \pkg{TRAMO} and \pkg{SEATS}, Instructions for the Users}, institution = {Ministerio de Econom{\'\i}a y Hacienda, Direcci{\'o}n General de An{\'a}lisis y Programaci{\'o}n Presupuestaria}, year = {1998}, type = {Working paper}, number = {97001}, month = {June}, edition = {Beta version}, } @ARTICLE{ForecastPro00, author = {Robert L Goodrich}, title = {The \pkg{Forecast Pro} Methodology}, journal = ijf, year = {2000}, volume = {16}, pages = {533-535}, number = {4}, pdf = {ForecastPro00.pdf}, } @ARTICLE{HR82, author = {E. J. Hannan and J. Rissanen}, title = {Recursive Estimation of Mixed Autoregressive-Moving Average Order}, journal = biok, year = {1982}, volume = {69}, pages = {81-94}, number = {1}, keywords = {Autoregressive-moving average; best coding; martingale; recursive calculation; strong convergence; vector autoregression}, } @ARTICLE{Hendry97, author = {David F. Hendry}, title = {The Econometrics of Macroeconomic Forecasting}, journal = {The Economic Journal}, year = {1997}, volume = {107}, pages = {1330-1357.}, number = {444}, } @ARTICLE{HEGY90, author = {S. Hylleberg and R. Engle and C. Granger and B. Yoo}, title = {Seasonal Integration and Cointegration}, journal = {Journal of Econometrics}, year = {1990}, volume = {44}, pages = {215-238}, } @ARTICLE{Hyndman01, author = {Rob J Hyndman}, title = {It's Time To Move from `What' To `Why'---Comments on the {M3}-Competition}, journal = ijf, year = {2001}, volume = {17}, pages = {567-570}, number = {4}, keywords = {commentaries on the M3-competition}, } @MANUAL{forecast, title = {\pkg{forecast}: Forecasting Functions for Time Series}, author = {Rob J Hyndman}, year = {2008}, note = {\proglang{R}~package version~1.11}, url = {http://CRAN.R-project.org/package=forecasting}, } @MANUAL{fma, title = {\pkg{fma}: Data Sets from ``{F}orecasting: Methods and Applications'' By {M}akridakis, {W}heelwright \& {H}yndman (1998)}, author = {Rob J Hyndman}, year = {2008}, note = {\proglang{R}~package version~1.11}, url = {http://CRAN.R-project.org/package=forecasting}, } @MANUAL{expsmooth, title = {\pkg{expsmooth}: Data Sets from ``{F}orecasting with Exponential Smoothing'' by Hyndman, Koehler, Ord \& Snyder (2008)}, author = {Rob J Hyndman}, year = {2008}, note = {\proglang{R}~package version~1.11}, url = {http://CRAN.R-project.org/package=forecasting}, } @MANUAL{Mcomp, title = {\pkg{Mcomp}: Data from the {M}-Competitions}, author = {Rob J Hyndman}, year = {2008}, note = {\proglang{R}~package version~1.11}, url = { http://CRAN.R-project.org/package=forecasting}, } @ARTICLE{HAA08, author = {Rob J Hyndman and {Md} Akram and Blyth C Archibald}, title = {The Admissible Parameter Space for Exponential Smoothing Models}, journal = {Annals of the Institute of Statistical Mathematics}, year = {2008}, volume = {60}, number = {2}, pages = {407--426} } @ARTICLE{HB03, author = {Rob J Hyndman and Billah, Baki}, title = {Unmasking the 
{T}heta Method}, journal = ijf, year = {2003}, volume = {19}, pages = {287-290}, number = {2}, keywords = {Exponential smoothing; forecasting competitions; State space models}, } @ARTICLE{HKPB05, author = {Rob J Hyndman and Maxwell L. King and Pitrun, Ivet and Billah, Baki}, title = {Local Linear Forecasts Using Cubic Smoothing Splines}, journal = anzjs, year = {2005}, volume = {47}, pages = {87-99}, number = {1}, keywords = {ARIMA models; Exponential smoothing; Holt's local linear forecasts; Maximum likelihood estimation; non-parametric regression; smoothing splines; state-space model; stochastic trends}, } @ARTICLE{HK06, author = {Rob J Hyndman and Anne B Koehler}, title = {Another Look at Measures of Forecast Accuracy}, journal = ijf, year = {2006}, volume = {22}, pages = {679-688}, issue = {4}, } @ARTICLE{HK2008, author = {Rob J Hyndman and Yeasmin Khandakar}, title = {Automatic Time Series Forecasting: The Forecast Package for R}, journal = jss, year = {2008}, volume = {27}, issue = {3}, } @ARTICLE{HKOS05, author = {Rob J Hyndman and Anne B Koehler and J Keith Ord and Ralph D Snyder}, title = {Prediction Intervals for Exponential Smoothing Using Two New Classes of State Space Models}, journal = {Journal of Forecasting}, year = {2005}, volume = {24}, pages = {17-37}, } @BOOK{expsmooth08, title = {Forecasting with Exponential Smoothing: The State Space Approach}, publisher = {Springer-Verlag}, year = {2008}, author = {Rob J Hyndman and Anne B Koehler and J Keith Ord and Ralph D Snyder}, url = {http://www.exponentialsmoothing.net/}, } @ARTICLE{HKSG02, author = {Rob J Hyndman and Anne B Koehler and Ralph D Snyder and Simone Grose}, title = {A State Space Framework for Automatic Forecasting Using Exponential Smoothing Methods}, journal = ijf, year = {2002}, volume = {18}, pages = {439-454}, number = {3}, keywords = {Prediction intervals; State space models}, } @ARTICLE{shortseasonal, author = {Rob J Hyndman and Andrey V Kostenko}, title = {Minimum Sample Size Requirements for Seasonal Forecasting Models}, journal = {Foresight: The International Journal of Applied Forecasting}, year = {2007}, volume = {6}, pages = {12-15}, } @ARTICLE{KPSS92, author = {Denis Kwiatkowski and Peter C.B. Phillips and Peter Schmidt and Yongcheol Shin}, title = {Testing the Null Hypothesis of Stationarity Against the Alternative of a Unit Root}, journal = je, year = {1992}, volume = {54}, pages = {159-178}, } @ARTICLE{Liu89, author = {L. M. Liu}, title = {Identification of Seasonal {Arima} Models Using a Filtering Method}, journal = commstm, year = {1989}, volume = {18}, pages = {2279-2288}, keywords = {model identification, seasonal time series, ARIMA models, filtering, intermediary models, calendar variation, intervention, transfer function models}, } @ARTICLE{Mcomp82, author = {S. Makridakis and A. Anderson and R. Carbone and R. Fildes and M. Hibon and R. Lewandowski and J. Newton and E. Parzen and R. Winkler}, title = {The Accuracy of Extrapolation (Time Series) Methods: Results of a Forecasting Competition}, journal = jf, year = {1982}, volume = {1}, pages = {111-153}, keywords = {Forecasting; Time series; Evaluation; Accuracy; Comparison; Empirical Study}, } @ARTICLE{Metal82, author = {Spyros Makridakis and A. Anderson and R. Carbone and R. Fildes and M. Hibon and R. Lewandowskiand J. Newton and E. Parzen and R. 
Winkler}, title = {The Accuracy of Extrapolation (Time Series) Methods: Results of a Forecasting Competition}, journal = jf, year = {1982}, volume = {1}, pages = {111--153}, } @ARTICLE{Metal93, author = {Spyros Makridakis and Chris Chatfield and Mich\'{e}le Hibon and Michael Lawrence and Terence Mills and J. Keith Ord and LeRoy F. Simmons}, title = {The {M}2-Competition: A Real-Time Judgmentally Based Forecasting study}, journal = ijf, year = {1993}, volume = {9}, pages = {5--22}, } @ARTICLE{M3comp00, author = {Spyros Makridakis and Michele Hibon}, title = {The {M3}-Competition: Results, Conclusions and Implications}, journal = ijf, year = {2000}, volume = {16}, pages = {451-476}, keywords = {Comparative methods-Time series: Univariate; Forecasting competitions; {M}-competition; Forecasting methods; Forecasting accuracy}, } @BOOK{MWH3, title = {Forecasting: Methods and Applications}, publisher = {John Wiley \& Sons}, year = {1998}, author = {Makridakis, Spyros and Wheelwright, Steven C. and Rob J Hyndman}, pages = {642}, address = {New York}, edition = {3rd}, url = {http://www.robhyndman.info/forecasting/}, } @ARTICLE{MP00a, author = {G. M\'{e}lard and J.-M Pasteels}, title = {Automatic {ARIMA} Modeling Including Intervention, Using Time Series Expert Software}, journal = ijf, year = {2000}, volume = {16}, pages = {497-508}, keywords = {M3-Competition; ARIMA models; Expert systems; Intervention analysis; Outliers}, } @ARTICLE{Meyer:2002, author = {David Meyer}, title = {Naive Time Series Forecasting Methods}, journal = {\proglang{R} News}, year = {2002}, volume = {2}, number = {2}, pages = {7--10}, month = {June}, url = {http://CRAN.R-project.org/doc/Rnews/}, } @ARTICLE{OKS97, author = {J. Keith Ord and Anne B. Koehler and Ralph D. Snyder}, title = {Estimation and Prediction for a Class of Dynamic Nonlinear Statistical Models}, journal = jasa, year = {1997}, volume = {92}, pages = {1621-1629}, keywords = {Forecasting; Holt-Winters; Maximum likelihood estimation; State-space models}, pdf = {OKS97.pdf}, } @ARTICLE{OL96, author = {Keith Ord and Sam Lowe}, title = {Automatic Forecasting}, journal = amstat, year = {1996}, volume = {50}, pages = {88-94}, number = {1}, month = {February}, keywords = {automatic, Forecasting, Autobox, AutocastII, Forecast Pro}, } @ARTICLE{Pegels69, author = {C. Carl Pegels}, title = {Exponential Forecasting: Some New Variations}, journal = mansci, year = {1969}, volume = {15}, pages = {311-315}, number = {5}, } @ARTICLE{Reilly00, author = {Reilly, David}, title = {The \pkg{Autobox} System}, journal = ijf, year = {2000}, volume = {16}, pages = {531-533}, number = {4}, pdf = {Reilly00.pdf}, } @ARTICLE{Ripley:2002, author = {Brian D. 
Ripley}, title = {Time Series in \proglang{R}~1.5.0}, journal = {\proglang{R} News}, year = {2002}, volume = {2}, number = {2}, pages = {2--7}, month = {June}, url = {http://CRAN.R-project.org/doc/Rnews/}, } @ARTICLE{SH05, author = {Lydia Shenstone and Rob J Hyndman}, title = {Stochastic Models Underlying {C}roston's Method for Intermittent Demand Forecasting}, journal = jf, year = {2005}, volume = {24}, pages = {389-402}, } @ARTICLE{SY94, author = {Jeremy Smith and Sanjay Yadav}, title = {Forecasting Costs Incurred from Unit Differencing Fractionally Integrated Processes}, journal = ijf, year = {1994}, volume = {10}, pages = {507-514}, number = {4}, pdf = {SY94.pdf}, } @ARTICLE{SKHO04, author = {Ralph D Snyder and Anne B Koehler and Rob J Hyndman and J Keith Ord}, title = {Exponential Smoothing Models: Means and Variances for Lead-Time Demand}, journal = ejor, year = {2004}, volume = {158}, pages = {444-455}, number = {2}, } @ARTICLE{Taylor03a, author = {James W. Taylor}, title = {Exponential Smoothing with a Damped Multiplicative Trend}, journal = ijf, year = {2003}, volume = {19}, pages = {715-725}, keywords = {Damped trend exponential smoothing, Pegels classification, Multiplicative trend}, } @ARTICLE{Wallis99, author = {Wallis, K. F.}, title = {Asymmetric Density Forecasts of Inflation and the {Bank of England's} Fan Chart}, journal = {National Institute Economic Review}, year = {1999}, volume = {167}, pages = {106-112}, number = {1}, } @Manual{R, title = {\proglang{R}: A Language and Environment for Statistical Computing}, author = {{\proglang{R} Development Core Team}}, organization = {\proglang{R} Foundation for Statistical Computing}, address = {Vienna, Austria}, year = {2008}, note = {{ISBN} 3-900051-07-0}, url = {http://www.R-project.org/}, } forecast/vignettes/JSS2008.Rmd0000644000176200001440000017272514473635572015543 0ustar liggesusers--- author: - name: Rob J Hyndman affiliation: Monash University address: > Department of Econometrics \& Business Statistics Monash University Clayton VIC 3800, Australia email: Rob.Hyndman@monash.edu url: https://robjhyndman.com - name: Yeasmin Khandakar affiliation: Monash University address: > Department of Econometrics \& Business Statistics Monash University Clayton VIC 3800, Australia title: formatted: "Automatic Time Series Forecasting:\\newline the \\pkg{forecast} Package for \\proglang{R}" # If you use tex in the formatted title, also supply version without plain: "Automatic Time Series Forecasting: the forecast Package for R" # For running headers, if needed short: "\\pkg{forecast}: Automatic Time Series Forecasting" abstract: > This vignette to the \proglang{R} package \pkg{forecast} is an updated version of @HK2008, published in the *Journal of Statistical Software*. Automatic forecasts of large numbers of univariate time series are often needed in business and other contexts. We describe two automatic forecasting algorithms that have been implemented in the \pkg{forecast} package for \proglang{R}. The first is based on innovations state space models that underly exponential smoothing methods. The second is a step-wise algorithm for forecasting with ARIMA models. The algorithms are applicable to both seasonal and non-seasonal data, and are compared and illustrated using four real time series. 
We also briefly describe some of the other functionality available in the \pkg{forecast} package.} keywords: # at least one keyword must be supplied formatted: [ARIMA models, automatic forecasting, exponential smoothing, prediction intervals, state space models, time series, "\\proglang{R}"] plain: [ARIMA models, automatic forecasting, exponential smoothing, prediction intervals, state space models, time series, R] preamble: > \usepackage{amsmath,rotating,bm,fancyvrb,paralist,thumbpdf} \Volume{27} \Issue{3} \Month{July} \Year{2008} \Submitdate{2007-05-29} \Acceptdate{2008-03-22} \def\damped{$_{\mbox{\footnotesize d}}$} \let\var=\VAR \def\R{\proglang{R}} \def\dampfactor{\phi_h} \raggedbottom bibliography: JSS-paper.bib vignette: > %\VignetteIndexEntry{Automatic Time Series Forecasting: the forecast Package for R (Hyndman & Khandakar, JSS 2008)} %\VignetteEngine{knitr::rmarkdown} %\VignetteEncoding{UTF-8} documentclass: jss output: if (rmarkdown::pandoc_version() >= "2") rticles::jss_article else rmarkdown::html_vignette fig_width: 7 fig_height: 6 fig_caption: true --- ```{r load_forecast, echo=FALSE, message=FALSE} library('forecast') ``` ```{r load_expsmooth, echo=FALSE, message=FALSE, eval=FALSE} library('expsmooth') ``` ```{r expsmooth_datsets, echo=FALSE, message=FALSE} bonds <- structure(c(5.83, 6.06, 6.58, 7.09, 7.31, 7.23, 7.43, 7.37, 7.6, 7.89, 8.12, 7.96, 7.93, 7.61, 7.33, 7.18, 6.74, 6.27, 6.38, 6.6, 6.3, 6.13, 6.02, 5.79, 5.73, 5.89, 6.37, 6.62, 6.85, 7.03, 6.99, 6.75, 6.95, 6.64, 6.3, 6.4, 6.69, 6.52, 6.8, 7.01, 6.82, 6.6, 6.32, 6.4, 6.11, 5.82, 5.87, 5.89, 5.63, 5.65, 5.73, 5.72, 5.73, 5.58, 5.53, 5.41, 4.87, 4.58, 4.89, 4.69, 4.78, 4.99, 5.23, 5.18, 5.54, 5.9, 5.8, 5.94, 5.91, 6.1, 6.03, 6.26, 6.66, 6.52, 6.26, 6, 6.42, 6.1, 6.04, 5.83, 5.8, 5.74, 5.72, 5.23, 5.14, 5.1, 4.89, 5.13, 5.37, 5.26, 5.23, 4.97, 4.76, 4.55, 4.61, 5.07, 5, 4.9, 5.28, 5.21, 5.15, 4.9, 4.62, 4.24, 3.88, 3.91, 4.04, 4.03, 4.02, 3.9, 3.79, 3.94, 3.56, 3.32, 3.93, 4.44, 4.29, 4.27, 4.29, 4.26, 4.13, 4.06, 3.81, 4.32, 4.7), .Tsp = c(1994, 2004.33333333333, 12), class = "ts") usnetelec <- structure(c(296.1, 334.1, 375.3, 403.8, 447, 476.3, 550.3, 603.9, 634.6, 648.5, 713.4, 759.2, 797.1, 857.9, 920, 987.2, 1058.4, 1147.5, 1217.8, 1332.8, 1445.5, 1535.1, 1615.9, 1753, 1864.1, 1870.3, 1920.8, 2040.9, 2127.4, 2209.4, 2250.7, 2289.6, 2298, 2244.4, 2313.4, 2419.5, 2473, 2490.5, 2575.3, 2707.4, 2967.3, 3038, 3073.8, 3083.9, 3197.2, 3247.5, 3353.5, 3444.2, 3492.2, 3620.3, 3694.8, 3802.1, 3736.6, 3858.5, 3848), .Tsp = c(1949, 2003, 1), class = "ts") ukcars <- structure(c(330.371, 371.051, 270.67, 343.88, 358.491, 362.822, 261.281, 240.355, 325.382, 316.7, 171.153, 257.217, 298.127, 251.464, 181.555, 192.598, 245.652, 245.526, 225.261, 238.211, 257.385, 228.461, 175.371, 226.462, 266.15, 287.251, 225.883, 265.313, 272.759, 234.134, 196.462, 205.551, 291.283, 284.422, 221.571, 250.697, 253.757, 267.016, 220.388, 277.801, 283.233, 302.072, 259.72, 297.658, 306.129, 322.106, 256.723, 341.877, 356.004, 361.54, 270.433, 311.105, 326.688, 327.059, 274.257, 367.606, 346.163, 348.211, 250.008, 292.518, 343.318, 343.429, 275.386, 329.747, 364.521, 378.448, 300.798, 331.757, 362.536, 389.133, 323.322, 391.832, 421.646, 416.823, 311.713, 381.902, 422.982, 427.722, 376.85, 458.58, 436.225, 441.487, 369.566, 450.723, 462.442, 468.232, 403.636, 413.948, 460.496, 448.932, 407.787, 469.408, 494.311, 433.24, 335.106, 378.795, 387.1, 372.395, 335.79, 397.08, 449.755, 402.252, 391.847, 385.89, 424.325, 433.28, 391.213, 408.74, 445.458, 
428.202, 379.048, 394.042, 432.796), .Tsp = c(1977, 2005, 4), class = "ts") visitors <- structure(c(75.7, 75.4, 83.1, 82.9, 77.3, 105.7, 121.9, 150, 98, 118, 129.5, 110.6, 91.7, 94.8, 109.5, 105.1, 95, 130.3, 156.7, 190.1, 139.7, 147.8, 145.2, 132.7, 120.7, 116.5, 142, 140.4, 128, 165.7, 183.1, 222.8, 161.3, 180.4, 185.2, 160.5, 157.1, 163.8, 203.3, 196.9, 179.6, 207.3, 208, 245.8, 168.9, 191.1, 180, 160.1, 136.6, 142.7, 175.4, 161.4, 149.9, 174.1, 192.7, 247.4, 176.2, 192.8, 189.1, 181.1, 149.9, 157.3, 185.3, 178.2, 162.7, 190.6, 198.6, 253.1, 177.4, 190.6, 189.2, 168, 161.4, 172.2, 208.3, 199.3, 197.4, 216, 223.9, 266.8, 196.1, 238.2, 217.8, 203.8, 175.2, 176.9, 219.3, 199.1, 190, 229.3, 255, 302.4, 242.8, 245.5, 257.9, 226.3, 213.4, 204.6, 244.6, 239.9, 224, 267.2, 285.9, 344, 250.5, 304.3, 307.4, 255.1, 214.9, 230.9, 282.5, 265.4, 254, 301.6, 311, 384, 303.8, 319.1, 313.5, 294.2, 244.8, 261.4, 329.7, 304.9, 268.6, 320.7, 342.9, 422.3, 317.2, 392.7, 365.6, 333.2, 261.5, 306.9, 358.2, 329.2, 309.2, 350.4, 375.6, 465.2, 342.9, 408, 390.9, 325.9, 289.1, 308.2, 397.4, 330.4, 330.9, 366.5, 379.5, 448.3, 346.2, 353.6, 338.6, 341.1, 283.4, 304.2, 372.3, 323.7, 323.9, 354.8, 367.9, 457.6, 351, 398.6, 389, 334.1, 298.1, 317.1, 388.5, 355.6, 353.1, 397, 416.7, 460.8, 360.8, 434.6, 411.9, 405.6, 319.3, 347.9, 429, 372.9, 403, 426.5, 459.9, 559.9, 416.6, 429.2, 428.7, 405.4, 330.2, 370, 446.9, 384.6, 366.3, 378.5, 376.2, 523.2, 379.3, 437.2, 446.5, 360.3, 329.9, 339.4, 418.2, 371.9, 358.6, 428.9, 437, 534, 396.6, 427.5, 392.5, 321.5, 260.9, 308.3, 415.5, 362.2, 385.6, 435.3, 473.3, 566.6, 420.2, 454.8, 432.3, 402.8, 341.3, 367.3, 472, 405.8, 395.6, 449.9, 479.9, 593.1, 462.4, 501.6, 504.7, 409.5), .Tsp = c(1985.33333333333, 2005.25, 12), class = "ts") ``` # Introduction Automatic forecasts of large numbers of univariate time series are often needed in business. It is common to have over one thousand product lines that need forecasting at least monthly. Even when a smaller number of forecasts are required, there may be nobody suitably trained in the use of time series models to produce them. In these circumstances, an automatic forecasting algorithm is an essential tool. Automatic forecasting algorithms must determine an appropriate time series model, estimate the parameters and compute the forecasts. They must be robust to unusual time series patterns, and applicable to large numbers of series without user intervention. The most popular automatic forecasting algorithms are based on either exponential smoothing or ARIMA models. In this article, we discuss the implementation of two automatic univariate forecasting methods in the \pkg{forecast} package for \proglang{R}. We also briefly describe some univariate forecasting methods that are part of the \pkg{forecast} package. The \pkg{forecast} package for the \proglang{R} system for statistical computing [@R] is available from the Comprehensive \proglang{R} Archive Network at \url{https://CRAN.R-project.org/package=forecast}. Version `r packageVersion('forecast')` of the package was used for this paper. The \pkg{forecast} package contains functions for univariate forecasting and a few examples of real time series data. For more extensive testing of forecasting methods, the \pkg{fma} package contains the 90 data sets from @MWH3, the \pkg{expsmooth} package contains 24 data sets from @expsmooth08, and the \pkg{Mcomp} package contains the 1001 time series from the M-competition [@Mcomp82] and the 3003 time series from the M3-competition [@M3comp00]. 
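Readers who wish to reproduce the examples that follow can install the \pkg{forecast} package and the companion data packages from CRAN in the usual way. The following sketch is not evaluated when this vignette is built.

```{r install-sketch, eval=FALSE}
# Install the forecast package and the companion data packages from CRAN,
# then load forecast for the examples in this paper.
install.packages(c("forecast", "fma", "expsmooth", "Mcomp"))
library("forecast")
```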
The \pkg{forecast} package implements automatic forecasting using exponential smoothing, ARIMA models, the Theta method [@AN00], cubic splines [@HKPB05], as well as other common forecasting methods. In this article, we primarily discuss the exponential smoothing approach (in Section \ref{sec:expsmooth}) and the ARIMA modelling approach (in Section \ref{sec:arima}) to automatic forecasting. In Section \ref{sec:package}, we describe the implementation of these methods in the \pkg{forecast} package, along with other features of the package. # Exponential smoothing {#sec:expsmooth} Although exponential smoothing methods have been around since the 1950s, a modelling framework incorporating procedures for model selection was not developed until relatively recently. @OKS97, @HKSG02 and @HKOS05 have shown that all exponential smoothing methods (including non-linear methods) are optimal forecasts from innovations state space models. Exponential smoothing methods were originally classified by Pegels' (1969)\nocite{Pegels69} taxonomy. This was later extended by @Gardner85, modified by @HKSG02, and extended again by @Taylor03a, giving a total of fifteen methods seen in the following table. \begin{table}[!hbt] \begin{center}\vspace{0.2cm} \begin{tabular}{|ll|ccc|} \hline & &\multicolumn{3}{c|}{Seasonal Component} \\ \multicolumn{2}{|c|}{Trend}& N & A & M\\ \multicolumn{2}{|c|}{Component} & (None) & (Additive) & (Multiplicative)\\ \cline{3-5} &&&\\[-0.3cm] N & (None) & N,N & N,A & N,M\\ &&&&\\[-0.3cm] A & (Additive) & A,N & A,A & A,M\\ &&&&\\[-0.3cm] A\damped & (Additive damped) & A\damped,N & A\damped,A & A\damped,M\\ &&&&\\[-0.3cm] M & (Multiplicative) & M,N & M,A & M,M\\ &&&&\\[-0.3cm] M\damped & (Multiplicative damped) & M\damped,N & M\damped,A & M\damped,M\\ \hline \end{tabular}\vspace{0.2cm} \end{center} \caption{The fifteen exponential smoothing methods.} \end{table} Some of these methods are better known under other names. For example, cell (N,N) describes the simple exponential smoothing (or SES) method, cell (A,N) describes Holt's linear method, and cell (A\damped,N) describes the damped trend method. The additive Holt-Winters' method is given by cell (A,A) and the multiplicative Holt-Winters' method is given by cell (A,M). The other cells correspond to less commonly used but analogous methods. ## Point forecasts for all methods We denote the observed time series by $y_1,y_2,\dots,y_n$. A forecast of $y_{t+h}$ based on all of the data up to time $t$ is denoted by $\hat{y}_{t+h|t}$. To illustrate the method, we give the point forecasts and updating equations for method (A,A), the Holt-Winters' additive method: \begin{subequations}\label{eq:AMmethod} \begin{align} \mbox{Level:}\quad &\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha)(\ell_{t-1} + b_{t-1})\hspace*{1cm} \label{eq:3-44a}\\ \mbox{Growth:}\quad &b_t = \beta^*(\ell_t - \ell_{t-1}) + (1-\beta^*)b_{t-1} \label{eq:3-45a}\\ \mbox{Seasonal:}\quad &s_t = \gamma(y_t - \ell_{t-1} -b_{t-1}) + (1-\gamma)s_{t-m}\label{eq:3-46a}\\ \mbox{Forecast:}\quad &\hat{y}_{t+h|t} = \ell_t + b_th +s_{t-m+h_m^+}. \label{eq:3-47a} \end{align} \end{subequations} where $m$ is the length of seasonality (e.g., the number of months or quarters in a year), $\ell_t$ represents the level of the series, $b_t$ denotes the growth, $s_t$ is the seasonal component, $\hat{y}_{t+h|t}$ is the forecast for $h$ periods ahead, and $h_m^+ = \big[(h-1) \mbox{ mod } m\big] + 1$. 
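To make these recursions concrete, the following \proglang{R} sketch implements the point forecasts of method \eqref{eq:AMmethod} directly. The function \code{hw_additive()} is a hypothetical helper written for illustration only; it takes the smoothing parameters and initial states as given, whereas in practice they must be estimated, as discussed next.

```{r hw-additive-sketch, eval=FALSE}
# A minimal sketch of the Holt-Winters additive recursions of method (A,A).
# alpha, beta_star and gamma are the smoothing parameters; l0, b0 and s0
# (a vector of m seasonal states) are the initial states.
hw_additive <- function(y, m, alpha, beta_star, gamma, l0, b0, s0, h = 2 * m) {
  l <- l0; b <- b0; s <- s0                                 # s holds s[t-m], ..., s[t-1]
  for (t in seq_along(y)) {
    s_new <- gamma * (y[t] - l - b) + (1 - gamma) * s[1]    # seasonal update
    l_new <- alpha * (y[t] - s[1]) + (1 - alpha) * (l + b)  # level update
    b <- beta_star * (l_new - l) + (1 - beta_star) * b      # growth update
    l <- l_new
    s <- c(s[-1], s_new)
  }
  # Point forecasts: l + h*b plus the seasonal state matching h_m^+
  l + seq_len(h) * b + s[((seq_len(h) - 1) %% m) + 1]
}
```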
To use method \eqref{eq:AMmethod}, we need values for the initial states $\ell_0$, $b_0$ and $s_{1-m},\dots,s_0$, and for the smoothing parameters $\alpha$, $\beta^*$ and $\gamma$. All of these will be estimated from the observed data.

Equation \eqref{eq:3-46a} is slightly different from the usual Holt-Winters equations such as those in @MWH3 or @BOK05. These authors replace \eqref{eq:3-46a} with
$$ s_t = \gamma^*(y_t - \ell_{t}) + (1-\gamma^*)s_{t-m}. $$
If $\ell_t$ is substituted using \eqref{eq:3-44a}, we obtain
$$s_t = \gamma^*(1-\alpha)(y_t - \ell_{t-1}-b_{t-1}) + \{1-\gamma^*(1-\alpha)\}s_{t-m}. $$
Thus, we obtain identical forecasts using this approach by replacing $\gamma$ in \eqref{eq:3-46a} with $\gamma^*(1-\alpha)$. The modification given in \eqref{eq:3-46a} was proposed by @OKS97 to make the state space formulation simpler. It is equivalent to Archibald's (1990)\nocite{Archibald90} variation of the Holt-Winters' method.

\begin{sidewaystable}
\begin{small}
\begin{center}
\begin{tabular}{|c|lll|}
\hline
 & \multicolumn{3}{c|}{Seasonal} \\
{Trend} & \multicolumn{1}{c}{N} & \multicolumn{1}{c}{A} & \multicolumn{1}{c|}{M}\\
\cline{2-4}
 & $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) \ell_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) \ell_{t-1}$\\
{N} & & $s_t = \gamma (y_t - \ell_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / \ell_{t-1}) + (1-\gamma) s_{t-m}$ \\
 & $\hat{y}_{t+h|t} = \ell_t$ & $\hat{y}_{t+h|t} = \ell_t + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_ts_{t-m+h_m^+}$ \\
\hline
 & $\ell_t = \alpha y_t + (1-\alpha) (\ell_{t-1}+b_{t-1})$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) (\ell_{t-1}+b_{t-1})$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) (\ell_{t-1}+b_{t-1})$\\
{A} & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$\\
 & & $s_t = \gamma (y_t - \ell_{t-1}-b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}+b_{t-1})) + (1-\gamma) s_{t-m}$\\
 & $\hat{y}_{t+h|t} = \ell_t+hb_t$ & $\hat{y}_{t+h|t} = \ell_t +hb_t +s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= (\ell_t+hb_t)s_{t-m+h_m^+}$ \\
\hline
 & $\ell_t = \alpha y_t + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$\\
{A\damped } & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$\\
 & & $s_t = \gamma (y_t - \ell_{t-1}-\phi b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}+\phi b_{t-1})) + (1-\gamma) s_{t-m}$\\
 & $\hat{y}_{t+h|t} = \ell_t+\dampfactor b_t$ & $\hat{y}_{t+h|t} = \ell_t+\dampfactor b_t+s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= (\ell_t+\dampfactor b_t)s_{t-m+h_m^+}$ \\
\hline
 & $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}b_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) \ell_{t-1}b_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) \ell_{t-1}b_{t-1}$\\
{M} & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$\\
 & & $s_t = \gamma (y_t - \ell_{t-1}b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}b_{t-1})) + (1-\gamma) s_{t-m}$\\
 & $\hat{y}_{t+h|t} = \ell_tb_t^h$ & $\hat{y}_{t+h|t} =
\ell_tb_t^h + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_tb_t^hs_{t-m+h_m^+}$ \\ \hline & $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}b^\phi_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha)\ell_{t-1}b^\phi_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha)\ell_{t-1}b^\phi_{t-1}$\\ {M\damped } & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$\\ & & $s_t = \gamma (y_t - \ell_{t-1}b^\phi_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}b^\phi_{t-1})) + (1-\gamma) s_{t-m}$\\ & $\hat{y}_{t+h|t} = \ell_tb_t^{\phi_h}$ & $\hat{y}_{t+h|t} = \ell_tb_t^{\phi_h} + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_tb_t^{\phi_h}s_{t-m+h_m^+}$ \\ \hline \end{tabular} \end{center} \end{small} \caption{Formulae for recursive calculations and point forecasts. In each case, $\ell_t$ denotes the series level at time $t$, $b_t$ denotes the slope at time $t$, $s_t$ denotes the seasonal component of the series at time $t$, and $m$ denotes the number of seasons in a year; $\alpha$, $\beta^*$, $\gamma$ and $\phi$ are constants, $\phi_h = \phi+\phi^2+\dots+\phi^{h}$ and $h_m^+ = \big[(h-1) \mbox{ mod } m\big] + 1$.}\label{table:pegels} \end{sidewaystable} Table \ref{table:pegels} gives recursive formulae for computing point forecasts $h$ periods ahead for all of the exponential smoothing methods. Some interesting special cases can be obtained by setting the smoothing parameters to extreme values. For example, if $\alpha=0$, the level is constant over time; if $\beta^*=0$, the slope is constant over time; and if $\gamma=0$, the seasonal pattern is constant over time. At the other extreme, naïve forecasts (i.e., $\hat{y}_{t+h|t}=y_t$ for all $h$) are obtained using the (N,N) method with $\alpha=1$. Finally, the additive and multiplicative trend methods are special cases of their damped counterparts obtained by letting $\phi=1$. ## Innovations state space models {#sec:statespace} For each exponential smoothing method in Table \ref{table:pegels}, @expsmooth08 describe two possible innovations state space models, one corresponding to a model with additive errors and the other to a model with multiplicative errors. If the same parameter values are used, these two models give equivalent point forecasts, although different prediction intervals. Thus there are 30 potential models described in this classification. Historically, the nature of the error component has often been ignored, because the distinction between additive and multiplicative errors makes no difference to point forecasts. We are careful to distinguish exponential smoothing \emph{methods} from the underlying state space \emph{models}. An exponential smoothing method is an algorithm for producing point forecasts only. The underlying stochastic state space model gives the same point forecasts, but also provides a framework for computing prediction intervals and other properties. To distinguish the models with additive and multiplicative errors, we add an extra letter to the front of the method notation. The triplet (E,T,S) refers to the three components: error, trend and seasonality. So the model ETS(A,A,N) has additive errors, additive trend and no seasonality---in other words, this is Holt's linear method with additive errors. Similarly, ETS(M,M\damped,M) refers to a model with multiplicative errors, a damped multiplicative trend and multiplicative seasonality. 
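In the \pkg{forecast} package, this triplet is used directly when specifying a model: the \code{model} argument of \code{ets()} takes the three letters, and damping is requested separately via the \code{damped} argument. A brief sketch (output not shown):

```{r ets-triplet, eval=FALSE}
# "AAN" = additive errors, additive trend, no seasonality (Holt's linear
# method); damped = TRUE gives the damped trend model ETS(A,Ad,N).
fit <- ets(WWWusage, model = "AAN", damped = TRUE)
summary(fit)
```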
The notation ETS($\cdot$,$\cdot$,$\cdot$) helps in remembering the order in which the components are specified.

Once a model is specified, we can study the probability distribution of future values of the series and find, for example, the conditional mean of a future observation given knowledge of the past. We denote this as $\mu_{t+h|t} = \E(y_{t+h} \mid \bm{x}_t)$, where $\bm{x}_t$ contains the unobserved components such as $\ell_t$, $b_t$ and $s_t$. For $h=1$ we use $\mu_t\equiv\mu_{t+1|t}$ as a shorthand notation. For many models, these conditional means will be identical to the point forecasts given in Table \ref{table:pegels}, so that $\mu_{t+h|t}=\hat{y}_{t+h|t}$. However, for other models (those with multiplicative trend or multiplicative seasonality), the conditional mean and the point forecast will differ slightly for $h\ge 2$.

We illustrate these ideas using the damped trend method of @GM85.

\subsubsection{Additive error model: ETS(A,A$_d$,N)}

Let $\mu_t = \hat{y}_t = \ell_{t-1}+\phi b_{t-1}$ denote the one-step forecast of $y_{t}$ assuming that we know the values of all parameters. Also, let $\varepsilon_t = y_t - \mu_t$ denote the one-step forecast error at time $t$. From the equations in Table \ref{table:pegels}, we find that
\begin{align}
\label{ss1}
y_t &= \ell_{t-1} + \phi b_{t-1} + \varepsilon_t\\
\ell_t &= \ell_{t-1} + \phi b_{t-1} + \alpha \varepsilon_t \label{ss2}\\
b_t &= \phi b_{t-1} + \beta^*(\ell_t - \ell_{t-1}- \phi b_{t-1}) = \phi b_{t-1} + \alpha\beta^*\varepsilon_t. \label{ss3}
\end{align}
We simplify the last expression by setting $\beta=\alpha\beta^*$. The three equations above constitute a state space model underlying the damped Holt's method. Note that it is an \emph{innovations} state space model [@AM79;@Aoki87] because the same error term appears in each equation. We can write it in standard state space notation by defining the state vector as $\bm{x}_t = (\ell_t,b_t)'$ and expressing \eqref{ss1}--\eqref{ss3} as
\begin{subequations}
\begin{align}
y_t &= \left[ 1 ~ \phi \right] \bm{x}_{t-1} + \varepsilon_t\label{obseq}\\
\bm{x}_t &= \left[\begin{array}{ll} 1 & \phi\\ 0 & \phi \end{array}\right]\bm{x}_{t-1} + \left[\begin{array}{l} \alpha\\ \beta \end{array}\right]\varepsilon_t.\label{stateeq}
\end{align}
\end{subequations}
The model is fully specified once we state the distribution of the error term $\varepsilon_t$. Usually we assume that these are independent and identically distributed, following a normal distribution with mean 0 and variance $\sigma^2$, which we write as $\varepsilon_t \sim\mbox{NID}(0, \sigma^2)$.

\subsubsection{Multiplicative error model: ETS(M,A$_d$,N)}

A model with multiplicative error can be derived similarly, by first setting $\varepsilon_t = (y_t-\mu_t)/\mu_t$, so that $\varepsilon_t$ is the relative error. Then, following a similar approach to that for additive errors, we find
\begin{align*}
y_t &= (\ell_{t-1} + \phi b_{t-1})(1 + \varepsilon_t)\\
\ell_t &= (\ell_{t-1} + \phi b_{t-1})(1 + \alpha \varepsilon_t)\\
b_t &= \phi b_{t-1} + \beta(\ell_{t-1}+\phi b_{t-1})\varepsilon_t,
\end{align*}
or
\begin{align*}
y_t &= \left[ 1 ~ \phi \right] \bm{x}_{t-1}(1 + \varepsilon_t)\\
\bm{x}_t &= \left[\begin{array}{ll} 1 & \phi \\ 0 & \phi \end{array}\right]\bm{x}_{t-1} + \left[ 1 ~ \phi \right] \bm{x}_{t-1} \left[\begin{array}{l} \alpha\\ \beta \end{array}\right]\varepsilon_t.
\end{align*}
Again we assume that $\varepsilon_t \sim \mbox{NID}(0,\sigma^2)$.
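To make these recursions concrete, the following minimal sketch in plain \R\ simulates data from the ETS(M,A$_d$,N) equations above; the function name and parameter values are purely illustrative and are not taken from any fitted model.

```{r sim-madn-sketch, eval=FALSE, echo=TRUE}
# Sketch only: simulate the ETS(M,Ad,N) recursions with assumed parameter values
simulate_MAdN <- function(n, alpha = 0.3, beta = 0.1, phi = 0.95,
                          l0 = 100, b0 = 1, sigma = 0.02) {
  y <- numeric(n)
  l <- l0
  b <- b0
  for (t in seq_len(n)) {
    mu  <- l + phi * b               # one-step forecast mu_t
    eps <- rnorm(1, 0, sigma)        # relative error, NID(0, sigma^2)
    y[t] <- mu * (1 + eps)           # observation equation
    l <- mu * (1 + alpha * eps)      # level equation
    b <- phi * b + beta * mu * eps   # slope equation (beta = alpha * beta^*)
  }
  ts(y)
}
```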
Of course, this multiplicative error model is a nonlinear state space model, which is usually considered difficult to handle for estimation and forecasting. However, that is one of the many advantages of the innovations form of state space models --- we can still compute forecasts, the likelihood and prediction intervals for this nonlinear model with no more effort than is required for the additive error model.

## State space models for all exponential smoothing methods {#sec:ssmodels}

There are similar state space models for all 30 exponential smoothing variations. The general model involves a state vector $\bm{x}_t = (\ell_t, b_t, s_t, s_{t-1}, \dots, s_{t-m+1})'$ and state space equations of the form
\begin{subequations}\label{eq:ss}
\begin{align}
y_t &= w(\bm{x}_{t-1}) + r(\bm{x}_{t-1})\varepsilon_t \label{eq:ss1}\\
\bm{x}_t &= f(\bm{x}_{t-1}) + g(\bm{x}_{t-1})\varepsilon_t \label{eq:ss2}
\end{align}
\end{subequations}
where $\{\varepsilon_t\}$ is a Gaussian white noise process with mean zero and variance $\sigma^2$, and $\mu_t = w(\bm{x}_{t-1})$. The model with additive errors has $r(\bm{x}_{t-1})=1$, so that $y_t = \mu_{t} + \varepsilon_t$. The model with multiplicative errors has $r(\bm{x}_{t-1})=\mu_t$, so that $y_t = \mu_{t}(1 + \varepsilon_t)$. Thus, $\varepsilon_t = (y_t - \mu_t)/\mu_t$ is the relative error for the multiplicative model. The models are not unique; clearly, any value of $r(\bm{x}_{t-1})$ will lead to identical point forecasts for $y_t$.

All of the methods in Table \ref{table:pegels} can be written in the form \eqref{eq:ss1} and \eqref{eq:ss2}. The specific form for each model is given in @expsmooth08.

Some of the combinations of trend, seasonality and error can occasionally lead to numerical difficulties; specifically, any model equation that requires division by a state component could involve division by zero. This is a problem for models with additive errors and either multiplicative trend or multiplicative seasonality, as well as for the model with multiplicative errors, multiplicative trend and additive seasonality. These models should therefore be used with caution.

The multiplicative error models are useful when the data are strictly positive, but are not numerically stable when the data contain zeros or negative values. So when the time series is not strictly positive, only the six fully additive models may be applied.

The point forecasts given in Table \ref{table:pegels} are easily obtained from these models by iterating equations \eqref{eq:ss1} and \eqref{eq:ss2} for $t=n+1, n+2,\dots,n+h$, setting $\varepsilon_{n+j}=0$ for $j=1,\dots,h$. In most cases (notable exceptions being models with multiplicative seasonality or multiplicative trend for $h\ge2$), the point forecasts can be shown to be equal to $\mu_{t+h|t} = \E(y_{t+h} \mid \bm{x}_t)$, the conditional expectation of the corresponding state space model.

The models also provide a means of obtaining prediction intervals. In the case of the linear models, where the forecast distributions are normal, we can derive the conditional variance $v_{t+h|t} = \var(y_{t+h} \mid \bm{x}_t)$ and obtain prediction intervals accordingly. This approach also works for many of the nonlinear models. Detailed derivations of the results for many models are given in @HKOS05.

A more direct approach that works for all of the models is to simply simulate many future sample paths conditional on the last estimate of the state vector, $\bm{x}_t$. Then prediction intervals can be obtained from the percentiles of the simulated sample paths.
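For example, given a fitted \code{ets} object (the \code{simulate()} method for such objects is described in Section \ref{sec:package}), a rough sketch of simulation-based intervals is as follows; the horizon and number of paths here are illustrative.

```{r sim-intervals-sketch, eval=FALSE, echo=TRUE}
fit <- ets(usnetelec)
# 5000 future sample paths, each 10 steps ahead
paths <- replicate(5000, simulate(fit, nsim = 10))
# 80% prediction intervals from the simulated percentiles
apply(paths, 1, quantile, prob = c(0.1, 0.9))
```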
Point forecasts can also be obtained in this way by taking the average of the simulated values at each future time period. An advantage of this approach is that we generate an estimate of the complete predictive distribution, which is especially useful in applications such as inventory planning, where expected costs depend on the whole distribution.

## Estimation {#sec:estimation}

In order to use these models for forecasting, we need to know the values of $\bm{x}_0$ and the parameters $\alpha$, $\beta$, $\gamma$ and $\phi$. It is easy to compute the likelihood of the innovations state space model \eqref{eq:ss}, and so obtain maximum likelihood estimates. @OKS97 show that
\begin{equation}\label{likelihood}
L^*(\bm\theta,\bm{x}_0) = n\log\Big(\sum_{t=1}^n \varepsilon^2_t\Big) + 2\sum_{t=1}^n \log|r(\bm{x}_{t-1})|
\end{equation}
is equal to twice the negative logarithm of the likelihood function (with constant terms eliminated), conditional on the parameters $\bm\theta = (\alpha,\beta,\gamma,\phi)'$ and the initial states $\bm{x}_0 = (\ell_0,b_0,s_0,s_{-1},\dots,s_{-m+1})'$, where $n$ is the number of observations. This is easily computed by simply using the recursive equations in Table \ref{table:pegels}. Unlike state space models with multiple sources of error, we do not need to use the Kalman filter to compute the likelihood.

The parameters $\bm\theta$ and the initial states $\bm{x}_0$ can be estimated by minimizing $L^*$. Most implementations of exponential smoothing use an ad hoc heuristic scheme to estimate $\bm{x}_0$. However, with modern computers, there is no reason why we cannot estimate $\bm{x}_0$ along with $\bm\theta$, and the resulting forecasts are often substantially better when we do.

We constrain the initial states $\bm{x}_0$ so that the seasonal indices add to zero for additive seasonality, and add to $m$ for multiplicative seasonality. There have been several suggestions for restricting the parameter space for $\alpha$, $\beta$ and $\gamma$. The traditional approach is to ensure that the various equations can be interpreted as weighted averages, thus requiring $\alpha$, $\beta^*=\beta/\alpha$, $\gamma^*=\gamma/(1-\alpha)$ and $\phi$ to all lie within $(0,1)$. This suggests
$$0<\alpha<1,\qquad 0<\beta<\alpha,\qquad 0<\gamma < 1-\alpha,\qquad\mbox{and}\qquad 0<\phi<1. $$
However, @HAA08 show that these restrictions are usually stricter than necessary (although in a few cases they are not restrictive enough).

## Model selection

Forecast accuracy measures such as mean squared error (MSE) can be used for selecting a model for a given set of data, provided the errors are computed from data in a hold-out set and not from the same data as were used for model estimation. However, there are often too few out-of-sample errors to draw reliable conclusions. Consequently, a penalized method based on the in-sample fit is usually better.

One such approach uses a penalized likelihood such as Akaike's Information Criterion:
$$\mbox{AIC} = L^*(\hat{\bm\theta},\hat{\bm{x}}_0) + 2q, $$
where $q$ is the number of parameters in $\bm\theta$ plus the number of free states in $\bm{x}_0$, and $\hat{\bm\theta}$ and $\hat{\bm{x}}_0$ denote the estimates of $\bm\theta$ and $\bm{x}_0$. We select the model that minimizes the AIC amongst all of the models that are appropriate for the data.

The AIC also provides a method for selecting between the additive and multiplicative error models.
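As an illustration, the following sketch fits the same trend component with each error type and compares the AIC values; the \code{model} strings and the \code{aic} element are as documented for the \code{ets()} function described in Section \ref{sec:package}, and the series is chosen purely for illustration.

```{r aic-error-type-sketch, eval=FALSE, echo=TRUE}
fit_add <- ets(usnetelec, model = "AAN")  # ETS(A,A,N): additive errors
fit_mul <- ets(usnetelec, model = "MAN")  # ETS(M,A,N): multiplicative errors
c(additive = fit_add$aic, multiplicative = fit_mul$aic)
```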
The point forecasts from the two models are identical so that standard forecast accuracy measures such as the MSE or mean absolute percentage error (MAPE) are unable to select between the error types. The AIC is able to select between the error types because it is based on likelihood rather than one-step forecasts. Obviously, other model selection criteria (such as the BIC) could also be used in a similar manner.

## Automatic forecasting {#sec:algorithm}

We combine the preceding ideas to obtain a robust and widely applicable automatic forecasting algorithm. The steps involved are summarized below.
\begin{compactenum}
\item For each series, apply all models that are appropriate, optimizing the parameters (both smoothing parameters and the initial state variable) of the model in each case.
\item Select the best of the models according to the AIC.
\item Produce point forecasts using the best model (with optimized parameters) for as many steps ahead as required.
\item Obtain prediction intervals for the best model either using the analytical results of Hyndman, Koehler, et al. (2005), or by simulating future sample paths for $\{y_{n+1},\dots,y_{n+h}\}$ and finding the $\alpha/2$ and $1-\alpha/2$ percentiles of the simulated data at each forecasting horizon. If simulation is used, the sample paths may be generated using the normal distribution for errors (parametric bootstrap) or using the resampled errors (ordinary bootstrap).
\end{compactenum}

@HKSG02 applied this automatic forecasting strategy to the M-competition data [@Mcomp82] and the IJF-M3 competition data [@M3comp00] using a restricted set of exponential smoothing models, and demonstrated that the methodology is particularly good at short term forecasts (up to about 6 periods ahead), and especially for seasonal short-term series (beating all other methods in the competitions for these series).

# ARIMA models {#sec:arima}

A common obstacle for many people in using Autoregressive Integrated Moving Average (ARIMA) models for forecasting is that the order selection process is usually considered subjective and difficult to apply. But it does not have to be. There have been several attempts to automate ARIMA modelling in the last 25 years.

@HR82 proposed a method to identify the order of an ARMA model for a stationary series. In their method the innovations can be obtained by fitting a long autoregressive model to the data, and then the likelihood of potential models is computed via a series of standard regressions. They established the asymptotic properties of the procedure under very general conditions. @Gomez98 extended the Hannan-Rissanen identification method to include multiplicative seasonal ARIMA model identification. @TRAMOSEATS98 implemented this automatic identification procedure in the software \pkg{TRAMO} and \pkg{SEATS}. For a given series, the algorithm attempts to find the model with the minimum BIC.

@Liu89 proposed a method for identification of seasonal ARIMA models using a filtering method and certain heuristic rules; this algorithm is used in the \pkg{SCA-Expert} software. Another approach is described by @MP00a whose algorithm for univariate ARIMA models also allows intervention analysis. It is implemented in the software package ``Time Series Expert'' (\pkg{TSE-AX}). Other algorithms are in use in commercial software, although they are not documented in the public domain literature.
In particular, \pkg{Forecast Pro} [@ForecastPro00] is well-known for its excellent automatic ARIMA algorithm, which was used in the M3-forecasting competition [@M3comp00]. Another proprietary algorithm is implemented in \pkg{Autobox} [@Reilly00]. @OL96 provide an early review of some of the commercial software that implements automatic ARIMA forecasting.

## Choosing the model order using unit root tests and the AIC

A non-seasonal ARIMA($p,d,q$) process is given by
$$ \phi(B)(1-B)^dy_{t} = c + \theta(B)\varepsilon_t $$
where $\{\varepsilon_t\}$ is a white noise process with mean zero and variance $\sigma^2$, $B$ is the backshift operator, and $\phi(z)$ and $\theta(z)$ are polynomials of order $p$ and $q$ respectively. To ensure causality and invertibility, it is assumed that $\phi(z)$ and $\theta(z)$ have no roots for $|z|<1$ [@BDbook91]. If $c\ne0$, there is an implied polynomial of order $d$ in the forecast function.

The seasonal ARIMA$(p,d,q)(P,D,Q)_m$ process is given by
$$ \Phi(B^m)\phi(B)(1-B^{m})^D(1-B)^dy_{t} = c + \Theta(B^m)\theta(B)\varepsilon_t $$
where $\Phi(z)$ and $\Theta(z)$ are polynomials of orders $P$ and $Q$ respectively, each containing no roots inside the unit circle. If $c\ne0$, there is an implied polynomial of order $d+D$ in the forecast function.

The main task in automatic ARIMA forecasting is selecting an appropriate model order, that is, the values $p$, $q$, $P$, $Q$, $D$ and $d$. If $d$ and $D$ are known, we can select the orders $p$, $q$, $P$ and $Q$ via an information criterion such as the AIC:
$$\mbox{AIC} = -2\log(L) + 2(p+q+P+Q+k)$$
where $k=1$ if $c\ne0$ and 0 otherwise, and $L$ is the maximized likelihood of the model fitted to the \emph{differenced} data $(1-B^m)^D(1-B)^dy_t$. The likelihood of the full model for $y_t$ is not actually defined, and so the values of the AIC for different levels of differencing are not comparable.

One solution to this difficulty is the ``diffuse prior'' approach which is outlined in @DKbook01 and implemented in the \code{arima()} function [@Ripley:2002] in \R. In this approach, the initial values of the time series (before the observed values) are assumed to have mean zero and a large variance. However, choosing $d$ and $D$ by minimizing the AIC using this approach tends to lead to over-differencing. For forecasting purposes, we believe it is better to make as few differences as possible because over-differencing harms forecasts [@SY94] and widens prediction intervals. [Although, see @Hendry97 for a contrary view.]

Consequently, we need some other approach to choose $d$ and $D$. We prefer unit-root tests. However, most unit-root tests are based on a null hypothesis that a unit root exists, which biases results towards more differences rather than fewer differences. For example, variations on the Dickey-Fuller test [@DF81] all assume there is a unit root at lag 1, and the HEGY test of @HEGY90 is based on a null hypothesis that there is a seasonal unit root. Instead, we prefer unit-root tests based on a null hypothesis of no unit-root.

For non-seasonal data, we consider ARIMA($p,d,q$) models where $d$ is selected based on successive KPSS unit-root tests [@KPSS92]. That is, we test the data for a unit root; if the test result is significant, we test the differenced data for a unit root; and so on. We stop this procedure when we obtain our first insignificant result.

For seasonal data, we consider ARIMA$(p,d,q)(P,D,Q)_m$ models where $m$ is the seasonal frequency and $D=0$ or $D=1$ depending on an extended Canova-Hansen test [@CH95].
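Both choices are exposed in the \pkg{forecast} package through the \code{ndiffs()} and \code{nsdiffs()} functions; a brief sketch follows (the \code{test} argument values shown are as documented in recent versions of the package).

```{r ndiffs-sketch, eval=FALSE, echo=TRUE}
ndiffs(usnetelec, test = "kpss")  # successive KPSS tests to select d
nsdiffs(visitors, test = "ch")    # Canova-Hansen test to select D
```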
Canova and Hansen only provide critical values for $2<m<13$. Let $C_m$ be the critical value for seasonal period $m$. We plotted $C_m$ against $m$ for values of $m$ up to 365 and noted that they fit the line $C_m = 0.269 m^{0.928}$ almost exactly. So for $m>12$, we use this simple expression to obtain the critical value.

We note in passing that the null hypothesis for the Canova-Hansen test is not an ARIMA model as it includes seasonal dummy terms. It is a test for whether the seasonal pattern changes sufficiently over time to warrant a seasonal unit root, or whether a stable seasonal pattern modelled using fixed dummy variables is more appropriate. Nevertheless, we have found that the test is still useful for choosing $D$ in a strictly ARIMA framework (i.e., without seasonal dummy variables). If a stable seasonal pattern is selected (i.e., the null hypothesis is not rejected), the seasonality is effectively handled by stationary seasonal AR and MA terms.

After $D$ is selected, we choose $d$ by applying successive KPSS unit-root tests to the seasonally differenced data (if $D=1$) or the original data (if $D=0$). Once $d$ (and possibly $D$) are selected, we proceed to select the values of $p$, $q$, $P$ and $Q$ by minimizing the AIC. We allow $c\ne0$ for models where $d+D < 2$.

## A step-wise procedure for traversing the model space

Suppose we have seasonal data and we consider ARIMA$(p,d,q)(P,D,Q)_m$ models where $p$ and $q$ can take values from 0 to 3, and $P$ and $Q$ can take values from 0 to 1. When $c=0$ there is a total of 288 possible models, and when $c\ne 0$ there is a total of 192 possible models, giving 480 models altogether. If the values of $p$, $d$, $q$, $P$, $D$ and $Q$ are allowed to range more widely, the number of possible models increases rapidly. Consequently, it is often not feasible to simply fit every potential model and choose the one with the lowest AIC. Instead, we need a way of traversing the space of models efficiently in order to arrive at the model with the lowest AIC value. We propose a step-wise algorithm as follows.

\begin{description}
\item[Step 1:] We try four possible models to start with.
\begin{itemize}
\item ARIMA($2,d,2$) if $m=1$ and ARIMA($2,d,2)(1,D,1)$ if $m>1$.
\item ARIMA($0,d,0$) if $m=1$ and ARIMA($0,d,0)(0,D,0)$ if $m>1$.
\item ARIMA($1,d,0$) if $m=1$ and ARIMA($1,d,0)(1,D,0)$ if $m>1$.
\item ARIMA($0,d,1$) if $m=1$ and ARIMA($0,d,1)(0,D,1)$ if $m>1$.
\end{itemize}
If $d+D \le 1$, these models are fitted with $c\ne0$. Otherwise, we set $c=0$. Of these four models, we select the one with the smallest AIC value. This is called the ``current'' model and is denoted by ARIMA($p,d,q$) if $m=1$ or ARIMA($p,d,q)(P,D,Q)_m$ if $m>1$.
\item[Step 2:] We consider up to seventeen variations on the current model:
\begin{itemize}
\item where one of $p$, $q$, $P$ and $Q$ is allowed to vary by $\pm1$ from the current model;
\item where $p$ and $q$ both vary by $\pm1$ from the current model;
\item where $P$ and $Q$ both vary by $\pm1$ from the current model;
\item where the constant $c$ is included if the current model has $c=0$ or excluded if the current model has $c\ne0$.
\end{itemize}
Whenever a model with lower AIC is found, it becomes the new ``current'' model and the procedure is repeated. This process finishes when we cannot find a model close to the current model with lower AIC.
\end{description}

There are several constraints on the fitted models to avoid problems with convergence or near unit-roots. The constraints are outlined below.
\begin{compactitem}\itemsep=8pt
\item The values of $p$ and $q$ are not allowed to exceed specified upper bounds (with default values of 5 in each case).
\item The values of $P$ and $Q$ are not allowed to exceed specified upper bounds (with default values of 2 in each case).
\item We reject any model which is ``close'' to non-invertible or non-causal. Specifically, we compute the roots of $\phi(B)\Phi(B)$ and $\theta(B)\Theta(B)$. If either has a root that is smaller than 1.001 in absolute value, the model is rejected.
\item If there are any errors arising in the non-linear optimization routine used for estimation, the model is rejected. The rationale here is that any model that is difficult to fit is probably not a good model for the data.
\end{compactitem}

The algorithm is guaranteed to return a valid model because the model space is finite and at least one of the starting models will be accepted (the model with no AR or MA parameters). The selected model is used to produce forecasts.

## Comparisons with exponential smoothing

There is a widespread myth that ARIMA models are more general than exponential smoothing. This is not true. The two classes of models overlap. The linear exponential smoothing models are all special cases of ARIMA models---the equivalences are discussed in @HAA08. However, the non-linear exponential smoothing models have no equivalent ARIMA counterpart. On the other hand, there are many ARIMA models which have no exponential smoothing counterpart. Thus, the two model classes overlap and are complementary; each has its strengths and weaknesses.

The exponential smoothing state space models are all non-stationary. Models with seasonality or non-damped trend (or both) have two unit roots; all other models---that is, non-seasonal models with either no trend or damped trend---have one unit root. It is possible to define a stationary model with similar characteristics to exponential smoothing, but this is not normally done. The philosophy of exponential smoothing is that the world is non-stationary. So if a stationary model is required, ARIMA models are better.

One advantage of the exponential smoothing models is that they can be non-linear. So time series that exhibit non-linear characteristics, including heteroscedasticity, may be better modelled using exponential smoothing state space models.

For seasonal data, there are many more ARIMA models than the 30 possible models in the exponential smoothing class of Section \ref{sec:expsmooth}. It may be thought that the larger model class is advantageous. However, the results in @HKSG02 show that the exponential smoothing models performed better than the ARIMA models for the seasonal M3 competition data. (For the annual M3 data, the ARIMA models performed better.) In a discussion of these results, @Hyndman01 speculates that the larger model space of ARIMA models actually harms forecasting performance because it introduces additional uncertainty. The smaller exponential smoothing class is sufficiently rich to capture the dynamics of almost all real business and economic time series.

# The forecast package {#sec:package}

The algorithms and modelling frameworks for automatic univariate time series forecasting are implemented in the \pkg{forecast} package in \R. We illustrate the methods using the following four real time series shown in Figure \ref{fig:etsexamples}.
\begin{compactitem}
\item Figure \ref{fig:etsexamples}(a) shows 125 monthly US government bond yields (percent per annum) from January 1994 to May 2004.
\item Figure \ref{fig:etsexamples}(b) displays 55 observations of annual US net electricity generation (billion kwh) for 1949 through 2003.
\item Figure \ref{fig:etsexamples}(c) presents 113 quarterly observations of passenger motor vehicle production in the U.K. (thousands of cars) for the first quarter of 1977 through the first quarter of 2005.
\item Figure \ref{fig:etsexamples}(d) shows 240 monthly observations of the number of short term overseas visitors to Australia from May 1985 to April 2005.
\end{compactitem}

```{r etsexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using exponential smoothing state space models."}
par(mfrow = c(2,2))
mod1 <- ets(bonds)
mod2 <- ets(usnetelec)
mod3 <- ets(ukcars)
mod4 <- ets(visitors)
plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum")
plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh")
plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars")
plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people")
```

```{r etsnames, echo=FALSE}
etsnames <- c(mod1$method, mod2$method, mod3$method, mod4$method)
etsnames <- gsub("Ad","A\\\\damped",etsnames)
```

## Implementation of the automatic exponential smoothing algorithm

The innovations state space modelling framework described in Section \ref{sec:expsmooth} is implemented via the \code{ets()} function in the \pkg{forecast} package. (The default settings of \code{ets()} do not allow models with multiplicative trend, but they can be included using \code{allow.multiplicative.trend=TRUE}.)
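Typical calls are sketched below; the \code{model} string uses the (E,T,S) triplet notation of Section \ref{sec:statespace}, with ``Z'' denoting a component to be selected automatically, and the series are chosen purely for illustration.

```{r ets-calls-sketch, eval=FALSE, echo=TRUE}
fit1 <- ets(wineind)                                  # automatic selection by AIC
fit2 <- ets(wineind, model = "MAM")                   # force ETS(M,A,M)
fit3 <- ets(lynx, allow.multiplicative.trend = TRUE)  # widen the model space
```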
The models chosen via the algorithm for the four data sets were:
\begin{compactitem}
\item `r etsnames[1]` for monthly US 10-year bonds yield\\
($\alpha=`r format(coef(mod1)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod1)['beta'], digits=4, nsmall=4)`$, $\phi=`r format(coef(mod1)['phi'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod1)['l'], digits=4, nsmall=4)`$, $b_0=`r format(coef(mod1)['b'], digits=4, nsmall=4)`$);
\item `r etsnames[2]` for annual US net electricity generation\\
($\alpha=`r format(coef(mod2)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod2)['beta'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod2)['l'], digits=4, nsmall=4)`$, $b_0=`r format(coef(mod2)['b'], digits=4, nsmall=4)`$);
\item `r etsnames[3]` for quarterly UK motor vehicle production\\
($\alpha=`r format(coef(mod3)['alpha'], digits=4, nsmall=4)`$, $\gamma=`r format(coef(mod3)['gamma'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod3)['l'], digits=4, nsmall=4)`$, $s_{-3}=`r format(-sum(coef(mod3)[c('s0','s1','s2')]), digits=4, nsmall=4)`$, $s_{-2}=`r format(coef(mod3)['s2'], digits=4, nsmall=4)`$, $s_{-1}=`r format(coef(mod3)['s1'], digits=4, nsmall=4)`$, $s_0=`r format(coef(mod3)['s0'], digits=4, nsmall=4)`$);
\item `r etsnames[4]` for monthly Australian overseas visitors\\
($\alpha=`r format(coef(mod4)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod4)['beta'], digits=2, nsmall=4)`$, $\gamma=`r format(coef(mod4)['gamma'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod4)['l'], digits=4, nsmall=4)`$, $b_0 = `r format(coef(mod4)['b'], digits=4, nsmall=4)`$, $s_{-11}=`r format(12-sum(tail(coef(mod4),11)), digits=4, nsmall=4)`$, $s_{-10}=`r format(coef(mod4)['s10'], digits=4, nsmall=4)`$, $s_{-9}=`r format(coef(mod4)['s9'], digits=4, nsmall=4)`$, $s_{-8}=`r format(coef(mod4)['s8'], digits=4, nsmall=4)`$, $s_{-7}=`r format(coef(mod4)['s7'], digits=4, nsmall=4)`$, $s_{-6}=`r format(coef(mod4)['s6'], digits=4, nsmall=4)`$, $s_{-5}=`r format(coef(mod4)['s5'], digits=4, nsmall=4)`$, $s_{-4}=`r format(coef(mod4)['s4'], digits=4, nsmall=4)`$, $s_{-3}=`r format(coef(mod4)['s3'], digits=4, nsmall=4)`$, $s_{-2}=`r format(coef(mod4)['s2'], digits=4, nsmall=4)`$, $s_{-1}=`r format(coef(mod4)['s1'], digits=4, nsmall=4)`$, $s_0=`r format(coef(mod4)['s0'], digits=4, nsmall=4)`$).
\end{compactitem}

Although there is a lot of computation involved, it can be handled remarkably quickly on modern computers. Each of the forecasts shown in Figure \ref{fig:etsexamples} took no more than a few seconds on a standard PC. The US electricity generation series took the longest as there are no analytical prediction intervals available for the ETS(M,M\damped,N) model. Consequently, the prediction intervals for this series were computed using simulation of 5000 future sample paths.

To apply the algorithm to the US net electricity generation time series \code{usnetelec}, we use the following command.

```{r ets-usnetelec, echo=TRUE}
etsfit <- ets(usnetelec)
```

The object \code{etsfit} is of class ``\code{ets}'' and contains all of the necessary information about the fitted model including model parameters, the value of the state vector $\bm{x}_t$ for all $t$, residuals and so on. Printing the \code{etsfit} object shows the main items of interest.

```{r ets-usnetelec-print,echo=TRUE}
etsfit
```

Some goodness-of-fit measures [defined in @HK06] are obtained using \code{accuracy()}.
```{r ets-usnetelec-accuracy,eval=TRUE,echo=TRUE}
accuracy(etsfit)
```

There are also \code{coef()}, \code{plot()}, \code{summary()}, \code{residuals()}, \code{fitted()} and \code{simulate()} methods for objects of class ``\code{ets}''. The \code{plot()} function shows time plots of the original time series along with the extracted components (level, growth and seasonal).

The \code{forecast()} function computes the required forecasts which are then plotted as in Figure \ref{fig:etsexamples}(b).

```{r ets-usnetelec-fcast, fig.height=5, fig.width=8, message=FALSE, warning=FALSE, include=FALSE, output=FALSE}
fcast <- forecast(etsfit)
plot(fcast)
```

Printing the \code{fcast} object gives a table showing the prediction intervals.

```{r ets-usnetelec-fcast-print,eval=TRUE,echo=TRUE}
fcast
```

The \code{ets()} function also provides the useful feature of applying a fitted model to a new data set. For example, we could withhold 10 observations from the \code{usnetelec} data set when fitting, then compute the one-step forecast errors for the out-of-sample data.

```{r ets-usnetelec-newdata,eval=FALSE,echo=TRUE}
fit <- ets(usnetelec[1:45])
test <- ets(usnetelec[46:55], model = fit)
accuracy(test)
```

We can also look at the measures of forecast accuracy where the forecasts are based on only the fitting data.

```{r ets-usnetelec-fcast-accuracy,eval=FALSE,echo=TRUE}
accuracy(forecast(fit,10), usnetelec[46:55])
```

## The HoltWinters() function

There is another implementation of exponential smoothing in \R\ via the \code{HoltWinters()} function [@Meyer:2002] in the \pkg{stats} package. It implements only the (N,N), (A,N), (A,A) and (A,M) methods. The initial states $\bm{x}_0$ are fixed using a heuristic algorithm. Because of the way the initial states are estimated, a full three years of seasonal data are required to implement the seasonal forecasts using \code{HoltWinters()}. (See @shortseasonal for the minimal sample size required.) The smoothing parameters are optimized by minimizing the average squared prediction errors, which is equivalent to minimizing \eqref{likelihood} in the case of additive errors.

There is a \code{predict()} method for the resulting object which can produce point forecasts and prediction intervals. Although it is nowhere documented, it appears that the prediction intervals produced by \code{predict()} for an object of class \code{HoltWinters} are based on an equivalent ARIMA model in the case of the (N,N), (A,N) and (A,A) methods, assuming additive errors. These prediction intervals are equivalent to the prediction intervals that arise from the (A,N,N), (A,A,N) and (A,A,A) state space models. For the (A,M) method, the prediction interval provided by \code{predict()} appears to be based on @CY91 which is an approximation to the true prediction interval arising from the (A,A,M) model. Prediction intervals with multiplicative errors are not possible using the \code{HoltWinters()} function.
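The following sketch shows the correspondence on a seasonal series (here \code{co2}, chosen for illustration): the additive Holt-Winters method fitted by \code{HoltWinters()} alongside the ETS(A,A,A) state space model fitted by \code{ets()}.

```{r hw-vs-ets-sketch, eval=FALSE, echo=TRUE}
hw  <- HoltWinters(co2)                          # heuristic initial states
fit <- ets(co2, model = "AAA", damped = FALSE)   # ETS(A,A,A) state space model
predict(hw, n.ahead = 12, prediction.interval = TRUE)
forecast(fit, h = 12)
```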
## Implementation of the automatic ARIMA algorithm

```{r arimaexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using ARIMA models."}
mod1 <- auto.arima(bonds, seasonal=FALSE, approximation=FALSE)
mod2 <- auto.arima(usnetelec)
mod3 <- auto.arima(ukcars)
mod4 <- auto.arima(visitors)
par(mfrow = c(2,2))
plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum")
plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh")
plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars")
plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people")
```

The algorithm of Section \ref{sec:arima} is applied to the same four time series. Unlike the exponential smoothing algorithm, the ARIMA class of models assumes homoscedasticity, which is not always appropriate. Consequently, transformations are sometimes necessary. For these four time series, we model the raw data for series (a)--(c), but the logged data for series (d). The prediction intervals are back-transformed with the point forecasts to preserve the probability coverage.

To apply this algorithm to the US net electricity generation time series \code{usnetelec}, we use the following commands.

```{r arima-auto-fcast,eval=TRUE,echo=TRUE,fig.show="hide"}
arimafit <- auto.arima(usnetelec)
fcast <- forecast(arimafit)
plot(fcast)
```

```{r arimanames, echo=FALSE}
# Convert character strings to latex
arimanames <- c(as.character(mod1), as.character(mod2), as.character(mod3), as.character(mod4))
arimanames <- gsub("\\[([0-9]*)\\]", "$_{\\1}$", arimanames)
```

The function \code{auto.arima()} implements the algorithm of Section \ref{sec:arima} and returns an object of class \code{Arima}. The resulting forecasts are shown in Figure \ref{fig:arimaexamples}. The fitted models are as follows:
\begin{compactitem}
\item `r arimanames[1]` for monthly US 10-year bonds yield\\
($\theta_1= `r format(coef(mod1)['ma1'], digits=4, nsmall=4)`$);
\item `r arimanames[2]` for annual US net electricity generation\\
($\phi_1= `r format(coef(mod2)['ar1'], digits=4, nsmall=4)`$; $\phi_2= `r format(coef(mod2)['ar2'], digits=4, nsmall=4)`$; $\theta_1= `r format(coef(mod2)['ma1'], digits=4, nsmall=4)`$; $\theta_2= `r format(coef(mod2)['ma2'], digits=4, nsmall=4)`$; $c= `r format(coef(mod2)['drift'], digits=4, nsmall=4)`$);
\item `r arimanames[3]` for quarterly UK motor vehicle production\\
($\phi_1= `r format(coef(mod3)['ar1'], digits=4, nsmall=4)`$; $\phi_2= `r format(coef(mod3)['ar2'], digits=4, nsmall=4)`$; $\Phi_1= `r format(coef(mod3)['sar1'], digits=4, nsmall=4)`$; $\Phi_2= `r format(coef(mod3)['sar2'], digits=4, nsmall=4)`$);
\item `r arimanames[4]` for monthly Australian overseas visitors\\
($\phi_1= `r format(coef(mod4)['ar1'], digits=4, nsmall=4)`$; $\theta_1= `r format(coef(mod4)['ma1'], digits=4, nsmall=4)`$; $\Theta_1= `r format(coef(mod4)['sma1'], digits=4, nsmall=4)`$; $\Theta_2= `r format(coef(mod4)['sma2'], digits=4, nsmall=4)`$; $c= `r format(coef(mod4)['drift'], digits=4, nsmall=4)`$).
\end{compactitem}
Note that the \R\ parameterization has $\theta(B) = (1 + \theta_1B + \dots + \theta_qB^q)$ and $\phi(B) = (1 - \phi_1B - \dots - \phi_pB^p)$, and similarly for the seasonal terms.

A summary of the forecasts is available, part of which is shown below.
```
Forecast method: ARIMA(2,1,2) with drift

Series: usnetelec

Coefficients:
          ar1      ar2     ma1     ma2    drift
      -1.3032  -0.4332  1.5284  0.8340  66.1585
s.e.   0.2122   0.2084  0.1417  0.1185   7.5595

sigma^2 estimated as 2262:  log likelihood=-283.34
AIC=578.67   AICc=580.46   BIC=590.61

Error measures:
                   ME   RMSE    MAE      MPE   MAPE    MASE     ACF1
Training set 0.046402 44.894 32.333 -0.61771 2.1012 0.45813 0.022492

Forecasts:
     Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
2004       3968.957 3908.002 4029.912 3875.734 4062.180
2005       3970.350 3873.950 4066.751 3822.919 4117.782
2006       4097.171 3971.114 4223.228 3904.383 4289.959
2007       4112.332 3969.691 4254.973 3894.182 4330.482
2008       4218.671 4053.751 4383.591 3966.448 4470.894
2009       4254.559 4076.108 4433.010 3981.641 4527.476
2010       4342.760 4147.088 4538.431 4043.505 4642.014
2011       4393.306 4185.211 4601.401 4075.052 4711.560
2012       4470.261 4248.068 4692.455 4130.446 4810.077
2013       4529.113 4295.305 4762.920 4171.535 4886.690
```

The training set error measures for the two models are very similar. Note that the information criteria are not comparable.

The \pkg{forecast} package also contains the function \code{Arima()} which is largely a wrapper to the \code{arima()} function in the \pkg{stats} package. The \code{Arima()} function in the \pkg{forecast} package makes it easier to include a drift term when $d+D=1$. (Setting \code{include.mean=TRUE} in the \code{arima()} function from the \pkg{stats} package will only work when $d+D=0$.) It also provides the facility for fitting an existing ARIMA model to a new data set (as was demonstrated for the \code{ets()} function earlier).

One-step forecasts for ARIMA models are now available via a \code{fitted()} function. We also provide a new function \code{arima.errors()} which returns the original time series after adjusting for regression variables. If there are no regression variables in the ARIMA model, then the errors will be identical to the original series. If there are regression variables in the ARIMA model, then the errors will be equal to the original series minus the effect of the regression variables, but leaving in the serial correlation that is modelled with the AR and MA terms. In contrast, \code{residuals()} provides true residuals, removing the AR and MA terms as well.

The generic functions \code{summary()}, \code{print()}, \code{fitted()} and \code{forecast()} apply to models obtained from either the \code{Arima()} or \code{arima()} functions.

## The forecast() function

The \code{forecast()} function is generic and has S3 methods for a wide range of time series models. It computes point forecasts and prediction intervals from the time series model. Methods exist for models fitted using \code{ets()}, \code{auto.arima()}, \code{Arima()}, \code{arima()}, \code{ar()}, \code{HoltWinters()} and \code{StructTS()}.

There is also a method for a \code{ts} object. If a time series object is passed as the first argument to \code{forecast()}, the function will produce forecasts based on the exponential smoothing algorithm of Section \ref{sec:expsmooth}.

In most cases, there is an existing \code{predict()} function which is intended to do much the same thing. Unfortunately, the resulting objects from the \code{predict()} function contain different information in each case and so it is not possible to build generic functions (such as \code{plot()} and \code{summary()}) for the results. So, instead, \code{forecast()} acts as a wrapper to \code{predict()}, and packages the information obtained in a common format (the \code{forecast} class).
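For example (a sketch), the same workflow applies regardless of the class of the fitted model:

```{r forecast-generic-sketch, eval=FALSE, echo=TRUE}
fit <- StructTS(WWWusage, type = "trend")  # a stats-package structural model
fc  <- forecast(fit, h = 10)               # packaged into the common forecast class
plot(fc)
forecast(WWWusage)                         # ts object: uses the ets() algorithm
```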
We also define a default \code{predict()} method which is used when no existing \code{predict()} function exists, and calls the relevant \code{forecast()} function. Thus, \code{predict()} methods parallel \code{forecast()} methods, but the latter provide consistent output that is more usable. \subsection[The forecast class]{The \code{forecast} class} The output from the \code{forecast()} function is an object of class ``\code{forecast}'' and includes at least the following information: \begin{compactitem} \item the original series; \item point forecasts; \item prediction intervals of specified coverage; \item the forecasting method used and information about the fitted model; \item residuals from the fitted model; \item one-step forecasts from the fitted model for the period of the observed data. \end{compactitem} There are \code{print()}, \code{plot()} and \code{summary()} methods for the ``\code{forecast}'' class. Figures \ref{fig:etsexamples} and \ref{fig:arimaexamples} were produced using the \code{plot()} method. The prediction intervals are, by default, computed for 80\% and 95\% coverage, although other values are possible if requested. Fan charts [@Wallis99] are possible using the combination \verb|plot(forecast(model.object, fan = TRUE))|. ## Other functions {#sec:other} We now briefly describe some of the other features of the \pkg{forecast} package. Each of the following functions produces an object of class ``\code{forecast}''. \code{croston()} : implements the method of @Croston72 for intermittent demand forecasting. In this method, the time series is decomposed into two separate sequences: the non-zero values and the time intervals between non-zero values. These are then independently forecast using simple exponential smoothing and the forecasts of the original series are obtained as ratios of the two sets of forecasts. No prediction intervals are provided because there is no underlying stochastic model [@SH05]. \code{theta()} : provides forecasts from the Theta method [@AN00]. @HB03 showed that these were equivalent to a special case of simple exponential smoothing with drift. \code{splinef()} : gives cubic-spline forecasts, based on fitting a cubic spline to the historical data and extrapolating it linearly. The details of this method, and the associated prediction intervals, are discussed in @HKPB05. \code{meanf()} : returns forecasts based on the historical mean. \code{rwf()} : gives ``naïve'' forecasts equal to the most recent observation assuming a random walk model. This function also allows forecasting using a random walk with drift. In addition, there are some new plotting functions for time series. \code{tsdisplay()} : provides a time plot along with an ACF and PACF. \code{seasonplot()} : produces a seasonal plot as described in @MWH3. \newpage # Bibliography forecast/vignettes/jsslogo.jpg0000644000176200001440000005221314150370574016263 0ustar liggesusersÿØÿàJFIFÿÛC    $.' 
",#(7),01444'9=82<.342ÿÛC  2!!22222222222222222222222222222222222222222222222222ÿˆ"ÿÄÿÄÿÚ ÄX»ù*,QY0‰)-eŽ+s+YJÐÌÆ•.u¨2ÊÊj!ªQ•k-Æõ‘êfg¡™Í!žW4Î^Ìåì¡ÜÊåV¯ŒT\Ê•ís­1²…¡GBJ Â¥p´ÊÇ%%ÎZKPM•-Ž$(»Ÿ£ÒJsb‚åJNXDY`Vä\¡ºŠ˜AÈ æKdµJaÈØ”‚,0MÈD‹ L"¤r‘L®Mlˆ¡dQ)¥J@”Œ”¤K$W)2hÎ×W?¥Ï¿j6C(@‰j€˜D‘*jR¥ ŠiS”fµ"LJÕ…V¦X›h Ù0aQr,‰6AX$ÈBÔVXŠ‹AÉÄ”¶6–MX×Ït¹»õ¯EáÒ1™$›ŠÈ*Ky´õêqqå¦!X4@*”e LQ ´!Êȹ4‰#HŒH¶Q ¹À HÇY½açWyyÞŒ½Ë¤³gË¢p³mécÄž5Õu˦{ã/”èàÛÛ=ËñkóõiG6j!"!_0¯®;Ç6|µ¾<œ;Ï¥8ýI,+V\røÚzÓ};qáÍ{G ¯ÆÈãI m4@Y)AÓ&Åfš_ž+³—=K³,Õ¶ÞD5žÔ¸ÚN”£fnjz¹yÑp»OJY“ÍÒQÁ›oDÔ»ùeQåµbÑÒöú>ÿ-RZ¢©eãWwƒ“Ðêq:½¢8œogã« 'ùÛÞó}©ÙãCy= úk²ô<Ý`LˆQcŠ•¡Ih•«‘Y7edòãQ‡ZÕ·6™¬¶Wi–½]JæGW:Ì×DZ[©»O›¥Ü»áîÃ<²åèñû=r°<¤¶cǼz““/W©_=;ò]:Ïo61¿!§RžãÑ&Ùe2Ò±³_ö^J°Æf·gW—ÕeWelY‡=¯GÔçtyÐ!$ ‰R Tˆ6fAÔÒ,Íç}N ¿9_cƒnž·Ìo|ðÆ^¦Lp²s››«¯‡g…ù7m™âhªXº™FråÛ“¿§ÑaéŒÃ:ðíãêdzŸiíÝU劫èÔ ’jËèÇ/äßÇÑÚçd’U(ÙÓ2ÝÎèÌÛEÙ®tsïÈׯ¿ÉG3ÖÃÊI=T¼¬«ÓnièNguñUw¥ÀQèeçí;¯‘&ºž{tùoo|»s ©ÒšôCy¢d+~Þ_kL°ç»Å –{a¬ïÁ6†c?~=L±ŒšŸ(ïçëC’Š·âÛž½ÜÜeyo§!nÃ-3N‰<î·!¶ÔTzY4³j&Æ ¶Á¼ÅÊj™4cd["1°+˜•I«Õ*]/0hyÑ­ãk¸ÂåÙ ­7C+4™Zè2 ¬ÊV•‰Ë§¬”M7@šÆh†mNJÂBÐÔ ŒÒ³}6e;&;ÏEÔM¦FVˆhC4Æ™(ÐÓ›Â#*26¡€‹T$ ŠkQ›óëË‹NP¬  €F$Y¿›3¡…ÔMÄ–d$˜!ƒ@Ú¸ˆÜEm8 b€j!'c" ɱ¤‘ˆ§µFIœÝ·œxÔ¢¢¬JÆ"À‹*b  “ˆH‹‡(2DZÉD&EÄœ‚H‘“­Ù2Y  œR&¢I$ˆ€"˜‘!\µäƪ݇v³5EHTI•…E¥RhEò(†ÜµHæVÐ @ÜBB pĉ¨±‘U' ‰‘$@$D$Œ@Ä–B0ˆƒU[aU¢¬ )A07 1 š™šÞ ÈâÆ!X&€5@bHÓ½iºýòñ£ë°'kÉI4€ 10LlAË É”JÝŽ{«ßœmÝ—EÔªÚÓ?¤ó óÝÊèóyõ•ãÔ¬œ˜¦Èƒ%e³«Áº_QO§š¸¾ƒe‹<¦§R£Y,LL ``‰‚ ß^EÏ®¼ð5”2ái£¡4â×.ƒM¢I𯵋ëNhÁ«/L\€#0 ¶å­›4`#+ÀÀ@ ˆ€hˆ H€Ó¨9ô‹ ì`¥ar Jï h€Ð0°` € ?ÿÄ.! 1"02@A#3P$B4`CÿÚT«¢ºë¢•q¥J¸R®šU•*T©R¥J•*UÑ\+äR¥J•*X¬V+ŠÁ`°X,V+‚Åb±X,V+KJ•*T©b©RÅR¥J•*T©RÅb±X¬V+ŠÅRÅb±Xô}?Tf¨óÁ`± J•tW T©R¥\iR®•*UÀñ¥J•*U•*á]Ó÷J¸…AVÔ±Xí¨‡—$QDCðúÕ•*UÑ_&¸R®Šê­©Rx¦<ïh¨*ã\k¦•*é¥^…*UÄuÒ¥\+…tW è¯Bcøšnr|Çÿ¨ýQûܤHky®™ÿ¸R®«ãÝ ‡F ~8½Ïög¡$¸ËªsZßá—´)5QF¨u?V¥Ö½Á™Á“5Ú™ÕD±•.­‘#®yW,§”ò9 è”\mÚNâ”rÈ"Žéæþ ©5ļs\+ž0)gµØ¥™iyÄ;ʤÄS‘ÉÆ(Š*ܬÛÐwÉ{i¿Uðµ}÷ù!Ô6&dDLIÚÆ£©qZ}Vk ² 1Z™?EÌ—Ä9QBIœ«RŒ³Æ¢97о¯FVºLÞwWA„bdFUÏ\÷ ezCÈ­;¿x‡·ü—ûcüsóƒÃŸJG’|9èú—ÞO“Hí áiòµ‚}cž¢ÓÉ+† Tñˆ_Á‡`i&š’Z,ì£kK0gÀ´ùº‡sF¨Ç“‹ÀE“ž[¥}rØÀwQDé_•°¨ãËS 5ÐÙ‡¼¯*„,'c¹îaèy¦IÝ¿¯JïÉÃdýDlRë‰!²Î`б‚€áKVÈT£öåµ 1OZCqôÐT©PT*á\_¨cúœÍÚdø7c(s aQi²,k4áò’ µT4‘òÓä¡–7ÉdÜíœåyj ÝïÜ»ÍÿÔÌëã ¸ž£ö烆¶Jv¹èË3Ó4ï”ÃÿÐÆF/£WÿaRƒÞÝôè~Òvq±¡ýwèNâÖHÐW,V)Ž1»Å'ê˜c˜³Ðæ‡*)ÑØ´NÏ‘ZbäNÚa–¡ÝŽæêMBF‹.cûMßYîôØ \—ø?ZÒrÛCÕf{Çg? DX!À8,‘’ƒž^Ytñ1 áqå1ÉÕ dÙø÷ú¼V×I ¢K\é-¼ûűnM;³Æ PjÔÏÖÏs zÂú{9o­‰A…è01³Ë›¾`+± Ü·Lœ§÷pî@¡'xÔg™D§ùä•üÉéa_q›OL;]ðÔûC¶Ü Ô'½ÎùÛ[…Vn“’’>T’øDÍŠrìtGÎSû]1}‘ܨžÚüíáªö4¦î¦v þ`E¤š]–6)‰ï4 ²š2w`QE@òÉlKúßúÑïÒ ¯icö Dv­jŽÁ1²•¨#b5óšrU³†Í^ïvN‘Ücn ñrˆàœÔRКoXp(lƒöc¶$„èÌ«•ÉRJæi£zÈ95Ø2ÉókƒÛ¿ÒsñÙLnG€_nT¢%úg›WeH}êY„סԬæä‹3rÊå•[Ìmÿ3ë)ÒRïÄ [Ðä60MÊ9ù88ù½ i Ó&s s“±œc/1svÏçdtŒ¨ºú"ný'€DŠGaꇠíÛ9rÔñ»‘åo¾5Ñ!Û×ÈäD7ôßîþÿÄ%! 
01@A"2QPÿÚ?ô Ž˜#µÚ*T©R ‚*T©R ‚#¦ ‚4¨±B¥J•(P¡B… *T¡B¥ •*T©R¥J• ‚v cH ŽŽ4‚¦ÛÕuIsq¨_$—EËvø8<˜âPÍA‡yÁ¸n¼…ËE–™i‰tÄn†á¸n¦á"Ó1ÆXñþ B<±½'D2K"Äô®ÊÑH±þ˜¨ŸBüQ–ÞŒž±&5CˆrrrrrrrA X² •?$`Ûòq£)ù6ãÁY]%X±éz.Õ™f]—eٸ˗.\¹}q#±ö1ÿ €ôZÁ"Òö£°û‘èxòn#p¹sÊïÁ¹$ù$ ýhŒ;LÄ^ŽXÉõ¬˜ÏmzÑTD÷Ø‚4^ŠR=Ÿ“=ÅÞá ȹ<kŸ“#öXŒ1ƒ/×LFgý>÷íD‰rAòd-23ð|?¯T’?Iû2Î4ÅhÈ"—³¸Ç›zc«#L½Ì|u?gÿÄ% !01@APQ"2ÿÚ?Ó$’I$’I$’IbÄ–$’I$’I$’I$’ÅË,X±bÅ‹,X¹bÅ‹,I$“Œ’IbÄ–,8¹bÅ‹3 Ã0Ì3 —.\Ì.\¹rå‹,\±bÅ‹õ,'“®IÆHRܯédOœpí‹'…TÉS!ÆA’¦P¬ŽR¤Õ(¥`s‹ž7Nã·Q”báí.ÁzþGt3®8#Ê7¼§ÀÏÃÈG zmAWŸ'“ýj\ÔFàí*7‡q9êWjo¸½ûÿÿÄ2!1 A"02PQaq@‘B`Rbpr¡Á±ÿÚ?þÃÛ+Býf×Qz‡áB÷oЛFPúpÒ¿¡TB*%ìÄl€ôŸ§™\}å27.~Ä0ÁÑ0䕈}ªD"=Ú‡épD›Ò€+iÕj©[pÑq„?%³U%4q–èµÈf72¿‰ê[§#xŠÙiŒE— á•B¥ h£CÕa«§@„WÖwÈd70Á«¦ˆ\Kˆ-’¶JÃò5¯…uô¶q-U_ä{ÄŸøF7a @’ª±Eõ’Š„º†3Å‚-]Ô^=‘GvÑRBªù‚#3’›Ópœý•µâ+ ¼áYÄWŠ®~úöWaÙœ®ž#Mm ’¶‹"NÑ–.Þ"hBl ¦4GRf}O¬§q°µOr©8§P²>$|!ãØ÷žñ>”(ïÑÚk-·n±›”éϩ˘„\¬?Œ9bñ’êêëRélŒ±LŽÊ-"‰_ aEÆäàUZª»¯êë.óÖM™úN"©I\+E¢ÑÃ%U­SZ-‹Da¥$f×#"„ÛzÒ5¢¢âWWÉlHŠ&¤Ÿë1Œê¶vdHúPúcå•q †õR¹ªh¨ŒAp‰šp¨¿ÎQL{# N©EƸ•Ó«J¡ gá_DH"$Ø*±~Z£Ñ[!˜lÎWA‘ŒáGü¥ì‡j«++++b¬V«UªººÕX«ù+­~•¢Z­Sa+e碸W áVè:ºþ'3®ë Ak¡(rÖýœÏ¤âó##ß¶›­‹EYá Áh¬ãô´úNoŸÔ¨½H{!6éËÊ;ꦃ¥Ðˆüª[—Õ4Žþ©°„JÀ~¾0>Ïåªõ"=9€GÙázs¹&côAÉíîhœ£²¤¶¡[%pžCeeiPûŠ'êc%oÔ- ªÜ2yá‰+N©ô@´=£­VH·UVT*ë‰q.%b]–ZÊÊ »eЇ Q6ˆõ¸GÒ%«Ñ7©ÇÝX/“ u†‡¼º­W 8‡Ê¦o”ýwÞ[+i¤û•o1ÐÐòSJºá Ë…p.fFø{ËåAãxÁ Y7†²t"(¯”y,BÉŠáTiYZG‹ÀB—,‡•àn¨ÔJƒ/ýX"Uº¬»2|˜‚¤?k„.­+{~ÛƒáñAº„[𦙮˜ÂS8“ ˜y ¥uE†;VÒ>WÃå;ŒG¢hNÉ’.KU}/?N?Àà= ‡ýŒ§ß:í˜"‰í>àçŠ}³×!÷¬».˶X|Ïfë‰×Îrsb†Æ]ÕNZ¦º¨dðD&$O¹(2¬ËÚM¤ÞCÊÅŠ&T0£˜Ìdì»Jór˜pûçO&UX?†Z" ­f!Ѧ™È˜ˆiC‘ûËYUlµ>ù—l‚r¢Bx²SYÅ»¦XõÕvÌç·"2PW#e#á Ûhµy‰R ñIÅy ž+xM¹ôÏdzÙwUËeÔ§:§ÕT.©áúäT]÷PµáMüµ›nÜ-¥C(½=täÔ¾FÎÚ(»Ìï(«,HÕ__;î*˜[ØU<(ByK~†üûÿÄ*!1A Qaq0@‘¡±áðPÁÑñÿÚ?!šŽØB ^GàN‘¢ †Š„fé¢è«àðÜBDÕ4h„Ò-[鄸ž‹+±èzž§¡è=‡¡èzž‡¡èzOSÐõ=tzÀõ=:Þ‡é¯}‹/¥ýÃ[²û¢§¯ÈÚxcJ¬’F‚B{IEÐA@’""‰tLiˆ‚HDBh]Ð$L`„Á‚.É.¢h"¡°Ò®Â7-´3Â5ãªHJCa:ºCe&HBiKª‡¯™ÑëNM˜½˜"jBDÒi‰NÚ.1¢Ûh%¨¡]‰çDÕ hˆ‚ÐÙ¹BBXBtÈ! 
‘¢]Mi4G¡¢]÷cd]$ J{íÑtz.ç%z.aK­0`¨eÑo¬Ñÿ'-^Â&ú£}a¾e¶œ1\söhÙY5Bï£Á‰¹µ”Z”½9èº-fˆ/ £f[]S¯‚êÒŦšRÎL¶6üZ¹ ã«“ü«2©Êýˆn#ŽÈN‡ÕõÐÄ-qGNúÃc>Åom6û››þl&â²c–^0©‹¹ jý¤¦çÙˆp ¦çôNt¼ qûêÞ‚jùÏt\ö!G£Ò™™›"mØm„Š>º™±G¥ê¢ÒÁitl§%.„ÁöÙé˜#°MåôX0ÿ™Ù>Áy>„"2•åöD²K¼"BdÑ4QRòÏ ¡bîµ+琢.¨ålòš4pg±2Û†r¿c«b&­ôQ¹ɳ*'‡üöQÊÁÏýÌr÷nù&š]6‹¯r,Ðgñy iPR·wdd9<˜”X'“Ö¡4UOYúBI2.IºOrÛ‘9\AÚE“GÖÒçØŒ8ÞtǸmÞJ¹p?¯R³Ù ô ?Ðp¿FpÉÕƒ›5±¹¿v†jh—-ØfÍyØË)®ß)p]Íýþ¸½ª]‰ó73;°ÄàÏEð]øãbû+ƒ7 °ä³±¢Ñ—`ô‚ÔÙ©¬Ä» n9ºpß65r&cÛK‘MoãIÊ’ùÁ v¬àçë’Ä#§D¿o„¼H†“ÑOpÒßG÷ƒeÇH{Œ 6Œ>½îÆ;"""í¡öäKØ2¦hJÂßè_xz ¶R6D¸19±±$ø;ôWX7h7b¡{a c JÌiþ¤xÿSÅúŒöt0ÀÇ{žlÿ>Q}D˜ÎÈnàŸc,ÿá#¢`ãJNúcgÜúv6 ©†)n4^°¥ŠR”³¼–ÈoBÓæa™¾`©&¾{ ºÖÕÈ¢Çôv<†Ú8Ý–ý”W,çq¾“¾‚´Ž±OûƒØß"–iÓqé3+ žÁ-‡³?Mi˜»¤¶ ÈÙmþÅAo Žàýì£çLðŠ!‡î8)nt|¨gf_})Œ†OrIØTHŽæ[èŽpYîË7äßè{Wýþ‰ä3È[7ñ±ð·Ë&O}Ç^îÆ†Œ!©FÄ (¸ÑìIY/ì†î+ŸìLk²6ÌV õâ¿&ÑÏa¯'ñW©,"xÒ»FùŽÂ´ÐÏ- qê m°8Ä;¦ô8ó¥¾ú±Y¼Ÿøi©gᾆy~ˆícÿšmŸì‡{…;!èóGqÞåx)‹ûÿ@?òâOþå0[ƒÿ!øYó ʼn“ÊÒ SÏÑá; Л¹±Uà§—bײûxt‰úŒˆB ;¾6WÙ˜=dÛ¸ô'A’{‘kL™ÕÈ™ GÜö7à®ç¹î{ ̃Ê6r:{"÷Üpo=ºŸOcÆ<̔ɞåzYewéh]²û¦;‰S£y:èÙ”rZ³fáL¡·‹öǯ$!4OЧJÐß{Òî=¾ Ž”mRR¸¹¢°ø,2RÅxNïø³öbm¦íœ³–1Œ]7àMÜx`KˆÃ¢~MøŸ^Ê$h±wáÍÎÎGè$Z/Ç~ ¥Ñ^«ÑtRõn3(m¼œ½éz_Í~ ðRéz©tzÝ)zoNìQÚì Ëñ/–õ])F]n”¥ÑK¢—JR—áM‹Ò¢Ã·Rû”ù+¹Z…¾zO÷Ž—ª—SrôRéJRéKòSQ]â礥)K¥(žD2/žü¹møõÁM•%Ê„ˆÂÏ~‡à]ßMô÷#Ùþ>û *v4×CüEϹۃ’Hý´7ñí¬ñøTºæÚƒ€!({¥zo¯m–üéžÅݘì{4s/ÝÅ£‚Q£Ã†×lRs [h<ºWTbœ!(8 G7ŸÇâ¹dfÓpâ*åÓ^Ì^ðd¸¶QÊîÆèŸ‘FdcMgȾtzÁZHÇÏô¨ff i¤ö8/Éû;{phžñ®ŽVæe=™Ü&A:øÖ%…0¹ù†ëãXãÐÙA¹.ÂhíÚØù&+6Âá‘'q‰{¡ílƒdî~4VƒIÊ2Ü_Ø-ï¦ãbÖËcyÄMƒ3l˶H[Žz³ŽHç¤<›íû õî*Áû*‚Vè–% +èîX jgó„ã9H‹økFˆ’Y‘„3°V±oÒ>ûCis?|º7T¨Ð$]m<7¾Õö&ÆÁ\ò[ÑŽy ™ñÁèLãJO~CwG‡øp1M‘;Fζ⠪¯Óa¨vÞ•¾ñ\f!%¹M•úÕ&ª&Ú¢[Ò; ¸D]Än`1²Áªª M˜Ž §E­üKy̱4À©ÉI–ÂFíe ÄY®„ÚBðî=Ç|=ˆßhÕ]Êÿ®¥Æ”1í®îxÌ<öv+œEDŠÞCãžLšb·3G¿àoò¤Ãš3%”žÂ¾>SA@È==O#cwCScÝk6ÖñAä½1¯†ÁMÆu¨¦°‚Ñ)*óJìÒ; FåÆ2øš•2ÒÝî«¢UÁXYçT‡¸†äk«-‰oݦtE¾9íàeæ;N\°™ðy)wžá¼À÷üä—ìßE+²©i˜Ñ2-DÍ VnþM†®á ˆÆ[ûÃ¥€H&›<.åèE91—à.@Ûn¿’þ3éúSΈº>”5où/ÿÚ â¤üÌ1õiç9­ ²è)é/`]­<—üƒ#ý@À ±e b‚-}&˜ˆÀÜX‘ÇH¢íB¡7÷–ò ¿%R0|ÑÐv«\ש®ÚL±€+C–*禷‘œåÏëŒð6øF7äy3cŸ5ÞïG±@â³€–eÚ¯µZÆ”vJùšÔE¨ö½å/>XK¾ý&/(šÆ[Å­dZÑ«–õÀxQóûì^d³¶O¨²×ŒkÌ0ZrîãpN|ÊÍRiÔ%©SbŠwNe;»+xDï[‰aæGžo+áSí>ŸæÀó¦±õW”ŸpÏ…‡_Xåy•A&ÙY¦Æ§Öè(€µÜWÊTbYE7I7“$£1Ìuùéé']A„Ù]§†³Lð„ADrò_NªÑÒ.#$ÝÀAÕÒ}Å÷ßòYMç?ó/|û=ö´9ëŠYø´Ì²Ã <ó}Ô˜Ã0Ç I^û‡¾KÜxóLüÃ=É!ünË7Ð$;=)LS|׌uªÖ´³©ÎÛR =þÿa½e;Òí°.õ€sÍQ»Ižÿçüz™Ù>MwB Å!f!Æ{˱=ÇßúÍÓµL{×DÏüÃUÕ˜ £1ëÿßÜä’µkÅܯKÿ˜þÃEÕ89Í0Ã]½Ë¾M×€l{3‡Õÿï ¼bJŒ 8Ç]tÛý0×ís¸ü]{é~h§ÇM0üóãl²ó ,Š)‘ÔÑ/ßOöår-\øÓ,°×¬pÿ,ÒƒÏ ÊˆêÿóÛ>û"-Xà qÃýÿþóÉ÷áç|zÿÿÿÿýðà 7à 0ÃýøÿÿÿÄ ! 10AQqa@ÿÚ?Ê#dx„!²„! š„ÉÈÈFB2Šœ%¼†Ü²„åsð¸B‚X¡©ºbBH$‘‰$Xä„5“̸#†©Á `€GÁ+*..j!6@ØÖˆ©¯±Ú!;´D|!úx#áù¶$v6t°š(%6äGÌ ‚~ðüÑ uÞ$%)ØÐK­"¡ š;4X Húw¡2 £eBc2—¡·CRM+(Üf:£}ˆa¶ÂÆÄ~‰½ !¯Xê.„Cù?'äüூõ£çM¡ÿHK¨ ©¦Í>…ª Ã@·5CÛ4”B\Rs…bF%õ?¨±Ž½ ½ï4å(Ñqbßè3°óKÂQËsJ>,6QøV./ R—4¥Í%„Pˆˆ‚2H/óFû \Ú¿¢ÎâPöCðHO K×…±*%0ѱHŒ„ddäàHÚ*é‘(¤ HM ²m”6l6àØbÄ&8Ž˜½ý´b¯‚ká YÙÆxj o6R”l¾Œu‰ö‘PlÍ ½mÁ#ðH˜8:É—à¢cm A#Ça££¨}+ycBhcò$ˆ±_¤·Ðé½ Wgyº!#‡¼.ÅÑ4=1<1ÆRëËSlØCV϶lÕÜKm±kà˜†ŽŠ6?:Nˆè$œBÖ…ÙòLAØÝÿ ˜ØH¤ÙÔu¡F†‹œ:eC󶆲èv;s/ùE–#·úÿÄ !1 0QA@aqÿÚ?ÖiQ$’A’A$ç&™à’I$’ £֌¿IOÒHñ¤žO¥Ï#ɲZERË,³Q±yÛP˜¯BÀ³ïÉôQFv1²•Š·öÊÊËšRŠ.S+ŠØê Ô9 ;…%ZH66Íã4cÆÄ­u›6„ñR[fÒ—XÔ´ž4»‡ùº$ƒ7ýB·ÈLØ·ÁIZô¤íÒ.-þ‹èu6ÏÌà—¶8ó`˜ŸéKúWÒ¾—ô¿¥ýû-³ñQ¯HIº.¢Uìmš‹ô?¬¦6,kBKâÐáQÃymÒ”WW§x=ÒŠ&!³`fÚLhÜ76!\¿M/Bá´‘w¡;! 
„¾à"#éþÄDCŸK´:Ÿ°_lmë$6(êE®š1œ×:Ræ:HkYn,—”ÑÄŠ¸A$xLV?¨×ó’ÂÁxLBa¢Ù’^%98¿3³ëpSÊ«@ý¨®"›"-¸­¦f0«TBYˆ›q¦v" êdú³Ùö‰E'5jZº¦c{­'p‰CAzî8Qj³æg*4oÄku÷bay•ˆÿ\ÊE:ˆb"¦÷Ä®2Mo2ª­ Ö%b* z³¹ƒ *Pˆè÷-Ï`,¸š Ì9õÕjTåšÔQÆaH·ž%åêdçPQßæ,"εøŽñ.ÈbÅܼ~þý%RÜQb ]æW#Ü« É,r¶\/Ķ¢–ÙÖR-Çómþ`œ )¯Ä 5‰“©Tß}OwÚÈ}%týq  aäh+»ÿä!ýýÌ]*ÏíÞ%ýÜrΈ‚ÕzÄlòJ-ç¼Ämq¹Œs•=Àçø”kó-æü’×wÔêRœ¬¥Â•ûû¨ü PœJÖå2:Ô Þæ =}áKüâR±my˜&2µ¼x•ÆcÇŸQ«8šÕb- ˜V€åÁ†ÈÝåÎ<â´@ƒüÜ©1ÈL·–ZðâœÜ«ÛlOQÅ¥æVšÌÁ…·ˆtçÄãÜefË~³Xwr’Ù¼Ü `ñ ‹Î=C¸ tžaqoÜpJwl"ƒ“1YDE×RŠÅžR™H‚Ž&D^+2Ú­AZ`,ˆÉ›‡þ¢‹êYt¤sW2Óä—¥™ˆ¢.¾Ñ¸Rg¸Ia¨70æVo Gû¦nî`¥Ñ¦»`2gPÊ.ÚæS(0Vkw y”kÀi`ÌG)H7 pbSé=7(9¸Ë1ÃIÌÍêóÔÃâWdq£™fý° â©FVys1H’ÎàÃ&»©X£<Rª‡ïíÌÄW®¦Hs4úJ!MÔhñ _XëÞ˜ˆT.ñf®r…Ž¡b*§DNömŠP+µæ,µ»—0¦X‹j¶ÄU`cÌÍÞþ)“ܧ’]B¬¸ ú‚úbï Sƒ‹¸ålÂ=ó17²€YYŠËñ)•‰±YüAÈÜlÝbà PLY^­~Ðsf –?xÕ`fò2š­ñãR›eórÔÜD5*¨0ÑÊNÍ„¾ ¸×uîS·jΊ="à>²Àó[–.ÝgQ©º€9Ú<×b€¨@n +²*iÌDœÕŒ¢òF—TJ‡†c'/³ – BW#Ì9Y8"ÞEGx /Ç–#†%´fSy´¥tÄy.Ì&nëâÚÕÀV壵Â!‰ml¾ItÕÅ÷ŠÀ”Vê A¥ó º¼ÿ3F2‚åó,+9ê uÂÔ¾æáØþ%Û$)%#Š¸Ð·XzЗ“qh~ÑÅr³’¥{†Êa´x/ìUÒ­+Ô¨h¤¯«0„ó˜W#Á S=’TW‚sÞÖØ:¿‚‡¤R4††ŒmþA°ýýÿä7EÌŒÑÌö §0²à¼â44Å™zõ„¥à•Fó-U[ƒÓ °öLœ"µRÌ¢>ù‹gTÔÅ?‰yÏæsCpNîoeAE#éŽn\һ̪å‘-ÝE]å–Ž9‚×2ñ¿YÔ¦é¸1KOqÎF^"Zbø‡/—DÁ(’ëÂtF¥Ù¼œãëR´Ãƒ&Þ?ÂXqØ,ÄLK<¶ÄGš¨:c÷÷¸0bWÌ+$É€¸Ùù•±— µi¬6 ^¥{±³#X‹—cI ™p*^íR¹ºPÅÁºõûõÌ«’[ …f8–$! æ&ÖÑÁoï‰9xžâÛ¸Cwô3Å–®å§Ž€.ð ·Êz?Ž*€ïõ™OîR2¡®°„†¶ßS ¹ÜHi×qVz"_#®â£™«æS¢=E°HÇ­k(F›‡Å VŠã$64U‚‰€ÞeYË^#ÊÁÄh~#Fjß,ºj`ÞfsÃ-†¥Öc— hyi]E•‚è-ÅÉ _D{# €~õEÈïQ‡H{-©Eã&8jRnGQ(G˜Jç7 oC›ËV%óHÜ×Ù̾ø-N…Ò¸HÂĵN&±üÊ•Œ^aâ‘F ÔkT<1umÓýÇ>[ÜS9ðöFÕc0¾Ðü@9e.ú”¯Hl¢h™ôž RÑânRZ*Zcá/æÎ‚|ñ Y¯8‡ Ü (;‡k– Ù­¬²°¬Ùψ·¨=}%`k^=ƶ‹OM ©¸ësÙÜt^:=ôóx¸è–"øÞí©PóT;pl5Gpœ–™yýÿä®7D»¬lŽûÀ6Ë\Y_¾ábÀy˜ ú–/‘=-_óÈšõÔÏ;æ>:ûG}åËÂávÖ`Ñ‘ÚÍ#¢µR ¨mU¯¼úáO¶b¼eŠâÑÔâþ±pä®e¦XªüANöeb¼Ì…˜ˆ0Ô²Cê¿ÙxPÏ9ýê<—Nó™5q Ówó12f²ñßç¨-V×ï÷9p6jãOE…£˜ï’³Ä0• Jû°Yý¿Ø•‚û Rñ—UQàšÌ=1ê¯`BˆvÔbØP%0ŒºÍ~ÔÛ¾uÿ°Y»ñý™@~råYE¬.÷0 Ë”Dõ-FêÆ¦Íã>ex2ãù„›4b±›ýûE6ÆYN UD´Vå«Ì¥ÞÎâ—OÚ/§ÔYµ%[0úÃþfóÈï3BÜÍæÔÊ]sÞ*·>ÿòȯIhg Ù¼ ÓQW5ýg„Í¥Î>j,x†)®Kí.8EiúG>~^àà¹Ûc³£Ð@«9+û–5·Y„l.î‡(4s,ýX%¿åqÖý(ÇþBÀ:ê.}–)†÷Æš}Wî;ýe5:dÆþð¯L¼w*ór³.*f#)·,]‡ß YòBZ8©K²³Ó©Uujó{”KÒÃP+×Ö aJâçWR£p]ï^ê 40P”i<€† ­6ä'xˆ-@[jÌ€ü1_¢£:¼Xåâ76†;‰eÚ k:…Doļ®=Å],%‹_x ¶«Ì­Åª)ɬ·©c—9€ šî¹«–8n äÀEÉo¨b¯ëœ 2áº@¢´ÓCŒÃÆ7 ×"ï:å%«1oá«/x…ÄBGvíp"Áº€ “mÀIOw¢=ÕÆÄÈ,9¥ûÎ=KZ…7¤jZñV”Ü,/7RËðÙÔÞdæ @ÎsÕJh§Þaò*õÛwÌ R])åûQ¢•ÿØ y+pˆÐna[{™ E¿÷÷÷+ ‡/¨ô;÷*ÈÝœe·¸w`ôh—kÿ_¿›4 h~÷ˆ‹·#[ˆš=ë™n¸JâóÖcÍpy<ÇdºkõŸÖAÒÌû…Tô:†ï3$lQÊ²Ä ·-pĄ̃€¾Aô0E4S Gx‚ f,ZòhœAGó~füJ LŠJ„M#dÀ]ÔF‰°b}‡}““ª­÷ûû™t£[c!J¨Š~RÎ*¥ùÃ)Ü(Ë(vúÊT1ƒ‰h¶ÃJw)¼fâ*n ¶ ›—3ny‡e7ÙƒÉy ›…9ú3UÓÔ5Êû†"¦Zú° f8ƒdz¶-Áî¦:ÜÁÚò}%1}ŒÖ¥×y–½Åwˆç—¿ýCˆ¾‘à‚®hê¡K”°AHâÝeYÜ¢64Œ(fî*1õàRãˆöœÆìrqà‰÷øw©â5r¦]ø–¯ÍÃH0G± pßé3òú ð#ûûˆ—”m€Z¥J¸3™ÌJs3ñp-ˆÃRBÆQÌ£ðpš˜¿¤Dÿä¥w9.\QX‰+2‡>#´51M™™72wÜÁT«ncˆMÜϼûœõs$tÏ:%J8˜V«30àÔYY®ÜþS³ùƒì††±Ã V-|:‹©bÑW0 JSLÚ‘x]j­Éù…“@˜sœ‰±Ä3_ÔjÜ·.+Ì©ô•ž¾>ÒêœÁjf¯â¥J•+1ÂJÄ¢¢ .å`Ç Ä ÈiÔ\nSˆUjÔXjÈæ"¦±\Jæ;—07p²ÃÍCb+›4‘ ÒÝb]• Ç/2êË—Žû‰™lâúKó/Ì3Ìu/1Æþþã§×0¤B‚ê<l…TÛ‹˜í…^ߤ²åÅÅ0ýB®5­ÔúDÇqÞ¥oP7™¹WaYš{. 
G¢sC?O¸î/RñPXÔ¸¹ø¹›…4‹>ó–æÐm‰róšŠ2Kb°Y³Ô¿‘ó/ÆÒüÌAîľó]ËVø—Fj¥bõÌÁ˜½ê+y…^%ÖÙx¶¢ñ/ÌÏËä¼c™x—æ_íÇS§nzüxŒáD5/í/Ôr}%ë1\|ܹĻ—™u-ܸKšEê˜6L^àßqJæå—6¨:Æ¥Ü\LKâ[.µ-_ ÇÂðœK¼\° œ2ù¹HؽA/ X«Äº*åÅnZÌ4Å<Å—Ë—æxKÏÅêcpÀ¡Úܾî…ËYí¸¨ÜD æ8©u-© ľåÅ|\?âø—ñpj[,c/9̼ÁÌ´ø^w,ü‘£Ç5i,¸Rµ|+,JVfÖJ}=KSáƒq/1møÌ¼Áù  Ì«ÚÅ„°ŒÊÑ| PQÇqtä‹yBqǤ³’q‰ í‘UÒÙ—c:ñ÷—ã1Õ•ÌÔuù—Sæ"ËÄfÑҠ˶\¼K›cãjª–ê-® Båã—<§„\JDNüDXÛã|Ü*ó2åÔ»eË‹rø„ôüM—ÜT ˆ òu«šÇqk2ݘ…7<`+$2n0BÑk\ÁVæâ}^â pÃRøÊyšøÌ«’_qDž¡4ÊL$Yx‚Ëó.=¹sˆÒEBp̦¡‰vËî\¼Ì|³ˆç3#ÿ‰õ—çó-Ÿim¶Ú”NÌJÍŽbvL€™¼‰â/ŒBª#xLLj8Ž`å‹ORÞ¥¨nìðAµÏÉóråÍó/¹âuñpK¦Y.[.)ú|¨[6„V‘ú‘^a½Ì8Z€4ÁĹyøãáÅE#æ¨Î`Â\¿Ûˆàbaâ©[}À"ÁNÖ[Bîmâ fxw73 Ó2í‰ÍÀ³;†T@Îh9ŽÈãDL«©q<Îf%¾ç2ñQÔ%Ã;•‰Ž'ÌX¿>çê *«´«˜fnå"ç‰BUš%°ŽÍ$TuÉPlbf$¨Ç˹X'1ßǤ5:÷+þ)—ˆªAæâç_õÄ×.[´%ALm‰•&YÔ®-i}ó4Ñîe ¶Kæ lcÅ/ñ)Î`6õñ­Gàœ°eóÇ_ð¿8¿øµj/J=}(ŠÖ »åÏ1ЇÞTð‡pco °àJÌóè0§ûlÃö8Kêf£Q'˜Ë·à†>n_Ââ–Ý3ÔFtâuÜlÀÀƒEæ s2Æ%Àq׿^!h~òÖ휵Zñ*&îÊ~›j1u7ŸÞ`jlq5ϸtM&‡û)pÂõ K©¸“™Íüó9ø¹MbjT¢ h.-W7ê[D8-O BÍÕ^õþÆŠµOæ ‰Ì©LC@JÌhc®â´Ei÷â mœßïˆS„/Þ ëTœEšÞæQW©G ÷)É>“ˆ} HW~£™'e‰hÂ!ãp©Gˆ[`—Á.šZ‡°¶·ÁK?òPÒ‚&_D·((y>³RÅ K÷!(?¤S QD·‚Q1îd×5ûúEjç4‡?¿Ü7¼Ñµ¾á  §ØŠ˜°÷†‡ÅQÆ×Ï3@V‰sì¹|9àHY°ÿQà(^óîÂ0¶LÕž/`'›Áb® œ}ê=º6«†.,löËöQ+Ô6¢G%Kð¾)k½ÀŽ:ŠåeL½Äã1Cš&î "ˆ·6¨lAÅ–×ïþÊVíôýýõ–úŠ-j#ux›Ïóo2ã¾8‹—æ”»b`KAÈc•›í˜ÂSs.Úu <ˆ¬u°–@t¸üý”“œ«wĦ §,“ 3ÑûýÌ}øä‹†êufûýúJ€Å÷ÃLù— §d3Ñ4aõ0eâqâ;©Ôúüs/ϯâ¡C)…ÝÜy€ÕÕ2òt@O“EnT>@·Ì4ìQûŒµ±}@†Qúñ-8#^Hô7î$ÀÃ* cN¿éã@Ä7>¯ÚQ¨Žáuñвb¼Ù÷=Ç)ÄâD‘[wª«ÄÉ–ƒ‚eÛ¸ ]ù•¬:´ ˬY–¢¤ä§â !ÄÛã_ð„±2\àÔ,eˆÚ Ä X¼' p4yaÓw®an˜w'=AŠí3c §@+ù–}ÿXîWgsvšŠÛçqNæx­À‚Öð™œÕG2ùGiRéÄâ"Ü6Ç‘ð7¯ŒL©ŸŽ¼Nk¹êS1Ô¤LÑ,×–¥•]ÃÔ¡udRÁ¨î}âF& ôFì5äfxýµ7æ¼Íãiéq¼å¨ŠÚZ–¾±˜4† ]éWÿGœ  y›\f`e©€Ë[‰Æ²á{™ÕOÚ‰sÏ…@¶^Ö_ØÎã•K½LêÞŒÄ6—ƒÓüF€!³gÖ åÎ* -ºýýþeHÖ3¹µé㈊œÂ‹42…ŽqS|@T[ĨúFg‰¶s |õðns3)SYË]0¥ùæ– @”}¦* si‚¸¡Ô´žHü¥Ù± ½˜–°W–\Lã>â[WõZ—8šX¹¹é.2†åŠèJˆ¦[‹r²Ä£ë/&©K¬é×Ptõî#:M™p±qœK²f Ö:OËVõ¿äJb‘C£1²Xä¡Ë¾³ÇîeŠkkýüB%7Q‡í¨`«¼°s˜\…J ˜a¹\ÄœfSQ¿ûaƒãDÊ. êGé-Ó~âËéX3&Ö2Ìe!XÁUu\ÜK%#Î2¢ûyŠr‚YPixuP¯Ú,JÍ)³îñãÃlµÀÂhŒ"³y•ê˜c[VKòãðÀElÆ7æ5µÑ-È5”ƒ#w[„ÌÛNÝŪ±8‰t^Kã÷ú÷…­íÆŸÜq.JSÃ^ßÜËŠÄi±€·lËæa˜ËuqNÙw5 ÇS¨o3Z‹Ÿ‡,>åÊø%ù>ò’Š;‘# ….CÅêb7PEìl›ª¦W ¡N¾¤ ›«ó ËÇÖ1FS¾P/±xkÂ- °! î=ðÕ°UD¢J›î $[h8¿–sÏEjjâ(¯ÂÕVƒd2åÃ9™aÚVPf»ê!^ÀÔ0õ!V;—!u_˜Õm‡X‰¼êWÐÏ¥ðf»”;¨Yg`ê§@æ74­„¸—7>’á8æc™Wÿ[øÇˆ+ŒW(rˆçÆHô h86“df5·awq<Áõ»æ`Õü\¨Ç\ñir­œ ±kô‹`¡Î,{8‰!rªå!Ž€ªAVg*•—2Ý®&¾ Í2üž?«—hºWÖ&U™tî)\È)ìz˜HSmæ100Û¹HÝëóexÌC‘î(¡ò»ŽŠ1î¹x”UKCs)¼ÌUNf';œ_à Îc œÄÏPøê:˜¬|cþ=¥c¤ÕŸì:aK‹âV¦Ô=óÕ¾ÈÕ n‹12šè\įˆ8{Ϧþ/¼Ä ZÄW º~`RïÂ]KµÄ€ÆÓeœL‹ÂÀSÜÞ+ lÖÆ\Dɉü\Øp—¾å%ª\dE $´?˜ˆ»:…¶su,ʶ£Ë¹Ã÷%eyÌ5aYµ³uê·,\„èUIXœÜÙ9ùçqÛ`ÏÆfq ü^5_§0âéÿ>¡>Ÿ‰L¸ DÙnÀªPÕI0§ U dõšæX…$Ì¿˜ØwÍA´Ñ8œy€TµÄ&ößpøƒb¤ˆp$è«>×/DUš–¶À7ö—u«Ñ.³Lß _.Ï£ ·fFe*ÈÜ(0Z£È/04î#•#^\V*kaîØjë)LwyŠ0UF!sŒÄÎ"7I8œÎ31OÁÁˆ ó™\@ÆcÜðŽçÒsSüž~?u1&³ c®`¦k…è‰VõÞ +r ê `­õH´´^XŽ£ˆåý]Dht keg/Ö) k\ˆ!ÂOÞ!ÕSþ%À)ÌÀˆ+ÔÉ{ø3ò o0_ÛfªJr¾ZLŒ08½«jìÎb¸Ù~#,ۢм ƒ+¢ %f ×ÅßÃ-»&n:•ðø©¨n?™V`—D©ÇÁ¯‹‹›–1*™Å|}?oLU#¶`^«‹Di¼bÈW$ÄÄ”§Ü(5y—¥·ìs<Í¿ÔÏøæU·y†€ÃÈ–ÇÈS/)xƒƒ”>°Z@hs˜›a8” ~ƒQm¿?|3ú…“Z·NG‰xPæó “hÅèu0y`õZuAûSÚÌi‘¿*¶FºA!ea„ÔK<"0 Ꭷ,âf0ÝFSyœ8ñ*æxffßàa†¦‡?'ÆøÄw3úÊ8‚õH!{@RàðãÇñÚ;©Ø â*%«..‚™L±—<ósa‰È¼Ë‹]wuqçvßíÎ\7­ TÈ»¸%gr²{~M|^?á²›:aŒðbñO´&àÛ.5³˜ù1ôá¿éŠ"Êx€Vƒ¶RÿbQQÚXy¯Ÿ_ ïà†â\º‚Þ±.ãæ¦j‰Äê~/â±AÎ!m§¿Ô>3ÇïîàBq¨;6¢"oámq œ›wïQ/Ü©¬ãˆ— Ƥ‰6XnéÏ16Ð]Çqb]:kç_­M)ëá%LKm‘At8îUŠ™wzØG<Ó]8ŒU®„µTŠ>¥ aî%K¸N¡ŒÅÍê.5Sˆ)ê–qϬ»¿Ç˜Î#î{„¾#¯‹òÄ‹¨’¶õRß–7cZ¯„øZÎ»å‰ Æïr®³,Pê`¬gpdüFä6QÚÊ·w3l]¯üŸ˜ç7àÎMcp‚«|LA¤8ÂÙ{ƒ 7,1k£uÜ©ô—Eoä—q×ÍÀñ9ž#-Á¿Ž~á×à “ŸŒw>³$æ'3Ä¡ XnU ¬Ú33p[2®åŸªa•£·_ùŽqM@­A%Bâó1ÿ'ƽˎèüRZîgãoÅY=ÊÅÜæ‰ô˜¨EUþ1,œÁ*bãÝËóq¬›Éÿ;•âlø©yœÂ=™á}Ͷ™]gRÔ5æ¡ÐÇSdÕE*¢ôÎ&_S¹hÇüÊœ|;•ñ¸÷îþk‰ýÀÄu±sÃTC~å7Q—™ss¸N!ÄÍJe.%kþ8•Ìãá†âæSúOÿÙforecast/vignettes/orcidlink.sty0000644000176200001440000000433314323126533016614 0ustar liggesusers%% %% This is file `orcidlink.sty', %% generated with the docstrip utility. 
%% %% The original source files were: %% %% orcidlink.dtx (with options: `package') %% %% This is a generated file. %% %% Copyright (C) 2020 by Leo C. Stein %% -------------------------------------------------------------------------- %% This work may be distributed and/or modified under the %% conditions of the LaTeX Project Public License, either version 1.3 %% of this license or (at your option) any later version. %% The latest version of this license is in %% http://www.latex-project.org/lppl.txt %% and version 1.3 or later is part of all distributions of LaTeX %% version 2005/12/01 or later. %% \NeedsTeXFormat{LaTeX2e}[1994/06/01] \ProvidesPackage{orcidlink} [2021/06/11 v1.0.4 Linked ORCiD logo macro package] %% All I did was package up Milo's code on TeX.SE, %% see https://tex.stackexchange.com/a/445583/34063 \RequirePackage{hyperref} \RequirePackage{tikz} \ProcessOptions\relax \usetikzlibrary{svg.path} \definecolor{orcidlogocol}{HTML}{A6CE39} \tikzset{ orcidlogo/.pic={ \fill[orcidlogocol] svg{M256,128c0,70.7-57.3,128-128,128C57.3,256,0,198.7,0,128C0,57.3,57.3,0,128,0C198.7,0,256,57.3,256,128z}; \fill[white] svg{M86.3,186.2H70.9V79.1h15.4v48.4V186.2z} svg{M108.9,79.1h41.6c39.6,0,57,28.3,57,53.6c0,27.5-21.5,53.6-56.8,53.6h-41.8V79.1z M124.3,172.4h24.5c34.9,0,42.9-26.5,42.9-39.7c0-21.5-13.7-39.7-43.7-39.7h-23.7V172.4z} svg{M88.7,56.8c0,5.5-4.5,10.1-10.1,10.1c-5.6,0-10.1-4.6-10.1-10.1c0-5.6,4.5-10.1,10.1-10.1C84.2,46.7,88.7,51.3,88.7,56.8z}; } } %% Reciprocal of the height of the svg whose source is above. The %% original generates a 256pt high graphic; this macro holds 1/256. \newcommand{\@OrigHeightRecip}{0.00390625} %% We will compute the current X height to make the logo the right height \newlength{\@curXheight} \DeclareRobustCommand\orcidlink[1]{% \texorpdfstring{% \setlength{\@curXheight}{\fontcharht\font`X}% \href{https://orcid.org/#1}{\XeTeXLinkBox{\mbox{% \begin{tikzpicture}[yscale=-\@OrigHeightRecip*\@curXheight, xscale=\@OrigHeightRecip*\@curXheight,transform shape] \pic{orcidlogo}; \end{tikzpicture}% }}}}{}} \endinput %% %% End of file `orcidlink.sty'. 
forecast/data/0000755000176200001440000000000014150370574012777 5ustar liggesusersforecast/data/taylor.rda0000644000176200001440000002402414150370574015003 0ustar liggesusers[binary RData payload of the taylor dataset omitted]
forecast/data/gold.rda0000644000176200001440000000532414150370574014420 0ustar liggesusers[binary RData payload of the gold dataset omitted]
forecast/data/wineind.rda0000644000176200001440000001270414150370574015124 0ustar liggesusers[binary RData payload of the wineind dataset omitted]
forecast/data/woolyrnq.rda0000644000176200001440000000672414150370574015366 0ustar liggesusers[binary RData payload of the woolyrnq dataset omitted]
forecast/data/gas.rda0000644000176200001440000000273014150370574014243 0ustar liggesusers[binary RData payload of the gas dataset omitted]
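The five .rda payloads above ship the package's example series. A minimal R sketch of
how they are typically used once the package is attached (the calls are illustrative,
not prescribed by the package):

    library(forecast)
    frequency(taylor)   # 336: half-hourly electricity demand with a weekly seasonal period
    plot(wineind)       # monthly Australian wine sales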
forecast/src/0000755000176200001440000000000014634702030012646 5ustar liggesusersforecast/src/etscalc.c0000644000176200001440000001710314150370574014441 0ustar liggesusers#include <math.h>

#define NONE 0
#define ADD 1
#define MULT 2
#define DAMPED 1
#define TOL 1.0e-10
#define HUGEN 1.0e10
#define NA  -99999.0

// Functions called by R
void etscalc(double *, int *, double *, int *, int *, int *, int *,
    double *, double *, double *, double *, double *, double *, double *, int *);
void etssimulate(double *, int *, int *, int *, int *,
    double *, double *, double *, double *, int *, double *, double *);
void etsforecast(double *, int *, int *, int *, double *, int *, double *);

// Internal functions
void forecast(double, double, double *, int, int, int, double, double *, int);
void update(double *, double *, double *, double *, double *, double *,
    int, int, int, double, double, double, double, double);

// ******************************************************************

void etscalc(double *y, int *n, double *x, int *m, int *error, int *trend, int *season,
    double *alpha, double *beta, double *gamma, double *phi, double *e, double *lik, double *amse, int *nmse)
{
    int i, j, nstates;
    double oldl, l, oldb, b, olds[24], s[24], f[30], lik2, tmp, denom[30];

    if((*m > 24) & (*season > NONE))
        return;
    else if(*m < 1)
        *m = 1;
    if(*nmse > 30)
        *nmse = 30;
    nstates = (*m)*(*season>NONE) + 1 + (*trend>NONE);

    // Copy initial state components
    l = x[0];
    if(*trend > NONE)
        b = x[1];
    if(*season > NONE)
    {
        for(j=0; j<(*m); j++)
            s[j] = x[(*trend>NONE)+j+1];
    }

    *lik = 0.0;
    lik2 = 0.0;
    for(j=0; j<(*nmse); j++)
    {
        amse[j] = 0.0;
        denom[j] = 0.0;
    }

    for (i=0; i<(*n); i++)
    {
        // COPY PREVIOUS STATE
        oldl = l;
        if(*trend > NONE)
            oldb = b;
        if(*season > NONE)
        {
            for(j=0; j<(*m); j++)
                olds[j] = s[j];
        }

        // ONE STEP FORECAST
        forecast(oldl, oldb, olds, *m, *trend, *season, *phi, f, *nmse);
        if(fabs(f[0]-NA) < TOL)
        {
            *lik = NA;
            return;
        }

        if(*error == ADD)
            e[i] = y[i] - f[0];
        else
            e[i] = (y[i] - f[0])/f[0];
        for(j=0; j<(*nmse); j++)
        {
            if(i+j<(*n))
            {
                denom[j] += 1.0;
                tmp = y[i+j]-f[j];
                amse[j] = (amse[j] * (denom[j]-1.0) + (tmp*tmp)) / denom[j];
            }
        }

        // UPDATE STATE
        update(&oldl, &l, &oldb, &b, olds, s, *m, *trend, *season, *alpha, *beta, *gamma, *phi, y[i]);

        // STORE NEW STATE
        x[nstates*(i+1)] = l;
        if(*trend > NONE)
            x[nstates*(i+1)+1] = b;
        if(*season > NONE)
        {
            for(j=0; j<(*m); j++)
                x[(*trend>NONE)+nstates*(i+1)+j+1] = s[j];
        }
        *lik = *lik + e[i]*e[i];
        lik2 += log(fabs(f[0]));
    }
    *lik = (*n) * log(*lik);
    if(*error == MULT)
        *lik += 2*lik2;
}

// *********************************************************************************

void etssimulate(double *x, int *m, int *error, int *trend, int *season,
    double *alpha, double *beta, double *gamma, double *phi, int *h, double *y, double *e)
{
    int i, j, nstates;
    double oldl, l, oldb, b, olds[24], s[24], f[10];

    if((*m > 24) & (*season > NONE))
        return;
    else if(*m < 1)
        *m = 1;
    nstates = (*m)*(*season>NONE) + 1 + (*trend>NONE);

    // Copy initial state components
    l = x[0];
    if(*trend > NONE)
        b = x[1];
    if(*season > NONE)
    {
        for(j=0; j<(*m); j++)
            s[j] = x[(*trend>NONE)+j+1];
    }

    for (i=0; i<(*h); i++)
    {
        // COPY PREVIOUS STATE
        oldl = l;
        if(*trend > NONE)
            oldb = b;
        if(*season > NONE)
        {
            for(j=0; j<(*m); j++)
                olds[j] = s[j];
        }

        // ONE STEP FORECAST
        forecast(oldl, oldb, olds, *m, *trend, *season, *phi, f, 1);
        if(fabs(f[0]-NA) < TOL)
        {
            y[0] = NA;
            return;
        }
        if(*error == ADD)
            y[i] = f[0] + e[i];
        else
            y[i] = f[0]*(1.0+e[i]);

        // UPDATE STATE
        update(&oldl, &l, &oldb, &b, olds, s, *m, *trend, *season, *alpha, *beta, *gamma, *phi, y[i]);
    }
}

// *********************************************************************************

void etsforecast(double *x, int *m, int *trend, int *season, double *phi, int *h, double *f)
{
    int j;
    double l, b, s[24];

    if((*m > 24) & (*season > NONE))
        return;
    else if(*m < 1)
        *m = 1;

    // Copy initial state components
    l = x[0];
    b = 0.0;
    if(*trend > NONE)
        b = x[1];
    if(*season > NONE)
    {
        for(j=0; j<(*m); j++)
            s[j] = x[(*trend>NONE)+j+1];
    }

    // Compute forecasts
    forecast(l, b, s, *m, *trend, *season, *phi, f, *h);
}

// *****************************************************************

void forecast(double l, double b, double *s, int m, int trend, int season, double phi, double *f, int h)
{
    int i, j;
    double phistar;

    phistar = phi;
    // FORECASTS
    for(i=0; i<h; i++)
    {
        if(trend == NONE)
            f[i] = l;
        else if(trend == ADD)
            f[i] = l + phistar*b;
        else if(b < 0)
            f[i] = NA;
        else
            f[i] = l * pow(b, phistar);
        j = m - 1 - i;
        while(j < 0)
            j += m;
        if(season == ADD)
            f[i] = f[i] + s[j];
        else if(season == MULT)
            f[i] = f[i] * s[j];
        if(i < (h-1))
        {
            if(fabs(phi-1.0) < TOL)
                phistar = phistar + 1.0;
            else
                phistar = phistar + pow(phi, (double) (i+1));
        }
    }
}

// *****************************************************************

void update(double *oldl, double *l, double *oldb, double *b, double *olds, double *s,
    int m, int trend, int season, double alpha, double beta, double gamma, double phi, double y)
{
    int j;
    double q, phib, p, r, t;

    // NEW LEVEL
    if(trend == NONE)
    {
        q = *oldl;                 // l(t-1)
        phib = 0;
    }
    else if(trend == ADD)
    {
        phib = phi*(*oldb);
        q = *oldl + phib;          // l(t-1) + phi*b(t-1)
    }
    else if(fabs(phi-1.0) < TOL)
    {
        phib = *oldb;
        q = *oldl * (*oldb);       // l(t-1)*b(t-1)
    }
    else
    {
        phib = pow(*oldb, phi);
        q = (*oldl) * phib;        // l(t-1)*b(t-1)^phi
    }
    if(season == NONE)
        p = y;
    else if(season == ADD)
        p = y - olds[m-1];         // y[t] - s[t-m]
    else
    {
        if(fabs(olds[m-1]) < TOL)
            p = HUGEN;
        else
            p = y / olds[m-1];     // y[t]/s[t-m]
    }
    *l = q + alpha*(p-q);

    // NEW GROWTH
    if(trend > NONE)
    {
        if(trend==ADD)
            r = (*l) - (*oldl);    // l[t]-l[t-1]
        else //if(trend==MULT)
        {
            if(fabs(*oldl) < TOL)
                r = HUGEN;
            else
                r = (*l)/(*oldl);  // l[t]/l[t-1]
        }
        *b = phib + (beta/alpha)*(r - phib);
            // b[t] = phi*b[t-1] + beta*(r - phi*b[t-1])
            // b[t] = b[t-1]^phi + beta*(r - b[t-1]^phi)
    }

    // NEW SEASON
    if(season > NONE)
    {
        if(season==ADD)
            t = y - q;
        else //if(season==MULT)
        {
            if(fabs(q) < TOL)
                t = HUGEN;
            else
                t = y / q;
        }
        s[0] = olds[m-1] + gamma*(t - olds[m-1]);   // s[t] = s[t-m] + gamma*(t - s[t-m])
        for(j=1; j<m; j++)
            s[j] = olds[j-1];      // s[t-j]
    }
}
forecast/src/makeBATSMatrices.cpp [tar header and the beginning of this file, through the opening of makeBATSGMatrix, were lost in extraction; the text resumes inside makeBATSGMatrix]
	if(LENGTH(gammaVector_s) > 1) {
			for(R_len_t s = 0; s < (LENGTH(seasonalPeriods_s)-1); s++) {
				position = position + seasonalPeriods[s];
				gTranspose(0, position) = gammaVector[(s+1)];
			}
		}
	}
	if(*p != 0) {
		gTranspose(0, (adjustBeta+gammaLength+1)) = 1;
	}
	if(*q != 0) {
		gTranspose(0, (adjustBeta+gammaLength+ *p +1)) = 1;
	}
	arma::mat g(arma::trans(gTranspose));
	seasonalPeriods = 0;
	p = 0;
	q = 0;
	gammaVector = 0;
	if((!Rf_isNull(gammaVector_s))&&(!Rf_isNull(seasonalPeriods_s))) {
		arma::mat gammaBold = gTranspose.cols((1+adjustBeta), (adjustBeta+gammaLength));
		return List::create(
				Named("g") = g,
				Named("g.transpose") = gTranspose,
				Named("gamma.bold.matrix") = gammaBold
				);
	}
else { return List::create( Named("g") = g, Named("g.transpose") = gTranspose, Named("gamma.bold.matrix") = R_NilValue ); } END_RCPP } /* SEXP makeFMatrix(SEXP alpha_s, SEXP beta_s, SEXP smallPhi_s, SEXP seasonalPeriods_s, SEXP gammaBoldMatrix_s, SEXP arCoefs_s, SEXP maCoefs_s) { BEGIN_RCPP NumericMatrix alpha_r(alpha_s); if(!Rf_isNull(beta_s)) { NumericMatrix beta_r(beta_s); bool indBeta = true; } else { bool indBeta = false; } if(!Rf_isNull(smallPhi_s)) { NumericMatrix smallPhi_r(smallPhi_s); bool indSmallPhi = true; } else { bool indSmallPhi = false; } if(!Rf_isNull(seasonalPeriods_s)) { NumericMatrix seasonalPeriods_r(seasonalPeriods_s); bool indSeasonalPeriods = true; } else { bool indSeasonalPeriods = false; } if(!Rf_isNull(gammaBoldMatrix_s)) { NumericMatrix gammaBoldMatrix_r(gammaBoldMatrix_s); bool indGammaBoldMatrix = true; } else { bool indGammaBoldMatrix = false; } if(!Rf_isNull(arCoefs_s)) { NumericMatrix arCoefs_r(arCoefs_s); bool indArCoefs = true; } else { bool indArCoefs = false; } if(!Rf_isNull(maCoefs_s)) { NumericMatrix maCoefs_r(maCoefs_s); bool indMaCoefs = true; } else { bool indMaCoefs = false; } arma::mat END_RCPP } */ forecast/src/Makevars.win0000644000176200001440000000017614567526540015161 0ustar liggesusersPKG_CXXFLAGS = $(SHLIB_OPENMP_CXXFLAGS) -DR_NO_REMAP PKG_LIBS = $(SHLIB_OPENMP_CXXFLAGS) $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS) forecast/src/updateTBATSMatrices.cpp0000644000176200001440000000247414456202551017136 0ustar liggesusers#include "calcBATS.h" using namespace Rcpp ; SEXP updateTBATSGammaBold(SEXP gammaBold_s, SEXP kVector_s, SEXP gammaOne_s, SEXP gammaTwo_s) { BEGIN_RCPP NumericMatrix gammaBold(gammaBold_s); IntegerVector kVector(kVector_s); NumericVector gammaOne(gammaOne_s); NumericVector gammaTwo(gammaTwo_s); int endPos = 0; int numK = kVector.size(); for(int i =0; i < numK; i++) { for(int j = endPos; j < (kVector(i) + endPos); j++) { gammaBold(0,j)=gammaOne(i); } for(int j = (kVector(i) + endPos); j < ((2*kVector(i)) + endPos); j++) { gammaBold(0,j)=gammaTwo(i); } endPos += 2 * kVector(i); } return R_NilValue; END_RCPP } SEXP updateTBATSGMatrix(SEXP g_s, SEXP gammaBold_s, SEXP alpha_s, SEXP beta_s) { BEGIN_RCPP int adjBeta = 0; NumericMatrix g_r(g_s); //Rprintf("one\n"); g_r(0,0) = REAL(alpha_s)[0]; //Rprintf("two\n"); if(!Rf_isNull(beta_s)) { //Rprintf("three\n"); g_r(1,0) = REAL(beta_s)[0]; adjBeta = 1; } //Rprintf("four\n"); if(!Rf_isNull(gammaBold_s)) { NumericMatrix gammaBold_r(gammaBold_s); arma::mat gammaBold(gammaBold_r.begin(), gammaBold_r.nrow(), gammaBold_r.ncol(), false); arma::mat g(g_r.begin(), g_r.nrow(), g_r.ncol(), false); g.submat((adjBeta+1), 0,(adjBeta+gammaBold.n_cols), 0) = trans(gammaBold); } //Rprintf("five\n"); return R_NilValue; END_RCPP } forecast/src/calcBATS.cpp0000644000176200001440000002170514323125536014740 0ustar liggesusers#include "calcBATS.h" using namespace Rcpp ; SEXP calcBATS(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es ){ BEGIN_RCPP NumericMatrix yr(ys); NumericMatrix yHatr(yHats); NumericMatrix wTransposer(wTransposes); NumericMatrix Fr(Fs); NumericMatrix xr(xs); NumericMatrix gr(gs); NumericMatrix er(es); int t; arma::mat y(yr.begin(), yr.nrow(), yr.ncol(), false); arma::mat yHat(yHatr.begin(), yHatr.nrow(), yHatr.ncol(), false); arma::mat wTranspose(wTransposer.begin(), wTransposer.nrow(), wTransposer.ncol(), false); arma::mat F(Fr.begin(), Fr.nrow(), Fr.ncol(), false); arma::mat x(xr.begin(), xr.nrow(), xr.ncol(), false); arma::mat g(gr.begin(), gr.nrow(), gr.ncol(), false); 
arma::mat e(er.begin(), er.nrow(), er.ncol(), false); for(t = 1; t < yr.ncol(); t++) { yHat.col(t) = wTranspose * x.col((t-1)); e(0,t) = y(0, t) - yHat(0, t); x.col(t) = F * x.col((t-1)) + g * e(0,t); } return List::create( Named("y.hat") = yHat, Named("e") = e, Named("x") = x ); END_RCPP } SEXP calcBATSFaster(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es, SEXP xNought_s, SEXP sPeriods_s, SEXP betaV, SEXP tau_s, SEXP p_s, SEXP q_s ) { BEGIN_RCPP NumericMatrix yr(ys); NumericMatrix yHatr(yHats); NumericMatrix wTransposer(wTransposes); NumericMatrix Fr(Fs); NumericMatrix xr(xs); NumericMatrix gr(gs); NumericMatrix er(es); NumericMatrix xNought_r(xNought_s); //IntegerVector sPeriodsR(sPeriods); int adjBeta, previousS, lengthArma, *tau, *p, *q, *sPeriods; R_len_t lengthSeasonal; tau = &INTEGER(tau_s)[0]; p = &INTEGER(p_s)[0]; q = &INTEGER(q_s)[0]; lengthArma = *p + *q; if(!Rf_isNull(sPeriods_s)) { sPeriods = INTEGER(sPeriods_s); lengthSeasonal = LENGTH(sPeriods_s); } if(!Rf_isNull(betaV)) { adjBeta = 1; } else { adjBeta = 0; } arma::mat y(yr.begin(), yr.nrow(), yr.ncol(), false); arma::mat yHat(yHatr.begin(), yHatr.nrow(), yHatr.ncol(), false); arma::mat wTranspose(wTransposer.begin(), wTransposer.nrow(), wTransposer.ncol(), false); arma::mat F(Fr.begin(), Fr.nrow(), Fr.ncol(), false); arma::mat x(xr.begin(), xr.nrow(), xr.ncol(), false); arma::mat g(gr.begin(), gr.nrow(), gr.ncol(), false); arma::mat e(er.begin(), er.nrow(), er.ncol(), false); arma::mat xNought(xNought_r.begin(), xNought_r.nrow(), xNought_r.ncol(), false); if(!Rf_isNull(sPeriods_s)) { //One //Rprintf("one-1\n"); yHat.col(0) = wTranspose.cols(0, adjBeta) * xNought.rows(0, adjBeta); //Rprintf("one-2\n"); previousS = 0; for(R_len_t i = 0; i < lengthSeasonal; i++) { //Rprintf("one-3\n"); yHat(0,0) = yHat(0,0) + xNought( (previousS + sPeriods[i] + adjBeta), 0); previousS += sPeriods[i]; } if(lengthArma > 0) { //Rprintf("bg-1"); yHat.col(0) = yHat(0,0) + wTranspose.cols((*tau + adjBeta + 1), (xNought.n_rows-1)) * xNought.rows((*tau + adjBeta + 1), (xNought.n_rows-1)); } //Two e(0,0) = y(0, 0) - yHat(0, 0); //Three //Rprintf("three-5\n"); x.submat(0, 0, adjBeta, 0) = F.submat(0,0,adjBeta,adjBeta) * xNought.rows(0,adjBeta); if(lengthArma > 0) { //Rprintf("bg-2"); x.submat(0, 0, adjBeta, 0) += F.submat(0,(adjBeta+ *tau + 1),adjBeta,(F.n_cols - 1)) * xNought.rows((adjBeta+ *tau + 1),(F.n_cols - 1)); } previousS = 0; for(R_len_t i = 0; i < lengthSeasonal; i++) { //Rprintf("three-7\n"); x((adjBeta+previousS+1),0) = xNought((adjBeta+previousS+sPeriods[i]),0); if(lengthArma > 0) { //Rprintf("bg-3"); x.submat((adjBeta+previousS+1),0, (adjBeta+previousS+1),0) = x.submat((adjBeta+previousS+1),0, (adjBeta+previousS+1),0) + F.submat((adjBeta + previousS + 1), (adjBeta+*tau+1), (adjBeta + previousS + 1), (F.n_cols-1)) * xNought.rows((adjBeta + *tau +1), (F.n_cols-1)); } //Rprintf("three-9\n"); x.submat((adjBeta + previousS + 2), 0, (adjBeta + previousS + sPeriods[i]), 0) = xNought.rows((adjBeta + previousS + 1), (adjBeta + previousS + sPeriods[i] -1)); previousS += sPeriods[i]; } if(*p > 0) { //Rprintf("bg-4"); x.submat((adjBeta+ *tau + 1),0,(adjBeta+ *tau + 1),0) = F.submat((adjBeta + *tau +1), (adjBeta + *tau +1), (adjBeta + *tau + 1), (F.n_cols-1)) * xNought.rows((adjBeta+*tau+1), (F.n_cols-1)); //Rprintf("bg-5"); ////error is HERE!!! 
if(*p > 1) { x.submat((adjBeta + *tau + 2),0,(adjBeta + *tau + *p),0) = xNought.rows((adjBeta + *tau + 1),(adjBeta + *tau + *p-1)); } } if(*q > 0) { //Rprintf("three-12\n"); x((adjBeta+ *tau + *p + 1),0) = 0; if(*q > 1) { //Rprintf("three-13\n"); x.submat((adjBeta+ *tau + *p + 2), 0, (adjBeta + *tau + *p + *q) , 0) = xNought.rows((adjBeta + *tau + *p + 1),(adjBeta + *tau + *p + *q - 1)); } } ///Temporary fix! //x.col(0) += g * e(0,0); //End /////////// x(0,0) += g(0,0) * e(0,0); if(adjBeta == 1) { x(1,0) += g(1,0) * e(0,0); } previousS = 0; for(R_len_t i = 0; i < lengthSeasonal; i++) { x((adjBeta+previousS+1),0) += g((adjBeta+previousS+1),0) * e(0,0); previousS += sPeriods[i]; } if(*p > 0) { x((adjBeta + *tau + 1),0) += e(0,0); if(*q > 0) { x((adjBeta + *tau + *p + 1),0) += e(0,0); } } else if(*q > 0) { x((adjBeta + *tau + 1),0) += e(0,0); } ///////////////////////////////// for(int t = 1; t < yr.ncol(); t++) { //Rprintf("point-x\n"); //One yHat.col(t) = wTranspose.cols(0, adjBeta) * x.submat(0, (t-1), adjBeta, (t-1)); previousS = 0; for(R_len_t i = 0; i < lengthSeasonal; i++) { //mod here //Rprintf("point-xx\n"); yHat(0,t) += x((previousS + sPeriods[i] + adjBeta), (t-1)); previousS += sPeriods[i]; } if(lengthArma > 0) { //Rprintf("bg-6"); yHat.col(t) += wTranspose.cols((*tau + adjBeta + 1), (xNought.n_rows-1)) * x.submat((*tau + adjBeta + 1), (t-1), (x.n_rows-1), (t-1)); } //Two //Rprintf("point-x4\n"); e(0,t) = y(0, t) - yHat(0, t); //Three //Rprintf("point-x5\n"); x.submat(0, t, adjBeta, t) = F.submat(0,0,adjBeta,adjBeta) * x.submat(0, (t-1), adjBeta, (t-1)); if(lengthArma > 0) { //Rprintf("bg-7"); x.submat(0, t, adjBeta, t) += F.submat(0,(adjBeta+ *tau + 1),adjBeta,(F.n_cols - 1)) * x.submat((adjBeta+ *tau + 1), (t-1), (F.n_cols - 1), (t-1)); } previousS = 0; for(R_len_t i = 0; i < lengthSeasonal; i++) { //Rprintf("point-x7\n"); x((adjBeta+previousS+1),t) = x((adjBeta+previousS+sPeriods[i]),(t-1)); if(lengthArma > 0) { //Rprintf("bg-8"); x.submat((adjBeta+previousS+1),t, (adjBeta+previousS+1),t) += F.submat((adjBeta + previousS + 1), (adjBeta+*tau+1), (adjBeta + previousS + 1), (F.n_cols-1)) * x.submat((adjBeta + *tau +1), (t-1), (F.n_cols-1), (t-1)); } //Rprintf("Three-L-9\n"); x.submat((adjBeta + previousS + 2), t, (adjBeta + previousS + sPeriods[i]), t) = x.submat((adjBeta + previousS + 1), (t-1), (adjBeta + previousS + sPeriods[i] -1), (t-1)); previousS += sPeriods[i]; } /* if(lengthArma > 0) { x.submat((adjBeta+ *tau + 1),t, (x.n_rows-1),t) = F.submat((adjBeta+ *tau + 1), (adjBeta+ *tau + 1), (F.n_rows - 1), (F.n_rows - 1)) * x.submat((adjBeta+ *tau + 1),(t-1), (x.n_rows-1),(t-1)); } */ if(*p > 0) { //Rprintf("bg-9"); x.submat((adjBeta+ *tau + 1),t, (adjBeta+ *tau + 1),t) = F.submat((adjBeta + *tau +1), (adjBeta + *tau +1), (adjBeta + *tau + 1), (F.n_cols-1)) * x.submat((adjBeta+*tau+1), (t-1), (F.n_cols-1), (t-1)); if(*p > 1) { x.submat((adjBeta + *tau + 2),t,(adjBeta + *tau + *p),t) = x.submat((adjBeta + *tau + 1), (t-1), (adjBeta + *tau + *p -1), (t-1)); } } if(*q > 0) { x((adjBeta+ *tau + *p + 1),t) = 0; if(*q > 1) { x.submat((adjBeta+ *tau + *p + 2), t, (adjBeta + *tau + *p + *q) , t) = x.submat((adjBeta + *tau + *p + 1), (t-1), (adjBeta + *tau + *p + *q - 1), (t-1)); } } ///Temporary fix! 
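			// NOTE (added for exposition, not in the original source): the
			// "Temporary fix" block below expands the one-line state update
			// x.col(t) += g * e(0,t) (kept commented out) into targeted
			// per-component additions, so only the level, trend, seasonal and
			// ARMA entries that actually receive the innovation are touched.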
			//x.col(t) += g * e(0,t);
			//End
			///////////
			x(0,t) += g(0,0) * e(0,t);
			if(adjBeta == 1) {
				x(1,t) += g(1,0) * e(0,t);
			}
			previousS = 0;
			for(R_len_t i = 0; i < lengthSeasonal; i++) {
				x((adjBeta+previousS+1),t) += g((adjBeta+previousS+1),0) * e(0,t);
				previousS += sPeriods[i];
			}
			if(*p > 0) {
				x((adjBeta + *tau + 1),t) += e(0,t);
				if(*q > 0) {
					x((adjBeta + *tau + *p + 1),t) += e(0,t);
				}
			} else if(*q > 0) {
				x((adjBeta + *tau + 1),t) += e(0,t);
			}
			/////////////////////////////////
		}
	} else {
		yHat.col(0) = wTranspose * xNought;
		e(0,0) = y(0, 0) - yHat(0, 0);
		x.col(0) = F * xNought + g * e(0,0);
		for(int t = 1; t < yr.ncol(); t++) {
			yHat.col(t) = wTranspose * x.col((t-1));
			e(0,t) = y(0, t) - yHat(0, t);
			x.col(t) = F * x.col((t-1)) + g * e(0,t);
		}
	}
	return R_NilValue;

	END_RCPP
}

SEXP calcWTilda(SEXP wTildaTransposes, SEXP Ds) {
	BEGIN_RCPP

	NumericMatrix wTildaTransposer(wTildaTransposes);
	NumericMatrix Dr(Ds);
	int t;

	arma::mat wTildaTranspose(wTildaTransposer.begin(), wTildaTransposer.nrow(), wTildaTransposer.ncol(), false);
	arma::mat D(Dr.begin(), Dr.nrow(), Dr.ncol(), false);

	for(t = 1; t < wTildaTransposer.nrow(); t++) {
		wTildaTranspose.row(t) = wTildaTranspose.row((t-1)) * D;
	}

	return wTildaTransposer;

	END_RCPP
}
forecast/src/calcBATS.h0000644000176200001440000000463714323125536014412 0ustar liggesusers#ifndef _forecast_CALCBATS
#define _forecast_CALCBATS

/////////////////////////////////////
// if unable to compile, please comment these lines
// #define __GXX_EXPERIMENTAL_CXX0X__ 1
// #ifndef HAVE_ERRNO_T
// typedef int errno_t;
// #endif
// #if __WORDSIZE == 64
// # ifndef __intptr_t_defined
// typedef long int intptr_t;
// # define __intptr_t_defined
// # endif
// typedef unsigned long int uintptr_t;
// #else
// # ifndef __intptr_t_defined
// typedef int intptr_t;
// # define __intptr_t_defined
// # endif
// typedef unsigned int uintptr_t;
// #endif
// #include
// #include
// #include
// #include
// #include
// if unable to compile, please comment these lines
/////////////////////////////////////

#include <RcppArmadillo.h>
#include <Rcpp.h>

RcppExport SEXP calcBATS(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es) ;

RcppExport SEXP calcBATSFaster(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es, SEXP xNought_s, SEXP sPeriods_s, SEXP betaV, SEXP tau_s, SEXP p_s, SEXP q_s) ;

RcppExport SEXP calcWTilda(SEXP wTildaTransposes, SEXP Ds) ;

RcppExport SEXP makeBATSWMatrix(SEXP smallPhi_s, SEXP sPeriods_s, SEXP arCoefs_s, SEXP maCoefs_s) ;

RcppExport SEXP makeBATSGMatrix(SEXP alpha_s, SEXP beta_s, SEXP gammaVector_s, SEXP seasonalPeriods_s, SEXP p_s, SEXP q_s) ;

RcppExport SEXP updateFMatrix(SEXP F_s, SEXP smallPhi_s, SEXP alpha_s, SEXP beta_s, SEXP gammaBold_s, SEXP ar_s, SEXP ma_s, SEXP tau_s) ;

RcppExport SEXP updateWtransposeMatrix(SEXP wTranspose_s, SEXP smallPhi_s, SEXP tau_s, SEXP arCoefs_s, SEXP maCoefs_s, SEXP p_s, SEXP q_s) ;

RcppExport SEXP updateGMatrix(SEXP g_s, SEXP gammaBold_s, SEXP alpha_s, SEXP beta_s, SEXP gammaVector_s, SEXP seasonalPeriods_s) ;

//TBATS Functions
RcppExport SEXP makeTBATSWMatrix(SEXP smallPhi_s, SEXP kVector_s, SEXP arCoefs_s, SEXP maCoefs_s, SEXP tau_s) ;

RcppExport SEXP makeCIMatrix(SEXP k_s, SEXP m_s) ;

RcppExport SEXP makeSIMatrix(SEXP k_s, SEXP m_s) ;

RcppExport SEXP makeAIMatrix(SEXP C_s, SEXP S_s, SEXP k_s) ;

RcppExport SEXP updateTBATSGammaBold(SEXP gammaBold_s, SEXP kVector_s, SEXP gammaOne_s, SEXP gammaTwo_s) ;

RcppExport SEXP updateTBATSGMatrix(SEXP g_s, SEXP gammaBold_s, SEXP alpha_s, SEXP beta_s) ;

RcppExport SEXP calcTBATSFaster(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es, SEXP xNought_s) ;

#endif
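Added note (exposition, not from the package): every routine declared above implements
or supports the same linear innovations recursion, in the w, F, g, x notation used
throughout the code:

    \hat{y}_t = \mathbf{w}^{\top}\mathbf{x}_{t-1}, \qquad
    e_t = y_t - \hat{y}_t, \qquad
    \mathbf{x}_t = \mathbf{F}\,\mathbf{x}_{t-1} + \mathbf{g}\,e_t .

calcBATS() is a direct transcription of these three lines; calcBATSFaster() exploits
the known sparsity of F and g to avoid full matrix products.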
forecast/src/etsTargetFunction.cpp0000644000176200001440000001547414323125536017035 0ustar liggesusers#include <R.h>
#include <math.h> //for isnan, math.h is needed
//#include <Rcpp.h>
#include "etsTargetFunction.h"

#include <R_ext/Print.h>

void EtsTargetFunction::init(std::vector<double> & p_y, int p_nstate, int p_errortype,
		int p_trendtype, int p_seasontype, bool p_damped,
		std::vector<double> & p_lower, std::vector<double> & p_upper,
		std::string p_opt_crit, int p_nmse, std::string p_bounds, int p_m,
		bool p_optAlpha, bool p_optBeta, bool p_optGamma, bool p_optPhi,
		bool p_givenAlpha, bool p_givenBeta, bool p_givenGamma, bool p_givenPhi,
		double alpha, double beta, double gamma, double phi) {

	this->y = p_y;
	this->n = this->y.size();
	this->nstate = p_nstate;
	this->errortype = p_errortype;
	this->trendtype = p_trendtype;
	this->seasontype = p_seasontype;
	this->damped = p_damped;
	this->lower = p_lower;
	this->upper = p_upper;
	this->opt_crit = p_opt_crit;
	this->nmse = p_nmse;
	this->bounds = p_bounds;
	this->m = p_m;

	this->optAlpha = p_optAlpha;
	this->optBeta = p_optBeta;
	this->optGamma = p_optGamma;
	this->optPhi = p_optPhi;

	this->givenAlpha = p_givenAlpha;
	this->givenBeta = p_givenBeta;
	this->givenGamma = p_givenGamma;
	this->givenPhi = p_givenPhi;

	/*
	Rprintf("optAlpha: %d\n", optAlpha);
	Rprintf("optBeta: %d\n", optBeta);
	Rprintf("optGamma: %d\n", optGamma);
	Rprintf("optPhi: %d\n", optPhi);
	Rprintf("givenAlpha: %d\n", givenAlpha);
	Rprintf("givenBeta: %d\n", givenBeta);
	Rprintf("givenGamma: %d\n", givenGamma);
	Rprintf("givenPhi: %d\n", givenPhi);
	*/

	this->alpha = alpha;
	this->beta = beta;
	this->gamma = gamma;
	this->phi = phi;

	this->lik = 0;
	this->objval = 0;

	//	for(int i=0; i < 10; i++) this->amse.push_back(0);
	//	for(int i=0; i < n; i++) this->e.push_back(0);

	this->amse.resize(30, 0);
	this->e.resize(n, 0);
}

void EtsTargetFunction::eval(const double* p_par, int p_par_length) {

	bool equal=true;

	//	---------show params----------
	//	Rprintf("par: ");
	//	for(int j=0;j < p_par_length;j++) {
	//		Rprintf("%f ", p_par[j]);
	//	}
	//	Rprintf(" objval: %f\n", this->objval);
	//Rprintf("\n");
	//	---------show params----------

	// Check if the parameter configuration has changed, if not, just return.
	if(p_par_length != this->par.size()) {
		equal=false;
	} else {
		for(int j=0;j < p_par_length;j++) {
			if(p_par[j] != this->par[j]) {
				equal=false;
				break;
			}
		}
	}

	if(equal) return;

	this->par.clear();

	for(int j=0;j < p_par_length;j++) {
		this->par.push_back(p_par[j]);
	}

	int j=0;
	if(optAlpha) this->alpha = par[j++];
	if(optBeta) this->beta = par[j++];
	if(optGamma) this->gamma = par[j++];
	if(optPhi) this->phi = par[j++];

	if(!this->check_params()) {
		this->objval = R_PosInf;
		return;
	}

	this->state.clear();
	for(int i=par.size()-nstate; i < par.size(); i++) {
		this->state.push_back(par[i]);
	}

	// Add extra state
	if(seasontype!=0) {//"N"=0, "M"=2

		//init.state <- c(init.state, m*(seasontype==2) - sum(init.state[(2+(trendtype!=0)):nstate]))
		double sum=0;
		for(int i=(1+((trendtype!=0) ? 1 : 0)); i<nstate; i++) sum += state[i];

		double new_state = m*((seasontype==2) ? 1 : 0) - sum;
		state.push_back(new_state);

		// Check states
		if(seasontype==2 && state[state.size()-1] < 0) {
			this->objval = R_PosInf;
			return;
		}

		//		seas.states <- init.state[-(1:(1+(trendtype!=0)))]
		//if(min(seas.states) < 0)
		//	return(1e8)
	};

	int p = state.size();
	for(int i=0; i <= p*this->y.size(); i++) state.push_back(0);

	etscalc(&this->y[0], &this->n, &this->state[0], &this->m,
			&this->errortype, &this->trendtype, &this->seasontype,
			&this->alpha, &this->beta, &this->gamma, &this->phi,
			&this->e[0], &this->lik, &this->amse[0], &this->nmse);

	// Avoid perfect fits
	if (this->lik < -1e10) this->lik = -1e10;

	// isnan() is a C99 function
	//if (isnan(this->lik)) this->lik = 1e8;
	if (ISNAN(this->lik)) this->lik = R_PosInf;

	if(fabs(this->lik+99999) < 1e-7) this->lik = R_PosInf;

	if(this->opt_crit=="lik") this->objval = this->lik;
	else if(this->opt_crit=="mse") this->objval = this->amse[0];
	else if(this->opt_crit=="amse") {
		//return(mean(e$amse[1:nmse]))
		double mean=0;
		for(int i=0;i < this->nmse;i++) {
			mean+=amse[i]/this->nmse;
		}
		this->objval=mean;
	}
	else if(this->opt_crit=="sigma") {
		//return(mean(e$e^2))
		double mean=0;
		int ne=e.size();
		for(int i=0;i<ne;i++) mean+=e[i]*e[i]/ne;
		this->objval=mean;
	}
	else if(this->opt_crit=="mae") {
		//return(mean(abs(e$e)))
		double mean=0;
		int ne=e.size();
		for(int i=0;i<ne;i++) mean+=fabs(e[i])/ne;
		this->objval=mean;
	}
}

bool EtsTargetFunction::check_params() {

	if(bounds != "admissible") {
		if(optAlpha) {
			if(alpha < lower[0] || alpha > upper[0]) return(false);
		}
		if(optBeta) {
			if(beta < lower[1] || beta > alpha || beta > upper[1]) return(false);
		}
		if(optPhi) {
			if(phi < lower[3] || phi > upper[3]) return(false);
		}
		if(optGamma) {
			if(gamma < lower[2] || gamma > 1-alpha || gamma > upper[2]) return(false);
		}
	}

	if(bounds != "usual") {
		if(!admissible()) return(false);
	}
	return(TRUE);
}

bool EtsTargetFunction::admissible() {

	if(phi < 0 || phi > 1+1e-8) return(false);

	//If gamma was set by the user or it is optimized, the bounds need to be enforced
	if(!optGamma && !givenGamma) {
		if(alpha < 1-1/phi || alpha > 1+1/phi) return(false);

		if(optBeta || givenBeta) {
			if(beta < alpha * (phi-1) || beta > (1+phi)*(2-alpha)) return(false);
		}
	}
	else if(m > 1) //Seasonal model
	{
		if(!optBeta && !givenBeta) beta = 0;

		//max(1-1/phi-alpha,0)
		double d = 1-1/phi-alpha;
		if(gamma < ((d > 0) ? d : 0) || gamma > 1+1/phi-alpha) return(false);
		if(alpha < 1-1/phi-gamma*(1-m+phi+phi*m)/(2*phi*m)) return(false);
		if(beta < -(1-phi)*(gamma/m+alpha)) return(false);

		// End of easy tests. Now use characteristic equation
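		// NOTE (added for exposition, not in the original source): the opr
		// coefficients pushed below are those of the characteristic polynomial
		//
		//   P(x) = x^(m+1) + (alpha+beta-phi) x^m
		//          + (alpha+beta-alpha*phi) (x^(m-1) + ... + x^2)
		//          + (alpha+beta-alpha*phi+gamma-1) x + phi*(1-alpha-gamma),
		//
		// stored in decreasing powers, matching the commented R line
		// `P <- c(...)` further down. The parameters are admissible only if
		// every root of P lies inside the unit circle, which is what the
		// cpolyroot() call and the max-modulus check verify.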
		std::vector<double> opr;
		opr.push_back(1);
		opr.push_back(alpha+beta-phi);
		for(int i=0;i<m-2;i++) opr.push_back(alpha+beta-alpha*phi);
		opr.push_back(alpha+beta-alpha*phi+gamma-1);
		opr.push_back(phi*(1-alpha-gamma));

		int degree = opr.size()-1;

		std::vector<double> opi;
		opi.resize(opr.size(),0);

		std::vector<double> zeror(degree);
		std::vector<double> zeroi(degree);

		Rboolean fail;

		cpolyroot(&opr[0], &opi[0], &degree, &zeror[0], &zeroi[0], &fail);

		double max = 0;
		for(int i=0;i<degree;i++) {
			double abs_val = sqrt(zeror[i]*zeror[i] + zeroi[i]*zeroi[i]);
			if(abs_val>max) max = abs_val;
		}

		//Rprintf("maxpolyroot: %f\n", max);

		if(max > 1+1e-10) return(false);

		// P <- c(phi*(1-alpha-gamma),alpha+beta-alpha*phi+gamma-1,rep(alpha+beta-alpha*phi,m-2),(alpha+beta-phi),1)
		// roots <- polyroot(P)
		// if(max(abs(roots)) > 1+1e-10) return(false);
	}

	//Passed all tests
	return(true);
}
forecast/src/etsTargetFunction.h0000644000176200001440000000267014323125536016501 0ustar liggesusers#include <R.h>
#include <vector>

extern "C" {

void etscalc(double *, int *, double *, int *, int *, int *, int *,
		double *, double *, double *, double *, double *, double *, double *, int *);

void cpolyroot(double *opr, double *opi, int *degree, double *zeror,
		double *zeroi, Rboolean *fail);

}

class EtsTargetFunction {

public:

	void eval(const double* p_var, int p_var_length);
	void init(std::vector<double> & p_y, int p_nstate, int p_errortype,
			int p_trendtype, int p_seasontype, bool p_damped,
			std::vector<double> & p_lower, std::vector<double> & p_upper,
			std::string p_opt_crit, int p_nmse, std::string p_bounds, int p_m,
			bool p_optAlpha, bool p_optBeta, bool p_optGamma, bool p_optPhi,
			bool p_givenAlpha, bool p_givenBeta, bool p_givenGamma, bool p_givenPhi,
			double alpha, double beta, double gamma, double phi);

	double getObjVal() { return(objval); };

private:

	bool check_params();
	bool admissible();

	std::vector<double> par;
	std::vector<double> y;

	int nstate;
	int errortype;
	int trendtype;
	int seasontype;
	bool damped;

	std::vector<double> par_noopt;
	std::vector<double> lower;
	std::vector<double> upper;

	std::string opt_crit;
	int nmse;
	std::string bounds;
	int m;
	int n;

	std::vector<double> state;
	double alpha, beta, gamma, phi;

	std::vector<double> e;
	std::vector<double> amse;

	double lik, objval;

	bool optAlpha, optBeta, optGamma, optPhi,
	givenAlpha, givenBeta, givenGamma, givenPhi;

};
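Added note (exposition, not from the package): the `lik` value minimized through this
class is, up to an additive constant, minus twice the ETS log-likelihood. In the
notation of etscalc.c above,

    \mathrm{lik} = n \log\Big(\sum_{t=1}^{n} e_t^2\Big) + 2\sum_{t=1}^{n} \log\lvert f_t \rvert ,

where f_t is the one-step forecast and the second term enters only for multiplicative
errors (`*lik += 2*lik2` in etscalc.c).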
forecast/src/Makevars0000644000176200001440000000017614567526547014360 0ustar liggesusersPKG_CXXFLAGS = $(SHLIB_OPENMP_CXXFLAGS) -DR_NO_REMAP
PKG_LIBS = $(SHLIB_OPENMP_CXXFLAGS) $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS)
forecast/src/makeTBATSMatrices.cpp0000644000176200001440000000562514323125536016572 0ustar liggesusers#include "calcBATS.h"

using namespace Rcpp ;

SEXP makeTBATSWMatrix(SEXP smallPhi_s, SEXP kVector_s, SEXP arCoefs_s, SEXP maCoefs_s, SEXP tau_s) {
	BEGIN_RCPP

	double *smallPhi, *arCoefs, *maCoefs;
	int *kVector, *tau;
	int adjustPhi = 0;
	R_len_t numSeasonal = 0, numCols = 1, p = 0, q = 0;

	if(!Rf_isNull(smallPhi_s)) {
		smallPhi = REAL(smallPhi_s);
		adjustPhi = 1;
		numCols = numCols + 1;
	}
	if(!Rf_isNull(kVector_s)) {
		tau = &INTEGER(tau_s)[0];
		kVector = INTEGER(kVector_s);
		numSeasonal = LENGTH(kVector_s);
		numCols = numCols + *tau;
	}
	if(!Rf_isNull(arCoefs_s)) {
		arCoefs = REAL(arCoefs_s);
		p = LENGTH(arCoefs_s);
		numCols = numCols + p;
	}
	if(!Rf_isNull(maCoefs_s)) {
		maCoefs = REAL(maCoefs_s);
		q = LENGTH(maCoefs_s);
		numCols = numCols + q;
	}

	NumericMatrix wTranspose_r(1, numCols);
	arma::mat wTranspose(wTranspose_r.begin(), wTranspose_r.nrow(), wTranspose_r.ncol(), false);

	if(!Rf_isNull(kVector_s)) {
		wTranspose.zeros();
		int position = adjustPhi;
		for(R_len_t s = 0; s < numSeasonal; s++) {
			//wTranspose.submat(0,(position+1), 0, (position + kVector[s])) = arma::ones(1, kVector[s]);
			for(int j = (position+1); j <= (position + kVector[s]); j++) {
				wTranspose(0,j) = 1;
			}
			position = position + (2 * kVector[s]);
		}
	}

	wTranspose(0,0) = 1;
	if(adjustPhi == 1) {
		wTranspose(0,1) = *smallPhi;
	}
	if(!Rf_isNull(arCoefs_s)) {
		for(R_len_t i = 1; i <= p; i++) {
			wTranspose(0,(adjustPhi + *tau +i)) = arCoefs[(i-1)];
		}
	}
	if(!Rf_isNull(maCoefs_s)) {
		for(R_len_t i = 1; i <= q; i++) {
			wTranspose(0,(adjustPhi + *tau + p + i)) = maCoefs[(i-1)];
		}
	}

	arma::mat w = arma::trans(wTranspose);

	smallPhi = 0;
	arCoefs = 0;
	maCoefs = 0;
	kVector = 0;

	return List::create(
			Named("w") = w,
			Named("w.transpose") = wTranspose
			);

	END_RCPP
}

SEXP makeCIMatrix(SEXP k_s, SEXP m_s) {
	BEGIN_RCPP

	double pi = arma::datum::pi;
	double lambda, *m;
	int *k;

	k = &INTEGER(k_s)[0];
	m = &REAL(m_s)[0];

	NumericMatrix C(*k, *k);

	for(int j = 1; j<=*k; j++) {
		lambda = (2 * pi * j) / *m;
		C((j-1),(j-1)) = std::cos(lambda);
	}

	return wrap(C);

	END_RCPP
}

SEXP makeSIMatrix(SEXP k_s, SEXP m_s) {
	BEGIN_RCPP

	double pi = arma::datum::pi;
	double lambda, *m;
	int *k;

	k = &INTEGER(k_s)[0];
	m = &REAL(m_s)[0];

	NumericMatrix S(*k, *k);

	for(int j = 1; j<=*k; j++) {
		lambda = (2 * pi * j) / *m;
		S((j-1),(j-1)) = std::sin(lambda);
	}

	return wrap(S);

	END_RCPP
}

SEXP makeAIMatrix(SEXP C_s, SEXP S_s, SEXP k_s) {

	int *k;
	k = &INTEGER(k_s)[0];

	NumericMatrix C_r(C_s);
	NumericMatrix S_r(S_s);

	arma::mat C(C_r.begin(), C_r.nrow(), C_r.ncol(), false);
	arma::mat S(S_r.begin(), S_r.nrow(), S_r.ncol(), false);

	arma::mat A((*k * 2), (*k * 2));

	A.submat(0,0, (*k -1), (*k -1)) = C;
	A.submat(0,*k, (*k -1), ((*k *2) -1)) = S;
	A.submat(*k,0, ((*k *2) -1), (*k -1)) = (-1 * S);
	A.submat(*k,*k, ((*k *2) -1), ((*k *2) -1)) = C;

	return wrap(A);
}
forecast/src/etsTargetFunctionWrapper.cpp0000644000176200001440000001125414323125536020373 0ustar liggesusers
#include <Rcpp.h>
#include <R.h>
#include <Rdefines.h>

//For R's Nelder-Mead solver
#include <R_ext/Applic.h>
#include <R_ext/Print.h>

#include "etsTargetFunction.h"

// This function initializes all the parameters, constructs an
// object of type EtsTargetFunction and adds an external pointer
// to this object with name "ets.xptr"
// to the environment submitted as p_rho
//
RcppExport SEXP etsTargetFunctionInit(SEXP p_y, SEXP p_nstate, SEXP p_errortype,
		SEXP p_trendtype, SEXP p_seasontype, SEXP p_damped,
		SEXP p_lower, SEXP p_upper, SEXP p_opt_crit, SEXP p_nmse, SEXP p_bounds, SEXP p_m,
		SEXP p_optAlpha, SEXP p_optBeta, SEXP p_optGamma, SEXP p_optPhi,
		SEXP p_givenAlpha, SEXP p_givenBeta, SEXP p_givenGamma, SEXP p_givenPhi,
		SEXP p_alpha, SEXP p_beta, SEXP p_gamma, SEXP p_phi, SEXP p_rho) {

	BEGIN_RCPP;

	EtsTargetFunction* sp = new EtsTargetFunction();

	std::vector<double> y = Rcpp::as< std::vector<double> >(p_y);

	int nstate = Rcpp::as<int>(p_nstate);

	int errortype = Rcpp::as<int>(p_errortype);
	int trendtype = Rcpp::as<int>(p_trendtype);
	int seasontype = Rcpp::as<int>(p_seasontype);

	bool damped = Rcpp::as<bool>(p_damped);

	std::vector<double> lower = Rcpp::as< std::vector<double> >(p_lower);
	std::vector<double> upper = Rcpp::as< std::vector<double> >(p_upper);

	std::string opt_crit = Rcpp::as<std::string>(p_opt_crit);
	int nmse = Rcpp::as<int>(p_nmse);

	std::string bounds = Rcpp::as< std::string >(p_bounds);
	int m = Rcpp::as<int>(p_m);

	bool optAlpha = Rcpp::as<bool>(p_optAlpha);
	bool optBeta = Rcpp::as<bool>(p_optBeta);
	bool optGamma = Rcpp::as<bool>(p_optGamma);
	bool optPhi = Rcpp::as<bool>(p_optPhi);

	bool givenAlpha = Rcpp::as<bool>(p_givenAlpha);
	bool givenBeta = Rcpp::as<bool>(p_givenBeta);
	bool givenGamma = Rcpp::as<bool>(p_givenGamma);
	bool givenPhi = Rcpp::as<bool>(p_givenPhi);

	double alpha = Rcpp::as<double>(p_alpha);
	double beta = Rcpp::as<double>(p_beta);
	double gamma = Rcpp::as<double>(p_gamma);
	double phi = Rcpp::as<double>(p_phi);

	sp->init(y, nstate, errortype, trendtype, seasontype, damped, lower, upper,
			opt_crit, nmse, bounds, m, optAlpha, optBeta, optGamma, optPhi,
			givenAlpha, givenBeta, givenGamma,
			givenPhi, alpha, beta, gamma, phi);

	Rcpp::Environment e(p_rho);
	e["ets.xptr"] = Rcpp::XPtr<EtsTargetFunction>( sp, true );

	return Rcpp::wrap(e);

	END_RCPP;
}

//	RcppExport double targetFunctionRmalschains(SEXP p_par, SEXP p_env)
//	{
//		Rcpp::NumericVector par(p_par);
//
//		Rcpp::Environment e(p_env);
//		Rcpp::XPtr<EtsTargetFunction> sp(e.get("ets.xptr"));
//
//		sp->eval(par.begin(), par.size());
//
//		//return Rcpp::wrap(sp->getObjVal());
//		return sp->getObjVal();
//	}

//	RcppExport SEXP etsGetTargetFunctionRmalschainsPtr() {
//		typedef double (*funcPtr)(SEXP, SEXP);
//		return (Rcpp::XPtr<funcPtr>(new funcPtr(&targetFunctionRmalschains)));
//	}

/*
RcppExport SEXP targetFunctionRdonlp2(SEXP p_var, SEXP p_env)
{
	Rcpp::Environment e(p_env);
	Rcpp::XPtr<EtsTargetFunction> sp(e.get("ets.xptr"));

	Rcpp::NumericVector var(p_var);

	int mode = var[0];
	int fun_id = var[1];

	sp->eval(var.begin()+2, var.size()-2);

	if(mode == 0) {
		if(fun_id == 0) {
			return Rcpp::wrap(sp->getObjVal());
		} else {
			return Rcpp::wrap(0);
			//return Rcpp::wrap(sp->restrictions[fun_id-1]);
		}
	} else if(mode==1) {
		// error("Gradients are not implemented, exiting.");
	};

	return R_NilValue;
}

RcppExport SEXP etsGetTargetFunctionRdonlp2Ptr() {
	typedef SEXP (*funcPtr)(SEXP, SEXP);
	return (Rcpp::XPtr<funcPtr>(new funcPtr(&targetFunctionRdonlp2)));
}
*/

double targetFunctionEtsNelderMead(int n, double *par, void *ex) {

	EtsTargetFunction* sp = (EtsTargetFunction*) ex;

	sp->eval(par, n);

	return sp->getObjVal();
}

RcppExport SEXP etsNelderMead(SEXP p_var, SEXP p_env, SEXP p_abstol,
		SEXP p_intol, SEXP p_alpha, SEXP p_beta, SEXP p_gamma,
		SEXP p_trace, SEXP p_maxit) {

	double abstol = Rcpp::as<double>(p_abstol);
	double intol = Rcpp::as<double>(p_intol);
	double alpha = Rcpp::as<double>(p_alpha);
	double beta= Rcpp::as<double>(p_beta);
	double gamma= Rcpp::as<double>(p_gamma);

	int trace = Rcpp::as<int>(p_trace);
	int maxit = Rcpp::as<int>(p_maxit);

	int fncount = 0, fail=0;
	double Fmin = 0.0;

	Rcpp::NumericVector dpar(p_var);
	Rcpp::NumericVector opar(dpar.size());

	Rcpp::Environment e(p_env);
	Rcpp::XPtr<EtsTargetFunction> sp(e.get("ets.xptr"));

	double (*funcPtr)(int n, double *par, void *ex) = targetFunctionEtsNelderMead;

	nmmin(dpar.size(), dpar.begin(), opar.begin(), &Fmin, funcPtr,
			&fail, abstol, intol, sp, alpha, beta, gamma, trace, &fncount, maxit);

	return Rcpp::List::create(Rcpp::Named("value") = Fmin,
			Rcpp::Named("par") = opar,
			Rcpp::Named("fail") = fail,
			Rcpp::Named("fncount") = fncount);
}
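A rough R-level sketch of how this machinery is reached (illustrative only; users call
the exported ets() function and typically never touch the C++ layer directly):

    library(forecast)
    fit <- ets(woolyrnq)   # estimation typically runs through etsTargetFunctionInit()/etsNelderMead()
    fit$par                # the optimized alpha/beta/gamma/phi and initial states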
forecast/src/etspolyroot.c0000644000176200001440000003534614353422767015434 0ustar liggesusers/* Formerly src/appl/cpoly.c:
 *
 *  Copyright (C) 1997-1998 Ross Ihaka
 *  Copyright (C) 1999-2001 R Core Team
 *
 *	cpoly finds the zeros of a complex polynomial.
 *
 *	On Entry
 *
 *	opr, opi      -	 double precision vectors of real and
 *			 imaginary parts of the coefficients in
 *			 order of decreasing powers.
 *
 *	degree	      -	 int degree of polynomial.
 *
 *
 *	On Return
 *
 *	zeror, zeroi  -	 output double precision vectors of
 *			 real and imaginary parts of the zeros.
 *
 *	fail	      -	 output int parameter,	true  only if
 *			 leading coefficient is zero or if cpoly
 *			 has found fewer than degree zeros.
 *
 *	The program has been written to reduce the chance of overflow
 *	occurring. If it does occur, there is still a possibility that
 *	the zerofinder will work provided the overflowed quantity is
 *	replaced by a large number.
 *
 *	This is a C translation of the following.
 *
 *	TOMS Algorithm 419
 *	Jenkins and Traub.
 *	Comm. ACM 15 (1972) 97-99.
 *
 *	Ross Ihaka
 *	February 1997
 */

#include <R_ext/Arith.h> /* for declaration of hypot */
#include <R_ext/Memory.h> /* for declaration of R_alloc */

#include <float.h> /* for FLT_RADIX */

#include <Rmath.h> /* for R_pow_di */

static void calct(Rboolean *);
static Rboolean fxshft(int, double *, double *);
static Rboolean vrshft(int, double *, double *);
static void nexth(Rboolean);
static void noshft(int);

static void polyev(int, double, double,
		   double *, double *, double *, double *, double *, double *);
static double errev(int, double *, double *, double, double, double, double);
static double cpoly_cauchy(int, double *, double *);
static double cpoly_scale(int, double *, double, double, double, double);
static void cdivid(double, double, double, double, double *, double *);

/* Global Variables (too many!) */

static int nn;
static double *pr, *pi, *hr, *hi, *qpr, *qpi, *qhr, *qhi, *shr, *shi;
static double sr, si;
static double tr, ti;
static double pvr, pvi;

static const double eta =  DBL_EPSILON;
static const double are = /* eta = */DBL_EPSILON;
static const double mre = 2. * M_SQRT2 * /* eta, i.e. */DBL_EPSILON;
static const double infin = DBL_MAX;

void cpolyroot(double *opr, double *opi, int *degree,
	       double *zeror, double *zeroi, Rboolean *fail)
{
    static const double smalno = DBL_MIN;
    static const double base = (double)FLT_RADIX;
    static int d_n, i, i1, i2;
    static double zi, zr, xx, yy;
    static double bnd, xxx;
    Rboolean conv;
    int d1;
    double *tmp;
    static const double cosr =/* cos 94 */ -0.06975647374412529990;
    static const double sinr =/* sin 94 */  0.99756405025982424767;

    xx = M_SQRT1_2;/* 1/sqrt(2) = 0.707.... */

    yy = -xx;
    *fail = FALSE;

    nn = *degree;
    d1 = nn - 1;

    /* algorithm fails if the leading coefficient is zero. */

    if (opr[0] == 0. && opi[0] == 0.) {
	*fail = TRUE;
	return;
    }

    /* remove the zeros at the origin if any. */

    while (opr[nn] == 0. && opi[nn] == 0.) {
	d_n = d1-nn+1;
	zeror[d_n] = 0.;
	zeroi[d_n] = 0.;
	nn--;
    }
    nn++;
    /*-- Now, global var.  nn := #{coefficients} = (relevant degree)+1 */

    if (nn == 1) return;

    /* Use a single allocation as these are small */
    tmp = (double *) R_alloc((size_t) (10*nn), sizeof(double));
    pr = tmp; pi = tmp + nn; hr = tmp + 2*nn; hi = tmp + 3*nn;
    qpr = tmp + 4*nn; qpi = tmp + 5*nn; qhr = tmp + 6*nn; qhi = tmp + 7*nn;
    shr = tmp + 8*nn; shi = tmp + 9*nn;

    /* make a copy of the coefficients and shr[] = | p[] | */
    for (i = 0; i < nn; i++) {
	pr[i] = opr[i];
	pi[i] = opi[i];
	shr[i] = hypot(pr[i], pi[i]);
    }

    /* scale the polynomial with factor 'bnd'. */
    bnd = cpoly_scale(nn, shr, eta, infin, smalno, base);
    if (bnd != 1.) {
	for (i=0; i < nn; i++) {
	    pr[i] *= bnd;
	    pi[i] *= bnd;
	}
    }

    /* start the algorithm for one zero */

    while (nn > 2) {

	/* calculate bnd, a lower bound on the modulus of the zeros. */

	for (i=0 ; i < nn ; i++)
	    shr[i] = hypot(pr[i], pi[i]);
	bnd = cpoly_cauchy(nn, shr, shi);

	/* outer loop to control 2 major passes */
	/* with different sequences of shifts */

	for (i1 = 1; i1 <= 2; i1++) {

	    /* first stage calculation, no shift */

	    noshft(5);

	    /*	inner loop to select a shift */
	    for (i2 = 1; i2 <= 9; i2++) {

		/* shift is chosen with modulus bnd */
		/* and amplitude rotated by 94 degrees */
		/* from the previous shift */

		xxx= cosr * xx - sinr * yy;
		yy = sinr * xx + cosr * yy;
		xx = xxx;
		sr = bnd * xx;
		si = bnd * yy;

		/*  second stage calculation, fixed shift */

		conv = fxshft(i2 * 10, &zr, &zi);
		if (conv)
		    goto L10;
	    }
	}

	/* the zerofinder has failed on two major passes */
	/* return empty handed */

	*fail = TRUE;
	return;

	/* the second stage jumps directly to the third stage iteration.
* if successful, the zero is stored and the polynomial deflated. */ L10: d_n = d1+2 - nn; zeror[d_n] = zr; zeroi[d_n] = zi; --nn; for (i=0; i < nn ; i++) { pr[i] = qpr[i]; pi[i] = qpi[i]; } }/*while*/ /* calculate the final zero and return */ cdivid(-pr[1], -pi[1], pr[0], pi[0], &zeror[d1], &zeroi[d1]); return; } /* Computes the derivative polynomial as the initial * polynomial and computes l1 no-shift h polynomials. */ static void noshft(int l1) { int i, j, jj, n = nn - 1, nm1 = n - 1; double t1, t2, xni; for (i=0; i < n; i++) { xni = (double)(nn - i - 1); hr[i] = xni * pr[i] / n; hi[i] = xni * pi[i] / n; } for (jj = 1; jj <= l1; jj++) { if (hypot(hr[n-1], hi[n-1]) <= eta * 10.0 * hypot(pr[n-1], pi[n-1])) { /* If the constant term is essentially zero, */ /* shift h coefficients. */ for (i = 1; i <= nm1; i++) { j = nn - i; hr[j-1] = hr[j-2]; hi[j-1] = hi[j-2]; } hr[0] = 0.; hi[0] = 0.; } else { cdivid(-pr[nn-1], -pi[nn-1], hr[n-1], hi[n-1], &tr, &ti); for (i = 1; i <= nm1; i++) { j = nn - i; t1 = hr[j-2]; t2 = hi[j-2]; hr[j-1] = tr * t1 - ti * t2 + pr[j-1]; hi[j-1] = tr * t2 + ti * t1 + pi[j-1]; } hr[0] = pr[0]; hi[0] = pi[0]; } } } /* Computes l2 fixed-shift h polynomials and tests for convergence. * initiates a variable-shift iteration and returns with the * approximate zero if successful. */ static Rboolean fxshft(int l2, double *zr, double *zi) { /* l2 - limit of fixed shift steps * zr,zi - approximate zero if convergence (result TRUE) * * Return value indicates convergence of stage 3 iteration * * Uses global (sr,si), nn, pr[], pi[], .. (all args of polyev() !) */ Rboolean pasd, boool, test; static double svsi, svsr; static int i, j, n; static double oti, otr; n = nn - 1; /* evaluate p at s. */ polyev(nn, sr, si, pr, pi, qpr, qpi, &pvr, &pvi); test = TRUE; pasd = FALSE; /* calculate first t = -p(s)/h(s). */ calct(&boool); /* main loop for one second stage step. */ for (j=1; j<=l2; j++) { otr = tr; oti = ti; /* compute next h polynomial and new t. */ nexth(boool); calct(&boool); *zr = sr + tr; *zi = si + ti; /* test for convergence unless stage 3 has */ /* failed once or this is the last h polynomial. */ if (!boool && test && j != l2) { if (hypot(tr - otr, ti - oti) >= hypot(*zr, *zi) * 0.5) { pasd = FALSE; } else if (! pasd) { pasd = TRUE; } else { /* the weak convergence test has been */ /* passed twice, start the third stage */ /* iteration, after saving the current */ /* h polynomial and shift. */ for (i = 0; i < n; i++) { shr[i] = hr[i]; shi[i] = hi[i]; } svsr = sr; svsi = si; if (vrshft(10, zr, zi)) { return TRUE; } /* the iteration failed to converge. */ /* turn off testing and restore */ /* h, s, pv and t. */ test = FALSE; for (i=1 ; i<=n ; i++) { hr[i-1] = shr[i-1]; hi[i-1] = shi[i-1]; } sr = svsr; si = svsi; polyev(nn, sr, si, pr, pi, qpr, qpi, &pvr, &pvi); calct(&boool); } } } /* attempt an iteration with final h polynomial */ /* from second stage. */ return(vrshft(10, zr, zi)); } /* carries out the third stage iteration. */ static Rboolean vrshft(int l3, double *zr, double *zi) { /* l3 - limit of steps in stage 3. * zr,zi - on entry contains the initial iterate; * if the iteration converges it contains * the final iterate on exit. * Returns TRUE if iteration converges * * Assign and uses GLOBAL sr, si */ Rboolean boool, b; static int i, j; static double r1, r2, mp, ms, tp, relstp; static double omp; b = FALSE; sr = *zr; si = *zi; /* main loop for stage three */ for (i = 1; i <= l3; i++) { /* evaluate p at s and test for convergence. 
*/ polyev(nn, sr, si, pr, pi, qpr, qpi, &pvr, &pvi); mp = hypot(pvr, pvi); ms = hypot(sr, si); if (mp <= 20. * errev(nn, qpr, qpi, ms, mp, /*are=*/eta, mre)) { goto L_conv; } /* polynomial value is smaller in value than */ /* a bound on the error in evaluating p, */ /* terminate the iteration. */ if (i != 1) { if (!b && mp >= omp && relstp < .05) { /* iteration has stalled. probably a */ /* cluster of zeros. do 5 fixed shift */ /* steps into the cluster to force */ /* one zero to dominate. */ tp = relstp; b = TRUE; if (relstp < eta) tp = eta; r1 = sqrt(tp); r2 = sr * (r1 + 1.) - si * r1; si = sr * r1 + si * (r1 + 1.); sr = r2; polyev(nn, sr, si, pr, pi, qpr, qpi, &pvr, &pvi); for (j = 1; j <= 5; ++j) { calct(&boool); nexth(boool); } omp = infin; goto L10; } else { /* exit if polynomial value */ /* increases significantly. */ if (mp * .1 > omp) return FALSE; } } omp = mp; /* calculate next iterate. */ L10: calct(&boool); nexth(boool); calct(&boool); if (!boool) { relstp = hypot(tr, ti) / hypot(sr, si); sr += tr; si += ti; } } return FALSE; L_conv: *zr = sr; *zi = si; return TRUE; } static void calct(Rboolean *boool) { /* computes t = -p(s)/h(s). * boool - logical, set true if h(s) is essentially zero. */ int n = nn - 1; double hvi, hvr; /* evaluate h(s). */ polyev(n, sr, si, hr, hi, qhr, qhi, &hvr, &hvi); *boool = hypot(hvr, hvi) <= are * 10. * hypot(hr[n-1], hi[n-1]); if (!*boool) { cdivid(-pvr, -pvi, hvr, hvi, &tr, &ti); } else { tr = 0.; ti = 0.; } } static void nexth(Rboolean boool) { /* calculates the next shifted h polynomial. * boool : if TRUE h(s) is essentially zero */ int j, n = nn - 1; double t1, t2; if (!boool) { for (j=1; j < n; j++) { t1 = qhr[j - 1]; t2 = qhi[j - 1]; hr[j] = tr * t1 - ti * t2 + qpr[j]; hi[j] = tr * t2 + ti * t1 + qpi[j]; } hr[0] = qpr[0]; hi[0] = qpi[0]; } else { /* if h(s) is zero replace h with qh. */ for (j=1; j < n; j++) { hr[j] = qhr[j-1]; hi[j] = qhi[j-1]; } hr[0] = 0.; hi[0] = 0.; } } /*--------------------- Independent Complex Polynomial Utilities ----------*/ static void polyev(int n, double s_r, double s_i, double *p_r, double *p_i, double *q_r, double *q_i, double *v_r, double *v_i) { /* evaluates a polynomial p at s by the horner recurrence * placing the partial sums in q and the computed value in v_. */ int i; double t; q_r[0] = p_r[0]; q_i[0] = p_i[0]; *v_r = q_r[0]; *v_i = q_i[0]; for (i = 1; i < n; i++) { t = *v_r * s_r - *v_i * s_i + p_r[i]; q_i[i] = *v_i = *v_r * s_i + *v_i * s_r + p_i[i]; q_r[i] = *v_r = t; } } static double errev(int n, double *qr, double *qi, double ms, double mp, double a_re, double m_re) { /* bounds the error in evaluating the polynomial by the horner * recurrence. * * qr,qi - the partial sum vectors * ms - modulus of the point * mp - modulus of polynomial value * a_re,m_re - error bounds on complex addition and multiplication */ double e; int i; e = hypot(qr[0], qi[0]) * m_re / (a_re + m_re); for (i=0; i < n; i++) e = e*ms + hypot(qr[i], qi[i]); return e * (a_re + m_re) - mp * m_re; } static double cpoly_cauchy(int n, double *pot, double *q) { /* Computes a lower bound on the moduli of the zeros of a polynomial * pot[1:nn] is the modulus of the coefficients. */ double f, x, delf, dx, xm; int i, n1 = n - 1; pot[n1] = -pot[n1]; /* compute upper estimate of bound. */ x = exp((log(-pot[n1]) - log(pot[0])) / (double) n1); /* if newton step at the origin is better, use it. */ if (pot[n1-1] != 0.) { xm = -pot[n1] / pot[n1-1]; if (xm < x) x = xm; } /* chop the interval (0,x) unitl f le 0. 
*/ for(;;) { xm = x * 0.1; f = pot[0]; for (i = 1; i < n; i++) f = f * xm + pot[i]; if (f <= 0.0) { break; } x = xm; } dx = x; /* do Newton iteration until x converges to two decimal places. */ while (fabs(dx / x) > 0.005) { q[0] = pot[0]; for(i = 1; i < n; i++) q[i] = q[i-1] * x + pot[i]; f = q[n1]; delf = q[0]; for(i = 1; i < n1; i++) delf = delf * x + q[i]; dx = f / delf; x -= dx; } return x; } static double cpoly_scale(int n, double *pot, double eps, double BIG, double small, double base) { /* Returns a scale factor to multiply the coefficients of the polynomial. * The scaling is done to avoid overflow and to avoid * undetected underflow interfering with the convergence criterion. * The factor is a power of the base. * pot[1:n] : modulus of coefficients of p * eps,BIG, * small,base - constants describing the floating point arithmetic. */ int i, ell; double x, high, sc, lo, min_, max_; /* find largest and smallest moduli of coefficients. */ high = sqrt(BIG); lo = small / eps; max_ = 0.; min_ = BIG; for (i = 0; i < n; i++) { x = pot[i]; if (x > max_) max_ = x; if (x != 0. && x < min_) min_ = x; } /* scale only if there are very large or very small components. */ if (min_ < lo || max_ > high) { x = lo / min_; if (x <= 1.) sc = 1. / (sqrt(max_) * sqrt(min_)); else { sc = x; if (BIG / sc > max_) sc = 1.0; } ell = (int) (log(sc) / log(base) + 0.5); return R_pow_di(base, ell); } else return 1.0; } static void cdivid(double ar, double ai, double br, double bi, double *cr, double *ci) { /* complex division c = a/b, i.e., (cr +i*ci) = (ar +i*ai) / (br +i*bi), avoiding overflow. */ double d, r; if (br == 0. && bi == 0.) { /* division by zero, c = infinity. */ *cr = *ci = R_PosInf; } else if (fabs(br) >= fabs(bi)) { r = bi / br; d = br + r * bi; *cr = (ar + ai * r) / d; *ci = (ai - ar * r) / d; } else { r = br / bi; d = bi + r * br; *cr = (ar * r + ai) / d; *ci = (ai * r - ar) / d; } } /* static double cpoly_cmod(double *r, double *i) * --> replaced by hypot() everywhere */ forecast/src/updateMatrices.cpp0000644000176200001440000001126514323125536016336 0ustar liggesusers/* * updateMatrices.cpp * * Created on: 03/11/2011 * Author: srazbash */ #include "calcBATS.h" using namespace Rcpp ; SEXP updateFMatrix(SEXP F_s, SEXP smallPhi_s, SEXP alpha_s, SEXP beta_s, SEXP gammaBold_s, SEXP ar_s, SEXP ma_s, SEXP tau_s) { BEGIN_RCPP NumericMatrix F_r(F_s); arma::mat F(F_r.begin(), F_r.nrow(), F_r.ncol(), false); double *beta, *alpha = &REAL(alpha_s)[0]; int *tau, p, q, betaAdjust; int zero = 0; if(!Rf_isNull(gammaBold_s)) { tau = &INTEGER(tau_s)[0]; } else { tau = &zero; } if(!Rf_isNull(beta_s)) { beta = &REAL(beta_s)[0]; double *smallPhi = &REAL(smallPhi_s)[0]; F(0,1) = *smallPhi; F(1,1) = *smallPhi; betaAdjust = 1; } else { betaAdjust = 0; } if(!Rf_isNull(ar_s)) { //Rprintf("before arma::mat ar\n"); NumericMatrix ar_r(ar_s); arma::mat ar(ar_r.begin(), ar_r.nrow(), ar_r.ncol(), false); //Rprintf("after arma::mat ar\n"); p = ar.n_cols; //Rprintf("line-a-before\n"); F.submat(0,(betaAdjust+ *tau+1),0,(betaAdjust+ *tau+p)) = *alpha * ar; //Rprintf("line-a-after\n"); if(betaAdjust == 1) { //Rprintf("line-b-before\n"); F.submat(1,(betaAdjust+ *tau+1),1,(betaAdjust+ *tau+p)) = *beta * ar; //Rprintf("line-b-after\n"); } if(*tau > 0) { //Rprintf("la\n"); NumericMatrix gammaBold_r(gammaBold_s); //Rprintf("la-2\n"); arma::mat gammaBold(gammaBold_r.begin(), gammaBold_r.nrow(), gammaBold_r.ncol(), false); //Rprintf("la-3\n"); //arma::mat gammaBold = as(gammaBold_s); arma::mat B = trans(gammaBold) * ar; 
//Rprintf("line-c-before\n"); F.submat((1+betaAdjust),(betaAdjust+ *tau+1), (betaAdjust+ *tau), (betaAdjust+ *tau+p)) = B; //Rprintf("line-c-after\n"); } //Rprintf("line-d-before\n"); F.submat((betaAdjust+ *tau+1),(betaAdjust+ *tau+1),(betaAdjust+ *tau+1),(betaAdjust+ *tau+p)) = ar; //Rprintf("line-d-after\n"); } else { p = 0; } if(!Rf_isNull(ma_s)) { NumericMatrix ma_r(ma_s); arma::mat ma(ma_r.begin(), ma_r.nrow(), ma_r.ncol(), false); q = ma.n_cols; //Rprintf("one-before\n"); F.submat(0,(betaAdjust+ *tau+p+1),0,(betaAdjust+ *tau+p+q)) = *alpha * ma; //Rprintf("one-after\n"); if(betaAdjust == 1) { //Rprintf("two-before\n"); F.submat(1,(betaAdjust+ *tau+p+1),1,(betaAdjust+ *tau+p+q)) = *beta * ma; ///Rprintf("two-after\n"); } if(*tau > 0) { //arma::mat gammaBold = as(gammaBold_s); NumericMatrix gammaBold_r(gammaBold_s); arma::mat gammaBold(gammaBold_r.begin(), gammaBold_r.nrow(), gammaBold_r.ncol(), false); arma::mat C = trans(gammaBold) * ma; //Rprintf("three-before\n"); F.submat((1+betaAdjust),(betaAdjust+ *tau+p+1), (betaAdjust+ *tau), (betaAdjust+ *tau+p+q)) = C; //Rprintf("three-after\n"); } if(!Rf_isNull(ar_s)) { //Rprintf("four-before\n"); F.submat((betaAdjust+ *tau+1), (betaAdjust+ *tau+p+1), (betaAdjust+ *tau+1), (betaAdjust+ *tau+p+q)) = ma; //Rprintf("four-after\n"); } } else { q = 0; } return R_NilValue; END_RCPP } SEXP updateWtransposeMatrix(SEXP wTranspose_s, SEXP smallPhi_s, SEXP tau_s, SEXP arCoefs_s, SEXP maCoefs_s, SEXP p_s, SEXP q_s) { BEGIN_RCPP NumericMatrix wTranspose(wTranspose_s); double *arCoefs, *maCoefs; int *p, *q, *tau, adjBeta = 0; p = &INTEGER(p_s)[0]; q = &INTEGER(q_s)[0]; tau = &INTEGER(tau_s)[0]; if(!Rf_isNull(smallPhi_s)) { adjBeta = 1; wTranspose(0,1) = REAL(smallPhi_s)[0]; } if(*p > 0) { arCoefs = REAL(arCoefs_s); for(int i = 1; i <= *p; i++) { wTranspose(0,(adjBeta + *tau + i)) = arCoefs[(i - 1)]; } if(*q > 0) { maCoefs = REAL(maCoefs_s); for(int i = 1; i <= *q; i++) { wTranspose(0,(adjBeta + *tau + *p + i)) = maCoefs[(i - 1)]; } } } else if(*q > 0) { maCoefs = REAL(maCoefs_s); for(int i = 1; i <= *q; i++) { wTranspose(0,(adjBeta + *tau + i)) = maCoefs[(i - 1)]; } } return R_NilValue; END_RCPP } SEXP updateGMatrix(SEXP g_s, SEXP gammaBold_s, SEXP alpha_s, SEXP beta_s, SEXP gammaVector_s, SEXP seasonalPeriods_s) { BEGIN_RCPP int adjBeta = 0, *seasonalPeriods; double *gammaVector; NumericMatrix g(g_s); g(0,0) = REAL(alpha_s)[0]; if(!Rf_isNull(beta_s)) { g(1,0) = REAL(beta_s)[0]; adjBeta = 1; } if((!Rf_isNull(gammaVector_s))&&(!Rf_isNull(seasonalPeriods_s))) { NumericMatrix gammaBold(gammaBold_s); seasonalPeriods = INTEGER(seasonalPeriods_s); gammaVector = REAL(gammaVector_s); int position = adjBeta + 1; int bPos = 0; gammaBold(0,bPos) = gammaVector[0]; g(position, 0) = gammaVector[0]; if(LENGTH(gammaVector_s) > 1) { for(R_len_t s = 0; s < (LENGTH(seasonalPeriods_s)-1); s++) { position = position + seasonalPeriods[s]; bPos = bPos + seasonalPeriods[s]; g(position, 0) = gammaVector[(s+1)]; } } } return R_NilValue; END_RCPP } forecast/src/calcTBATS.cpp0000644000176200001440000000230214332530605015051 0ustar liggesusers#include "calcBATS.h" using namespace Rcpp ; SEXP calcTBATSFaster(SEXP ys, SEXP yHats, SEXP wTransposes, SEXP Fs, SEXP xs, SEXP gs, SEXP es, SEXP xNought_s) { BEGIN_RCPP NumericMatrix yr(ys); NumericMatrix yHatr(yHats); NumericMatrix wTransposer(wTransposes); NumericMatrix Fr(Fs); NumericMatrix xr(xs); NumericMatrix gr(gs); NumericMatrix er(es); NumericMatrix xNought_r(xNought_s); arma::mat y(yr.begin(), yr.nrow(), yr.ncol(), false); arma::mat 
yHat(yHatr.begin(), yHatr.nrow(), yHatr.ncol(), false); arma::mat wTranspose(wTransposer.begin(), wTransposer.nrow(), wTransposer.ncol(), false); arma::mat F(Fr.begin(), Fr.nrow(), Fr.ncol(), false); arma::mat x(xr.begin(), xr.nrow(), xr.ncol(), false); arma::mat g(gr.begin(), gr.nrow(), gr.ncol(), false); arma::mat e(er.begin(), er.nrow(), er.ncol(), false); arma::mat xNought(xNought_r.begin(), xNought_r.nrow(), xNought_r.ncol(), false); yHat.col(0) = wTranspose * xNought; e(0,0) = y(0, 0) - yHat(0, 0); x.col(0) = F * xNought + g * e(0,0); for(int t = 1; t < yr.ncol(); t++) { yHat.col(t) = wTranspose * x.col((t-1)); e(0,t) = y(0, t) - yHat(0, t); x.col(t) = F * x.col((t-1)) + g * e(0,t); } return R_NilValue; END_RCPP } forecast/src/registerDynamicSymbol.c0000644000176200001440000000034514150370574017342 0ustar liggesusers// Registering Dynamic Symbols #include <R.h> #include <Rinternals.h> #include <R_ext/Rdynload.h> void R_init_markovchain(DllInfo* info) { R_registerRoutines(info, NULL, NULL, NULL, NULL); R_useDynamicSymbols(info, TRUE); } forecast/NAMESPACE0000644000176200001440000002125014633664521013311 0ustar liggesusers# Generated by roxygen2: do not edit by hand S3method("[",msts) S3method(accuracy,default) S3method(accuracy,mforecast) S3method(as.Date,timeDate) S3method(as.character,Arima) S3method(as.character,bats) S3method(as.character,ets) S3method(as.character,tbats) S3method(as.data.frame,forecast) S3method(as.data.frame,mforecast) S3method(as.ts,forecast) S3method(autolayer,forecast) S3method(autolayer,mforecast) S3method(autolayer,msts) S3method(autolayer,mts) S3method(autolayer,ts) S3method(autoplot,Arima) S3method(autoplot,StructTS) S3method(autoplot,acf) S3method(autoplot,ar) S3method(autoplot,bats) S3method(autoplot,decomposed.ts) S3method(autoplot,ets) S3method(autoplot,forecast) S3method(autoplot,mforecast) S3method(autoplot,mpacf) S3method(autoplot,mstl) S3method(autoplot,msts) S3method(autoplot,mts) S3method(autoplot,seas) S3method(autoplot,splineforecast) S3method(autoplot,stl) S3method(autoplot,tbats) S3method(autoplot,ts) S3method(coef,ets) S3method(fitted,ARFIMA) S3method(fitted,Arima) S3method(fitted,ar) S3method(fitted,bats) S3method(fitted,ets) S3method(fitted,forecast_ARIMA) S3method(fitted,lagwalk) S3method(fitted,modelAR) S3method(fitted,nnetar) S3method(fitted,tbats) S3method(fitted,tslm) S3method(forecast,Arima) S3method(forecast,HoltWinters) S3method(forecast,StructTS) S3method(forecast,ar) S3method(forecast,baggedModel) S3method(forecast,bats) S3method(forecast,default) S3method(forecast,ets) S3method(forecast,forecast) S3method(forecast,forecast_ARIMA) S3method(forecast,fracdiff) S3method(forecast,lagwalk) S3method(forecast,lm) S3method(forecast,mlm) S3method(forecast,modelAR) S3method(forecast,mstl) S3method(forecast,mts) S3method(forecast,nnetar) S3method(forecast,stl) S3method(forecast,stlm) S3method(forecast,tbats) S3method(forecast,ts) S3method(forecast,varest) S3method(fortify,ts) S3method(getResponse,Arima) S3method(getResponse,ar) S3method(getResponse,baggedModel) S3method(getResponse,bats) S3method(getResponse,default) S3method(getResponse,fracdiff) S3method(getResponse,lm) S3method(getResponse,mforecast) S3method(getResponse,tbats) S3method(hfitted,Arima) S3method(hfitted,default) S3method(hfitted,ets) S3method(logLik,ets) S3method(modeldf,Arima) S3method(modeldf,bats) S3method(modeldf,default) S3method(modeldf,ets) S3method(modeldf,lagwalk) S3method(modeldf,lm) S3method(modeldf,meanf) S3method(nobs,ets) S3method(plot,Arima) S3method(plot,ar) S3method(plot,armaroots)
S3method(plot,bats) S3method(plot,ets) S3method(plot,forecast) S3method(plot,mforecast) S3method(plot,mpacf) S3method(plot,splineforecast) S3method(plot,tbats) S3method(predict,default) S3method(print,CVar) S3method(print,OCSBtest) S3method(print,baggedModel) S3method(print,bats) S3method(print,ets) S3method(print,forecast) S3method(print,forecast_ARIMA) S3method(print,lagwalk) S3method(print,mforecast) S3method(print,modelAR) S3method(print,msts) S3method(print,nnetar) S3method(print,nnetarmodels) S3method(print,summary.Arima) S3method(print,summary.ets) S3method(print,summary.forecast) S3method(print,summary.mforecast) S3method(print,tbats) S3method(residuals,ARFIMA) S3method(residuals,Arima) S3method(residuals,ar) S3method(residuals,bats) S3method(residuals,ets) S3method(residuals,forecast) S3method(residuals,forecast_ARIMA) S3method(residuals,nnetar) S3method(residuals,stlm) S3method(residuals,tbats) S3method(residuals,tslm) S3method(scale,ts) S3method(seasadj,decomposed.ts) S3method(seasadj,mstl) S3method(seasadj,seas) S3method(seasadj,stl) S3method(seasadj,tbats) S3method(simulate,Arima) S3method(simulate,ar) S3method(simulate,ets) S3method(simulate,fracdiff) S3method(simulate,lagwalk) S3method(simulate,modelAR) S3method(simulate,nnetar) S3method(simulate,tbats) S3method(subset,forecast) S3method(subset,msts) S3method(subset,ts) S3method(summary,Arima) S3method(summary,ets) S3method(summary,forecast) S3method(summary,mforecast) S3method(summary,tslm) S3method(window,msts) export("%>%") export(Acf) export(Arima) export(BoxCox) export(BoxCox.lambda) export(CV) export(CVar) export(Ccf) export(GeomForecast) export(InvBoxCox) export(Pacf) export(StatForecast) export(accuracy) export(arfima) export(arima.errors) export(arimaorder) export(auto.arima) export(autolayer) export(autoplot) export(baggedETS) export(baggedModel) export(bats) export(bizdays) export(bld.mbb.bootstrap) export(checkresiduals) export(croston) export(dm.test) export(dshw) export(easter) export(ets) export(findfrequency) export(forecast) export(forecast.ets) export(fourier) export(fourierf) export(geom_forecast) export(getResponse) export(ggAcf) export(ggCcf) export(ggPacf) export(gghistogram) export(gglagchull) export(gglagplot) export(ggmonthplot) export(ggseasonplot) export(ggsubseriesplot) export(ggtaperedacf) export(ggtaperedpacf) export(ggtsdisplay) export(holt) export(hw) export(is.Arima) export(is.acf) export(is.baggedModel) export(is.bats) export(is.constant) export(is.ets) export(is.forecast) export(is.mforecast) export(is.modelAR) export(is.nnetar) export(is.nnetarmodels) export(is.splineforecast) export(is.stlm) export(ma) export(meanf) export(modelAR) export(modeldf) export(monthdays) export(mstl) export(msts) export(na.interp) export(naive) export(ndiffs) export(nnetar) export(nsdiffs) export(ocsb.test) export(remainder) export(rwf) export(seasadj) export(seasonal) export(seasonaldummy) export(seasonaldummyf) export(seasonplot) export(ses) export(sindexf) export(snaive) export(splinef) export(stlf) export(stlm) export(taperedacf) export(taperedpacf) export(tbats) export(tbats.components) export(thetaf) export(trendcycle) export(tsCV) export(tsclean) export(tsdisplay) export(tslm) export(tsoutliers) if (getRversion() < "4.5.0") S3method(head, ts) if (getRversion() < "4.5.0") S3method(tail, ts) import(Rcpp) import(parallel) importFrom(colorspace,sequential_hcl) importFrom(fracdiff,diffseries) importFrom(fracdiff,fracdiff) importFrom(fracdiff,fracdiff.sim) importFrom(generics,accuracy) 
importFrom(generics,forecast) importFrom(ggplot2,autoplot) importFrom(ggplot2,fortify) importFrom(grDevices,gray) importFrom(grDevices,heat.colors) importFrom(grDevices,nclass.FD) importFrom(grDevices,palette) importFrom(graphics,abline) importFrom(graphics,axis) importFrom(graphics,grid) importFrom(graphics,hist) importFrom(graphics,layout) importFrom(graphics,lines) importFrom(graphics,mtext) importFrom(graphics,par) importFrom(graphics,plot) importFrom(graphics,points) importFrom(graphics,polygon) importFrom(graphics,text) importFrom(graphics,title) importFrom(lmtest,bgtest) importFrom(magrittr,"%>%") importFrom(nnet,nnet) importFrom(stats,"tsp<-") importFrom(stats,AIC) importFrom(stats,BIC) importFrom(stats,Box.test) importFrom(stats,acf) importFrom(stats,aggregate) importFrom(stats,approx) importFrom(stats,ar) importFrom(stats,arima) importFrom(stats,arima.sim) importFrom(stats,as.formula) importFrom(stats,as.ts) importFrom(stats,complete.cases) importFrom(stats,cycle) importFrom(stats,decompose) importFrom(stats,diffinv) importFrom(stats,end) importFrom(stats,extractAIC) importFrom(stats,filter) importFrom(stats,fitted) importFrom(stats,formula) importFrom(stats,frequency) importFrom(stats,hatvalues) importFrom(stats,is.mts) importFrom(stats,is.ts) importFrom(stats,ksmooth) importFrom(stats,lm) importFrom(stats,loess) importFrom(stats,logLik) importFrom(stats,lsfit) importFrom(stats,median) importFrom(stats,model.frame) importFrom(stats,na.contiguous) importFrom(stats,na.exclude) importFrom(stats,na.omit) importFrom(stats,na.pass) importFrom(stats,napredict) importFrom(stats,nobs) importFrom(stats,optim) importFrom(stats,optimize) importFrom(stats,pf) importFrom(stats,plot.ts) importFrom(stats,poly) importFrom(stats,predict) importFrom(stats,pt) importFrom(stats,qnorm) importFrom(stats,qt) importFrom(stats,quantile) importFrom(stats,reformulate) importFrom(stats,residuals) importFrom(stats,rnorm) importFrom(stats,runif) importFrom(stats,sd) importFrom(stats,simulate) importFrom(stats,smooth.spline) importFrom(stats,spec.ar) importFrom(stats,start) importFrom(stats,stl) importFrom(stats,supsmu) importFrom(stats,terms) importFrom(stats,time) importFrom(stats,ts) importFrom(stats,tsdiag) importFrom(stats,tsp) importFrom(stats,var) importFrom(stats,window) importFrom(timeDate,Easter) importFrom(timeDate,as.Date.timeDate) importFrom(timeDate,as.timeDate) importFrom(timeDate,difftimeDate) importFrom(timeDate,isBizday) importFrom(tseries,adf.test) importFrom(tseries,kpss.test) importFrom(tseries,pp.test) importFrom(urca,ur.df) importFrom(urca,ur.kpss) importFrom(urca,ur.pp) importFrom(utils,head) importFrom(utils,head.matrix) importFrom(utils,methods) importFrom(utils,packageVersion) importFrom(utils,tail) importFrom(utils,tail.matrix) importFrom(zoo,as.Date) importFrom(zoo,as.yearqtr) importFrom(zoo,rollmean) useDynLib(forecast, .registration = TRUE) forecast/NEWS.md0000644000176200001440000012601114634676474013204 0ustar liggesusers# forecast 8.23.0 * Prevented RNG state changing when the package is attached (#954, #955). * head.ts and tail.ts only defined for R < 4.5.0 due to new base R functions. 
# forecast 8.22.0 * hfitted now much faster for ARIMA models (danigiro, #949) * hfitted now much faster for ETS models, and produces fitted values from initial states (#950) # forecast 8.21.1 * nnetar now allows p or P to be 0 * Bug fixes and improved docs # forecast 8.21 * Fixed df calculation for Ljung-Box tests in checkresiduals * Fixed some broken tests # forecast 8.20 * Improvements to unit tests, and migrate to testthat 3e * Prevent failure in C23 mode # forecast 8.19 * Bug fixes # forecast 8.18 * Updated RW forecasts to use an unbiased estimate of sigma2 * Bug fixes # forecast 8.17.0 * Updated dm.test() to add alternative variance estimators (#898). * Added `simulate.tbats()` for simulating from TBATS models. * Added dependency on generics for accuracy() and forecast() (#902) * Bug fixes # forecast 8.16 * Fixed `tslm()` incorrectly applying Box-Cox transformations when an `mts` is provided to the `data` argument (#886). * Set D=0 when auto.arima applied to series with 2m observations or fewer. * Improved performance of parallel search of ARIMA models (jonlachmann, #891). * Fixed scoping of functions used in `ggAcf()` (#896). * Fixed checks on xreg in `simulate.Arima()` (#818) * Improved docs and bug fixes. # forecast 8.15 * Changed `summary()` methods to defer console output until `print()` * Changed default `s.window` values for `mstl()`, `stlf()` and `stlm()`. The new defaults are based on extensive empirical testing. # forecast 8.14 * Changed default `BoxCox(lambda = "auto")` lower bound to -0.9. * Use better variance estimates for `ets()` bias adjustments. * Improved robustness of `autoplot.seas()` for non-seasonal decomposition. * Fixed scoping of parameters in `auto.arima(parallel = TRUE)` (#874). * Fixed handling of `xreg` in `tsCV()`. # forecast 8.13 * Fixed forecasts from Arima with drift with initial NAs. * Fixed season colours in `gglagplot()` to match y-axis (original data). * Fixed facet order for classical decomposition `autoplot()` * Fixed `summary()` erroring for `tslm()` models containing NA values. # forecast 8.12 * Fixed bias adjusted forecast mean for ARIMA forecasts. * Improved naming of `accuracy()` generic formals. * Fixed seasonal periods for `taylor` dataset. # forecast 8.11 * The axes for `gglagplot()` have been reversed for consistency with `stats::lag.plot()`. # forecast 8.10 * Updates to remove new CRAN errors * Bug fixes # forecast 8.9 * Updates for CRAN policies on Suggests packages * Bug fixes # forecast 8.8 * Updates for compatibility with fable * Bug fixes # forecast 8.7 * Documentation improvements * Bug fixes # forecast 8.6 * Reduced conflicts with tidy forecasting packages * Forecast autoplots now use same colour shading as autolayer() and geom_forecast * Documentation improvements * Bug fixes # forecast 8.5 * Updated tsCV() to handle exogenous regressors * Reimplemented lagwalk methods (naive, snaive, rwf) for speed improvements * Added support for passing arguments to auto.arima() unit root tests * Improved auto.arima() stepwise search algorithm * Documentation improvements * Bug fixes # forecast 8.4 * Added modelAR(), generalising nnetar() to support user-defined functions * Added na.action argument to ets * Documentation improvements * Bug fixes # forecast 8.3 * Added mstl() to handle multiple seasonal decomposition * stlf(), stlm(), tsoutliers() and tsclean() all now use mstl().
* Updated tsCV() to handle multiple horizons * Switched unit root tests in ndiffs() to use urca package * Added ocsb.test * Changed method for choosing D in auto.arima() to a measure of seasonal strength. * Added baggedModel() function to generalize baggedETS * Added bootstrapped PI to more functions * Allowed lambda='auto' for all functions with lambda argument. * Updated author list to include all major contributors * Documentation improvements * Bug fixes # forecast 8.2 * Added pkgdown site * Added rolling window option to tsCV * Improved robustness to short time series and missing values * Bug fixes # forecast 8.1 * Added as.character.ets, as.character.bats, as.character.tbats * Made gghistogram() and checkresiduals() robust to missing values * All documentation now generated using roxygen * Improved documentation for many functions * Added autoplot.msts() and autolayer.msts * Added as.character methods for many models to generate model names * Added as.ts.forecast * autoplot method for bats/tbats models * Better ARIMA trace output * Made accuracy an S3 method * Bug fixes # forecast 8.0 * Added tips to start up message * Added pipe operator * Added tsCV() and CVar() functions * Added baggedETS * Added head.ts() and tail.ts(), so head and tail now work properly on ts objects. * Added gghistogram() and checkresiduals * Added ggseasonplot with polar coordinates * Modified defaults for gglagplot * Added autolayer.ts * Added type argument to residuals() for different types of residuals * Added support for seas objects from the seasonal package * Component extraction for seasonal decomposition methods * Range bars for decomposition autoplots * Added autoplot.StructTS * Added vignette based on 2008 JSS article by Hyndman and Khandakar * Improved ggplot functions * mforecast objects re-structured * Added as.data.frame.mforecast * autoplot functions now exported * Refit support for arfima() and stlm * Better bias adjustment support after Box-Cox transformation * print.ARIMA has better labelling of constants * Bug fixes * Removed fortify method for forecast objects # forecast 7.3 * Added prediction intervals and simulation for nnetar(). * Documentation improvement * Bug fixes # forecast 7.2 * Faceting for autoplot.mts * Box-Cox support for ses, holt, hw * ets() now works for tiny time series * Added h-step fitted values in fitted() function. * seasonal adjustment added to thetaf * y now the standard first argument in all modelling functions * Added truncate argument to auto.arima * seasadj() now an S3 method * series with frequency < 1 and non-integer seasonality now handled better * ggplot2 theme support * Added gglagplot, gglagchull * Arima() and auto.arima() now allow any argument to be passed to stats::arima(). * Bug fixes and speed improvements # forecast 7.1 * Fixed bug in auto.arima where the Box-Cox transformation was sometimes applied twice * Improved axes for ggseasonalplot * Improved tslm() to avoid some problems finding data * nnetar() updated to allow subsets * Modified initial values for ets * Improved unit tests to avoid deprecated functions and to avoid data from fpp * Removed fpp from Suggests list # forecast 7.0 * Added ggplot2 graphics * Bias adjustment option added for all functions that allow Box-Cox transformations * Added Ccf function, and rewrote Acf to handle multivariate series. * tslm() completely rewritten to be more robust and to handle fourier terms more easily * Support for multivariate linear models added * subset.ts() more robust, and captures some errors. 
* Added xreg argument to nnetar * Improved labels in seasonplot * More unit tests added * Documentation improvements * Bug fixes # forecast 6.2 * Many unit tests added using testthat. * Fixed bug in ets when very short seasonal series were passed in a data frame. * Fixed bug in nnetar where the initial predictor vector was reversed. * Corrected model name returned in nnetar(). * Fixed bug in accuracy() when non-integer seasonality used. * Made auto.arima() robust to non-integer seasonality. * Fixed bug in auto.arima where allowmean was ignored when stepwise=FALSE. * Improved robustness of forecast.ets() for explosive models with multiplicative trends. * Exogenous variables now passed to VAR forecasts * Increased maximum nmse in ets() to 30. * Made tsoutliers() more robust to weak seasonality * Changed tsoutliers() to use supsmu on non-seasonal and seasonally adjusted data. * Fixed bug in tbats() when seasonal period 1 is a small multiple of seasonal period 2. * Other bug fixes # forecast 6.1 * Made auto.arima more robust # forecast 6.0 * Modified dm.test to give error when variance is zero * Corrected help file for splinef(). * Fixed typo in accuracy help file regarding RMSE * Fixed bug in accuracy() which occurred with Arima and ets objects. * Fixed arima.errors() to handle Box-Cox transformed models. * Modified auto.arima() to be stricter on near-unit-roots. * Added allowmean argument in auto.arima(). * Improved handling of constant series in Arima() and forecast.Arima(). * Added plot.Arima() and plot.ar() functions. * Added as.character.Arima * Captured problem in bats/tbats where data are constant. * Modified TBATS and BATS estimation to avoid occasional instabilities. * Fixed bug in forecasts from bats which labelled them as TBATS. * Added allow.multiplicative.trend argument to ets(). * Set allow.multiplicative.trend=FALSE in stlf(), stlm() and forecast.ts(). * Simplified arguments in stlf(). * Added taperedacf and taperedpacf functions * Added functions for bootstrapping time series # forecast 5.9 * Improved documentation of accuracy() function. * Fixed occasional bug in accuracy() when test set is a single observation. * Improved Acf() to give better handling of horizontal axis for seasonal data or when ... is passed. * Removed print.Arima and predict.Arima and added print.ARIMA * method argument now passed when re-fitting an ARIMA model. * Fixed error when CH test applied to short series # forecast 5.8 * Fixed bug in versions of R before 3.10 when using fourier and fourierf. * Made BoxCox.lambda() robust to missing values. # forecast 5.7 * Fixed bug in tbats/bats where optional arguments were not being passed to auto.arima(). * Revised fourier() and fourierf() to avoid large orders, and to avoid zero columns. * Improved accuracy of fourier() and fourierf(), while simplifying the code. * Removed duplicate columns returned by fourier/fourierf with multiple seasonal periods. * Corrected some bugs in simulate.Arima for models involving xreg. * Centred simulations from simulate.Arima for non-stationary models by conditioning on first observation. * Added findfrequency() function. * Fixed error in computed residuals from forecast.stl(). * Improved handling of very short series in auto.arima(). * Fixed error in forecasting with additive damped models. Damping previously applied only from second forecast horizon. * Fixed misuse of abs() in two places in C code. * Added na.action argument to Acf() and fixed na.action argument in tsdisplay().
# forecast 5.6 * Improved tbats and bats by ensuring ARMA coefficients are not close to the boundary of invertibility and stationarity. * Improved nsdiffs() handling of degenerate series (e.g., all zeros). * Improved forecast.ar() when function buried within other functions. * Improved handling of degenerate ARIMA models when xreg used. * More robust ets() initialization. * Fixed problem in na.interp() with seasonal data having frequency <= 5. * Removed undocumented option to use Rmalschains for optimization of ets(). # forecast 5.5 * Improved documentation for croston * Added stlm() and forecast.stlm() functions, and added forecastfunction argument as a way of specifying a forecast method in stlf() and forecast.stl(). * Improved forecast.ar() so that it is more likely to work if ar() and forecast.ar() are embedded within other functions. * Improved handling of ARIMA models with seasonality greater than 48 * Improved handling of some degenerate regression models in nsdiffs * Changed AIC for poor models from 1e20 to Inf. * Update fourier() and fourierf() to work with msts object. * Added a new argument find.frequency to forecast.ts(). * Added new arguments d and D to accuracy() for MASE. * Corrected bugs in accuracy(). * Better handling of regression models with perfect fit in auto.arima(). * Fixed bug in tbats.components() when there are no seasonal components. # forecast 5.4 * Fixed bug in forecast.tbats() and forecast.bats() when ts.frequency does not match seasonal.periods. * Fixed bug in getResponse.lm() when there's a logged dependent variable. * Modified ets() to avoid problems when data contains large numbers. * Modified ets() to produce forecasts when the data are constant. * Improved arima.errors() to find xreg more often, and to return an error if it can't be found. # forecast 5.3 * Unit tests added * Fixed bug in zzhw() which reversed the sign of the residuals. * Updated help file for CV() to specify it is only leave-one-out. * Fixed guer.cv() to allow non-integer periods without warning. * Added use.initial.values argument in ets(). * Added arimaorder() function. * Modified warnings suppression by using suppressWarnings() throughout. # forecast 5.2 * Changed default number of cores to 2 for all functions that use parallel processing. * Removed remaining call to bats() from examples that are run. # forecast 5.1 * Fixed bug in tsoutliers() and tsclean() with very short seasonal series. * Fixed bug in Arima() when seasonal order is specified numerically instead of via a list. * Removed dimension attribution from output of arima.errors * Improved handling of "test" in accuracy * Changed parallel processing to parLapply for auto.arima * Added timeDate dependency to avoid errors in easter() and link to Rcpp >= 0.11.0. # forecast 5.0 * Added argument model to dshw(). * Added bizdays() and easter() for calendar variables. * Added arguments max.D and max.d to auto.arima(), ndiffs() and nsdiffs(). * Made several functions more robust to zoo objects. * Corrected an error in the calculation of AICc when using CV(). * Made minimum default p in nnetar equal to 1. * Added tsoutliers() and tsclean() for identifying and replacing outliers * Improved na.interp() to handle seasonality and added argument lambda to na.interp * Added robust option to forecast.ts() to allow outliers and missing values * Improved output from snaive() and naive() to better reflect user expectations * Allowed Acf() to handle missing values by using na.contiguous * Changed default information criterion in ets() to AICc. 
* Removed drift term in Arima() when d+D>1. * Added bootstrap option to forecast.Arima # forecast 4.8 * Fixed bug in rwf() that was introduced in v4.7 # forecast 4.7 * Added forecast.forecast() to simply return the object that is passed. * Removed leading zero in package number. i.e., 4.7 instead of 4.07. * better handling of nearly constant time series, and nearly linear time series * improved handling of missing values in rwf * corrected fitted values and residuals in meanf() for time series data * bats() and tbats() now handle missing values in the same way as ets(). i.e., using longest contiguous portion. * better handling of very short time series * initial states for ets() modified for very short time series (less than 3 years). * nsdiffs with CH test now handles degenerate cases without returning an error. * nnetar now handles missing values * Fixed bug in forecast.varest() so residuals and fitted values computed correctly. * Added accuracy() calculation for VAR models * Fixed a bug in simulate.fracdiff() when future=TRUE. Sometimes the future argument was being ignored. # forecast 4.06 * accuracy() was returning a mape and mpe 100 times too large for in-sample errors. # forecast 4.05 * Fixed bug in hw() so it works when initial="simple" * Allowed bats() and tbats() to take non-positive values. * ets() now calls optim direct via c code making ets() run much faster. * Added Rmalschains as a possible optimizer in ets(). Not documented. * Modified forecast.lm so it is more likely that the original data are stored in the returned object. * Corrected bug in forecast.Arima that occurred when a Box-Cox transformation was used with bootstrap=TRUE. * accuracy() updated so that it gives more information, and returns a matrix of both test and training measures. * Corrected training error measures for splinef() forecasts. # forecast 4.04 * Added ylim argument to Acf * Avoided clash with the signal package when using auto.arima(). * Fixed problem in plot.forecast() when all historical data are NA or when there is no available historical data. * forecast.Arima() is now a little more robust if a zoo object is passed instead of a ts object. * CV() now handles missing values in the residuals. * Fixed bug in holt() and hw() so that the printed model no longer contains missing values. # forecast 4.03 * forecast.lm now guesses the variable name if there is only one predictor variable. * Removed error trap in forecast.lm when no xreg variables passed as it was catching legitimate calls. # forecast 4.02 * Fixed error in the prediction intervals returned by forecast.ets() when simulation was used and a Box-Cox transformation was specified. * Fixed bug in accuracy() when a numerical f vector was passed. * Fixed man file for Diebold-Mariano test. * Corrected references in nsdiffs() help page. * Added warning to nsdiffs when series too short for seasonal differencing. * Fixed problem in getResponse.Arima when Arima object created by stats::arima() from within a function. * Added tbats.components() and extended seasadj() to allow tbats objects. * Added undocumented functions for forecasting, printing and plotting output from vars::VAR. # forecast 4.01 * Error now trapped when newxreg variables not passed to forecast.lm * Corrected help file for dshw() to remove references to prediction intervals. * Improved help file for dm.test() to give more information about the alternative hypotheses. * Improved dm.test() performance for small samples by using a t-distribution instead of normal. 
* Modified bats() and tbats() examples to follow CRAN policies on parallel processing. * Moved some packages from Depends to Imports. * Added getResponse() function to return the historical time series from various time series model objects. * Modified accuracy() to use getResponse(). * Allowed user-generated innovations in simulate.ets(), simulate.Arima(), etc. * Allowed xreg argument in forecast.stl() and stlf() when ARIMA model used. * Removed reliance on caret, and associated fitted and residuals functions. # forecast 4.00 * More robust handling of degenerate ARIMA models. * New defaults for shaded colors used for prediction intervals in plots. * auto.arima() now remembers the name of the series when a Box-Cox transformation is used. * New function nnetar() for automatic neural network forecasting of time series. * arfima() now tries harder to ensure the ARMA part is stationary. * ts control added for forecast of linear models in forecast.lm(). * Fixed bug in bats() which caused an error when use.box.cox=FALSE and use.trend=FALSE. * Added residuals and fitted methods for train and avNNet objects from caret package. * accuracy() can now figure out overlapping times for x and f. * rwf() now handles missing values. * Revised ses(), holt() and hw() so that they can optionally use traditional initialization. # forecast 3.25 * Fixed bug in simulate.Arima. * Improved handling of short seasonal time series in auto.arima(). * Added seasonal argument to auto.arima(). * Fixed bug in splinef() and added gcv method for estimating smoothing parameter. # forecast 3.24 (23 July 2012) * Fixed bug in auto.arima() introduced in v3.23 which meant an ARIMA(0,0,0) model was returned about half the time. # forecast 3.23 * Fixed bug in arfima() which meant the drange argument was being ignored. * Extended auto.arima() so it returns something sensible when the data are constant. # forecast 3.22 * Increased maximum forecast horizon for ets models from 2000 to unlimited. * Corrected bug in Arima(). Previously include.constant=FALSE was ignored. * Some corrections to bats and tbats. * Modified parallel implementation in auto.arima for Windows. # forecast 3.21 * Fixed bug in auto.arima() when lambda is non-zero and stepwise is FALSE. * Fixed bug in auto.arima() in selecting d when D>0. * Fixed bug in ets() when seasonal period is less than 1. * Turned off warnings in auto.arima() and ets() when seasonal period is less than 1. * Added plotting methods for bats and tbats objects. * Changed default forecast horizons for bats and tbats objects. * Modified bats and tbats so they now use seasonal.periods when ts and msts objects are being modelled. # forecast 3.20 * Fixed bugs in forecast.lm(). * Improved handling of newdata in forecast.lm() to provide more meaningful error messages. * Fixed bug in dm.test() that occurred when errors were very small. # forecast 3.19 * Improved plotting of forecast objects from lm models * Added MASE for lm forecasts using insample mean forecasts for scaling. * Modified definition of MASE for seasonal time series to use seasonal naive insample scaling. * Modified meanf() to allow it to be used with cross-sectional data. * Updated accuracy() to allow it to be used with cross-sectional data, lm forecasts and lm objects. # forecast 3.18 * Added method for plotting non-time-series forecasts to plot.forecast(). * Removed partial arg matching. * Cleaned up some code, removing commented out sections, etc. * Added robust option to stlf().
* Added naive and rwdrift options to stlf() and forecast.stl(). * Improved handling of msts objects in BoxCox.lambda * Fixed some minor bugs in tbats() and bats * Improved speed of bats() and tbats(). # forecast 3.17 * Improved forecast.lm() so it is more likely to find the original data from an lm object. * Parallel processing now available in auto.arima() when stepwise=FALSE * Default model selection in auto.arima() changed to AICc rather than AIC. This may affect model selection for very short time series. * max orders in auto.arima() now restricted to be less than 1/3 of length of data. # forecast 3.16 * Corrected problem with AIC computation in bats and tbats * Fixed handling of non-seasonal data in bats * Changed dependency to >= R 2.14.0 in order to ensure parallel package available. # forecast 3.15 * New functions tbats() and forecast.tbats() for multiple seasonal time series modelling. * bats() and tbats() use parallel processing when possible. * Minor improvements to bats() and forecast.bats(). * decompose() removed as the function in the stats package has now been fixed. # forecast 3.14 * Improved documentation for forecast.ts * Corrected bug in dshw() when applied to a non-ts object. * Added error message when dshw() applied to data containing zeros or negative values * Added checks when dshw() applied to time series with non-nested periods. * Added msts object class for multiple seasonal time series * Made taylor data set an msts object. * Added bats() function for multiple seasonal time series modelling * Added forecast.bats() function for forecasting BATS models * Byte compiling turned on * Depending on Rcpp and RcppArmadillo to speed some code up. # forecast 3.13 * Bug fix for forecast.StructTS() due to changes in the StructTS object. The default h was being set to 0. Thanks to Tarmo Leinonen for reporting this problem. * Bug fix for forecast.stl() where h longer than one seasonal period sometimes returned missing forecasts. Thanks to Kevin Burton for reporting this problem. * forecast.stl() no longer allows a seasonal ETS model to be specified. Thanks to Stefano Birmani for the suggestion. # forecast 3.12 * Added option to control ets model in stlf() and forecast.stl(). Thanks to Stefano Birmani for the suggestion. * Reordered arguments for forecast.lm() and stlf() to be consistent with other forecast functions. * Modified tslm() so that it is more likely to find the relevant data when it is not passed as an argument. * Fixed bug in forecast.ets which returned all zero forecasts for some models when seasonal period > 24. # forecast 3.11 * Fixed bug in dshw() when smallest period is odd # forecast 3.10 * Added lambda argument to naive() and snaive(). * Fixed bug in ets() with high frequency data. * Fixed bug in rwf() where incorrect fitted values and residuals were sometimes returned. * Modified number of lags displayed by default in tsdisplay(). # forecast 3.09 * Fixed bug causing occasional problems in simulate.Arima() when MA order greater than 2 and future=TRUE. # forecast 3.08 * Bug fix in forecast.stl() which occurred when forecast horizon is less than seasonal period. * Added lambda argument to forecast.stl(). # forecast 3.07 * Bug fix in ets() concerning non-seasonal models and high-frequency data. It sometimes returned all forecasts equal to zero. # forecast 3.06 * Switched to useDynLib in preparation for Rv2.14.0. # forecast 3.05 * Fixed bug in ets() which prevented non-seasonal models being fitted to high frequency data.
# forecast 3.04 * Fixed bug when drift and xreg used together in auto.arima() or Arima(). # forecast 3.03 * Bug fix in dshw() which was using slightly incorrect seasonal estimates for the forecasts * Bug fix in forecast.StructTS due to change in structure of StructTS object. * Better error capture in tslm when seasonal dummies are specified for non-seasonal data. * Re-formatted some help files to prevent viewing problems with the pdf manual. # forecast 3.02 * Bug fixes # forecast 3.00 * Added Box-Cox parameter as argument to Arima(), ets(), arfima(), stlf(), rwf(), meanf(), splinef * Added Box-Cox parameter as argument to forecast.Arima(), forecast.ets(), forecast.fracdiff(), forecast.ar(), forecast.StructTS, forecast.HoltWinters(). * Removed lambda argument from plot.forecast() and accuracy(). * Added BoxCox.lambda() function to allow automatic choice for Box-Cox parameter using Guerrero's method or the profile log likelihood method. * Modified BoxCox and InvBoxCox to return missing values when lambda < 0 and data < 0. * Add nsdiffs() function for selecting the number of seasonal differences. * Modified selection of seasonal differencing in auto.arima(). * Better error message if seasonal factor used in tslm() with non-seasonal data. * Added PI argument to forecast.ets() to allow only point forecasts to be computed. * Added include.constant argument to Arima(). * Added subset.ts() function. * Upgraded seasonplot() function to allow colors and to fix some bugs. * Fixed fitted values returned by forecast.HoltWinters * Modified simulate.Arima() because of undocumented changes in filter() function in stats package. * Changed residuals returned by splinef() to be ordinary residuals. The standardized residuals are now returned as standardizedresiduals. * Added dshw() function for double-seasonal Holt-Winters method based on Taylor (2003). * Fixed further bugs in the decompose() function that caused the results to be incorrect with odd frequencies. # forecast 2.19 * Added xreg information to the object returned by auto.arima(). * Added Acf(), Pacf(), ma() and CV() functions. * Fixed bugs in re-fitting ARIMA models to new data. # forecast 2.18 (2011-05-19) * Fixed bug in seasonplot() where year labels were sometimes incorrect. # forecast 2.17 * Modified simulate.Arima() to handle seasonal ARIMA models. * Modified ets() to handle missing values. The largest continuous section of data is now modelled. * Improved plot.forecast() to handle missing values at the end of the observed series. * Added replacement decompose() to avoid truncation of seasonal term and seasonally adjusted series. * Fixed bug in seasadj() to handle multiplicative decomposition, and to avoid missing values at ends. # forecast 2.16 * Changed the way missing values are handled in tslm # forecast 2.15 * Added fourier(), fourierf(), tslm * Improved forecast.lm() to allow trend and seasonal terms. # forecast 2.14 * Added forecast.lm * Modified accuracy() and print.forecast() to allow non time series forecasts. * Fixed visibility of stlf(). # forecast 2.13 * Fixed bug in accuracy() when only 1 forecast is specified. * Added forecast.stl() and stlf() functions * Modified forecast.ts() to use stlf() if frequency > 12. * Made BoxCox() and InvBoxCox() robust to negative values * Fixed bug in simulate.Arima() when future=TRUE. There was a bias in the sample paths. # forecast 2.12 * Added naive() and snaive() functions. * Improved handling of seasonal data with frequency < 1. * Added lambda argument to accuracy(). 
# forecast 2.11 * If MLE in arfima() fails (usually because the series is non-stationary), the LS estimate is now returned. # forecast 2.10 * Fixed bug in arfima() where the MA parameters were of the wrong sign if estim="mle" chosen. * arfima() now allowed to have a sequence of missing values at the start of the series and end of the series. # forecast 2.09 * Fixed bug in forecast.fracdiff() which caused an error when h=1. * Added shadebars to plot.forecast(). * Fixed bug in plot.forecast() to allow plotting when h=1. # forecast 2.08 * Added pp test option for auto.arima() and ndiffs(). * Fixed bug in simulate.ets() which was causing problems when forecasting from some ETS models including ETS(M,M,N). # forecast 2.07 * Fixed bug in simulate.Arima(). Previous sample paths when d=2 and future=TRUE were incorrect. * Changed way color is implemented in plot.forecast() to avoid colour changes when the graphics window is refreshed. # forecast 2.06 * Added MLE option for arfima(). * Added simulate.Arima(), simulate.ar() and simulate.fracdiff() # forecast 2.05 * Added arfima() and a forecast method to handle ARFIMA models from arfima() and fracdiff(). * Added residuals and fitted methods for fracdiff objects. # forecast 2.04 * Fixed bug in auto.arima() that occurred rarely. # forecast 2.03 * Added an option to auto.arima() to allow drift terms to be excluded from the models considered. # forecast 2.02 * Fixed bug in auto.arima() that occurred when there was an xreg but no drift, approximation=TRUE and stepwise=FALSE. # forecast 2.01 * Fixed bug in time index of croston() output. * Added further explanation about models to croston() help file. # forecast 2.00 * Package removed from forecasting bundle # forecast 1.26 (29 August 2009) * Added as.data.frame.forecast(). This allows write.table() to work for forecast objects. # forecast 1.25 (22 July 2009) * Added argument to auto.arima() and ndiffs() to allow the ADF test to be used instead of the KPSS test in selecting the number of differences. * Added argument to plot.forecast() to allow different colors and line types when plotting prediction intervals. * Modified forecast.ts() to give sensible results with a time series containing fewer than four observations. # forecast 1.24 (9 April 2009) * Fixed bug in dm.test() to avoid errors when there are missing values in the residuals. * More informative error messages when auto.arima() fails to find a suitable model. # forecast 1.23 (22 February 2009) * Fixed bugs that meant xreg terms in auto.arima() sometimes caused errors when stepwise=FALSE. # forecast 1.22 (30 January 2009) * Fixed bug that meant regressor variables could not be used with seasonal time series in auto.arima(). # forecast 1.21 (16 December 2008) * Fixed bugs introduced in v1.20. # forecast 1.20 (14 December 2008) * Updated auto.arima() to allow regression variables. * Fixed a bug in print.Arima() which caused problems when the data were inside a data.frame. * In forecast.Arima(), argument h is now set to the length of the xreg argument if it is not null. # forecast 1.19 (7 November 2008) * Updated Arima() to allow regression variables when refitting an existing model to new data. # forecast 1.18 (6 November 2008) * Bug fix in ets(): models with frequency less than 1 would cause R to hang. * Bug fix in ets(): models with frequency greater than 12 would not fit due to parameters being out of range. * Default lower and upper bounds on parameters alpha, beta and gamma in ets() changed to 0.0001 and 0.9999 (instead of 0.01 and 0.99).
# forecast 1.17 (10 October 2008) * Calculation of BIC did not account for reduction in length of series due to differencing. Now fixed in auto.arima() and in print.Arima(). * tsdiag() now works with ets objects. # forecast 1.16 (29 September 2008) * Another bug fix in auto.arima(). Occasionally the root checking would cause an error. The condition is now trapped. # forecast 1.15 (16 September 2008) * Bug fix in auto.arima(). The series wasn't always being stored as part of the return object when stepwise=FALSE. # forecast 1.14 (1 August 2008) * The time series stored in M3 in the Mcomp package did not contain all the components listed in the help file. This problem has now been fixed. # forecast 1.13 (16 June 2008) * Bug in plot.ets() fixed so that plots of non-seasonal models for seasonal data now work. * Warning added to ets() if the time series contains very large numbers (which can cause numerical problems). Anything up to 1,000,000 should be ok, but any larger and it is best to scale the series first. * Fixed problem in forecast.HoltWinters() where the lower and upper limits were interchanged. # forecast 1.12 (22 April 2008) * Objects are now coerced to class ts in ets(). This allows it to work with zoo objects. * A new function dm.test() has been added. This implements the Diebold-Mariano test for predictive accuracy. * Yet more bug-fixes for auto.arima(). # forecast 1.11 (8 February 2008) * Modifications to auto.arima() in the case where ML estimation does not work for the chosen model. Previously this would return no model. Now it returns the model estimated using CSS. * AIC values reported in auto.arima() when trace=TRUE and approximation=TRUE are now comparable to the final AIC values. * Addition of the expsmooth package. # forecast 1.10 (21 January 2008) * Fixed bug in seasadj() so it allows multiple seasonality * Fixed another bug in print.Arima() * Bug fixes in auto.arima(). It was sometimes returning a non-optimal model, and occasionally no model at all. Also, additional stationarity and invertibility testing is now done. # forecast 1.09 (11 December 2007) * A new argument 'restrict' has been added to ets() with default TRUE. If set to FALSE, then the unstable ETS models are also allowed. * A bug in the print.Arima() function was fixed. # forecast 1.08 (21 November 2007) * AICc and BIC corrected. Previously I had not taken account of the sigma^2 parameter when computing the number of parameters. * arima() function changed to Arima() to avoid the clash with the arima() function in the stats package. * auto.arima now uses an approximation to the likelihood when selecting a model if the series is more than 100 observations or the seasonal period is greater than 12. This behaviour can be over-ridden via the approximation argument. * A new function plot.ets() provides a decomposition plot of an ETS model. * predict() is now an alias for forecast() wherever there is not an existing predict() method. * The argument conf has been changed to level in all forecasting methods to be consistent with other R functions. * The functions gof() and forecasterrors() have been replaced by accuracy() which handles in-sample and out-of-sample forecast accuracy. * The initialization method used for a non-seasonal ETS model applied to seasonal data was changed slightly. * The following methods for ets objects were added: summary, coef and logLik. * The following methods for Arima objects were added: summary. # forecast 1.07 (25 July 2007) * Bug fix in summary of in-sample errors. 
For ets models with multiplicative errors, the reported in-sample values of MSE, MAPE, MASE, etc., in summary() and gof() were incorrect. * ARIMA models with frequency greater than 49 now allowed. But there is no unit-root testing if the frequency is 50 or more, so be careful! * Improvements in documentation. # forecast 1.06 (15 June 2007) * Bug fix in auto.arima(). It would not always respect the stated values of max.p, max.q, max.P and max.Q. * The tseries package is now installed automatically along with the forecasting bundle, whereas previously it was only suggested. # forecast 1.05 (28 May 2007) * Introduced auto.arima() to provide a stepwise approach to ARIMA modelling. This is much faster than the old best.arima(). * The old grid-search method used by best.arima() is still available by using stepwise=FALSE when calling auto.arima(). * Automated choice of seasonal differences introduced in auto.arima(). * Some small changes to the starting values of ets() models. * Fixed a bug in applying ets() to new data using a previously fitted model. # forecast 1.04 (30 January 2007) * Added include.drift to arima() * Fixed bug in seasonal forecasting with ets() # forecast 1.03 (20 October 2006) * Fixed some DOS line feed problems that were bothering unix users. # forecast 1.02 (12 October 2006) * Added AICc option to ets() and best.arima(). * Corrected bug in calculation of fitted values in ets models with multiplicative errors. # forecast 1.01 (25 September 2006) * Modified ndiffs() so that the maximum number of differences allowed is 2. # forecast 1.0 (31 August 2006) * Added MASE to gof(). * croston() now returns fitted values and residuals. * arima() no longer allows linear trend + ARMA errors by default. Also, drift in non-stationary models can be turned off. * This version is the first to be uploaded to CRAN. # forecast 0.99992 (8 August 2006) * Corrections to help files. No changes to functionality. # forecast 0.99991 (2 August 2006) * More bug fixes. ets now converges to a good model more often. # forecast 0.9999 (1 August 2006) * Mostly bug fixes. * A few data sets have been moved from fma to forecast as they are not used in my book. * ets is now considerably slower but gives better results. Full optimization is now the only option (which is what slows it down). I had too many problems with poor models when partial optimization was used. I'll work on speeding it up sometime, but this is not a high priority. It is fast enough for most use. If you really need to forecast 1000 series, run it overnight. * In ets, I've experimented with new starting conditions for optimization and it seems to be fairly robust now. * Multiplicative error models can no longer be applied to series containing zeros or negative values. However, the forecasts from these models are not constrained to be positive. # forecast 0.999 (27 July 2006) * The package has been turned into three packages forming a bundle. The functions and a few datasets are still in the forecast package. The data from Makridakis, Wheelwright and Hyndman (1998) is now in the fma package. The M-competition data is now in the Mcomp package. Both fma and Mcomp automatically load forecast. * This is the first version available on all operating systems (not just Windows). * pegels has been replaced by ets. ets only fits the model; it doesn't produce forecasts. To get forecasts, apply the forecast function to the ets object. * ets has been completely rewritten which makes it slower, but much easier to maintain. 
Different boundary conditions are used and a different optimizer is used, so don't expect the results to be identical to what was done by the old pegels function. To get something like the results from the old pegels function, use forecast(ets()). * simulate.ets() added to simulate from an ets model. * Changed name of cars to auto to avoid clash with the cars data in the datasets package. * arima2 functionality is now handled by arima() and pegels2 functionality is now handled by ets. * best.arima now allows the option of BIC to be used for model selection. * Croston's method added in function croston(). * ts.display renamed as tsdisplay * mean.f changed to meanf, theta.f changed to thetaf, rw.f changed to rwf, seasonaldummy.f to seasonaldummyf, sindex.f to sindexf, and spline.f to splinef. These changes are to avoid potential problems if anyone introduces an 'f' class. # forecast 0.994 (4 October 2004) * Fixed bug in arima which caused predict() to sometimes fail when there was no xreg term. * More bug fixes in handling regression terms in arima models. * New print.Arima function for more informative output. # forecast 0.993 (20 July 2004) * Added forecast function for structural time series models obtained using StructTS(). * Changed default parameter space for pegels() to force admissibility. * Added option to pegels() to allow restriction to models with finite forecast variance. This restriction is imposed by default. * Fixed bug in arima.errors(). Changes made to arima() meant arima.errors() was often returning an error message. * Added a namespace to the package making fewer functions visible to the user. # forecast 0.99 (21 May 2004) * Added automatic selection of order of differencing for best.arima. * Added possibility of linear trend in arima models. * In pegels(), option added to allow parameters of an exponential smoothing model to be in the 'admissible' (or invertible) region rather than within the usual (0,1) region. * Fixed some bugs in pegels. * Included all M1 and M3 data and some functions to subset and plot them. * Note: This package will only work in R1.9 or later. # forecast 0.98 (23 August 2003) * Added facilities in pegels. o It is now possible to specify particular values of the smoothing parameters rather than always use the optimized values. If none are specified, the optimal values are still estimated as before. o It is also possible to specify upper and lower bounds for each parameter separately. * New function: theta.f. This implements the Theta method which did very well in the M3 competition. * A few minor problems with pegels fixed and a bug in forecast.plot that meant it didn't work when the series contained missing values. # forecast 0.972 (11 July 2003) * Small bug fix: pegels did not return correct model when model was partially specified. # forecast 0.971 (10 July 2003) * Minor fixes to make sure the package will work with R v1.6.x. No changes to functionality. # forecast 0.97 (9 July 2003) * Fully automatic forecasting based on the state space approach to exponential smoothing has now been added. For technical details, see Hyndman, Koehler, Snyder and Grose (2002). * Local linear forecasting using cubic smoothing splines added. For technical details, see Hyndman, King, Pitrun and Billah (2002). # forecast 0.96 (15 May 2003) * Many functions rewritten to make use of methods and classes. Consequently several functions have had their names changed and many arguments have been altered. Please see the help files for details. 
# forecast 0.994 (4 October 2004)

* Fixed bug in arima which caused predict() to sometimes fail when there was no xreg term.
* More bug fixes in handling regression terms in arima models.
* New print.Arima function for more informative output.

# forecast 0.993 (20 July 2004)

* Added forecast function for structural time series models obtained using StructTS().
* Changed default parameter space for pegels() to force admissibility.
* Added option to pegels() to allow restriction to models with finite forecast variance. This restriction is imposed by default.
* Fixed bug in arima.errors(). Changes made to arima() meant arima.errors() was often returning an error message.
* Added a namespace to the package making fewer functions visible to the user.

# forecast 0.99 (21 May 2004)

* Added automatic selection of order of differencing for best.arima.
* Added possibility of linear trend in arima models.
* In pegels(), option added to allow parameters of an exponential smoothing model to be in the 'admissible' (or invertible) region rather than within the usual (0,1) region.
* Fixed some bugs in pegels.
* Included all M1 and M3 data and some functions to subset and plot them.
* Note: This package will only work in R 1.9 or later.

# forecast 0.98 (23 August 2003)

* Added facilities in pegels.
  o It is now possible to specify particular values of the smoothing parameters rather than always use the optimized values. If none are specified, the optimal values are still estimated as before.
  o It is also possible to specify upper and lower bounds for each parameter separately.
* New function: theta.f. This implements the Theta method which did very well in the M3 competition.
* A few minor problems with pegels fixed and a bug in forecast.plot that meant it didn't work when the series contained missing values.

# forecast 0.972 (11 July 2003)

* Small bug fix: pegels did not return correct model when model was partially specified.

# forecast 0.971 (10 July 2003)

* Minor fixes to make sure the package will work with R v1.6.x. No changes to functionality.

# forecast 0.97 (9 July 2003)

* Fully automatic forecasting based on the state space approach to exponential smoothing has now been added. For technical details, see Hyndman, Koehler, Snyder and Grose (2002).
* Local linear forecasting using cubic smoothing splines added. For technical details, see Hyndman, King, Pitrun and Billah (2002).

# forecast 0.96 (15 May 2003)

* Many functions rewritten to make use of methods and classes. Consequently several functions have had their names changed and many arguments have been altered. Please see the help files for details.
* Added functions forecast.Arima and forecast.ar
* Added functions gof and seasadj
* Fixed bug in plot.forecast. The starting date for the plot was sometimes incorrect.
* Added residuals components to rw.f and mean.f.
* Made several changes to ensure compatibility with R v1.7.0.
* Removed a work-around to fix a bug in monthplot command present in R v<=1.6.2.
* Fixed the motel data set (columns were swapped)
forecast/inst/0000755000176200001440000000000014634702027013042 5ustar liggesusersforecast/inst/CITATION0000644000176200001440000000252214567530017014203 0ustar liggesusers
year <- sub("-.*", "", meta$Date)
if(!length(year)) year <- substr(Sys.Date(),1,4)
vers <- meta$Version
if(is.null(vers)) vers <- packageVersion("forecast")
vers <- paste("R package version", vers)

# Grab authors from DESCRIPTION file
# authors <- eval(parse(text=as.list(read.dcf("../DESCRIPTION")[1, ])$`Authors@R`))
# authors <- authors[sapply(authors$role, function(roles) "aut" %in% roles)]
# authors <- sapply(authors, function(author) paste(author$given, author$family))
# authors <- paste(authors, collapse = " and ")

citHeader("To cite the forecast package in publications, please use:")

bibentry(bibtype = "Manual",
         title = "{forecast}: Forecasting functions for time series and linear models",
         author = "Rob Hyndman and George Athanasopoulos and Christoph Bergmeir and Gabriel Caceres and Leanne Chhay and Mitchell O'Hara-Wild and Fotios Petropoulos and Slava Razbash and Earo Wang and Farah Yasmeen",
         year = year,
         note = vers,
         url = "https://pkg.robjhyndman.com/forecast/")

bibentry(bibtype = "Article",
         title = "Automatic time series forecasting: the forecast package for {R}",
         author = c(as.person("Rob J Hyndman"), as.person("Yeasmin Khandakar")),
         journal = "Journal of Statistical Software",
         volume = 27,
         number = 3,
         pages = "1--22",
         year = 2008,
         doi = "10.18637/jss.v027.i03")
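# Note added for clarity (an editorial comment, not in the upstream CITATION file):
# R evaluates this file when a user calls citation("forecast"), printing the two
# entries above.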
forecast/inst/doc/0000755000176200001440000000000014634702027013607 5ustar liggesusersforecast/inst/doc/JSS2008.R0000644000176200001440000001634414634702026014712 0ustar liggesusers
## ----load_forecast, echo=FALSE, message=FALSE---------------------------------
library('forecast')

## ----load_expsmooth, echo=FALSE, message=FALSE, eval=FALSE--------------------
# library('expsmooth')

## ----expsmooth_datsets, echo=FALSE, message=FALSE-----------------------------
bonds <- structure(c(5.83, 6.06, 6.58, 7.09, 7.31, 7.23, 7.43, 7.37, 7.6, 7.89, 8.12, 7.96, 7.93, 7.61, 7.33, 7.18, 6.74, 6.27, 6.38, 6.6, 6.3, 6.13, 6.02, 5.79, 5.73, 5.89, 6.37, 6.62, 6.85, 7.03, 6.99, 6.75, 6.95, 6.64, 6.3, 6.4, 6.69, 6.52, 6.8, 7.01, 6.82, 6.6, 6.32, 6.4, 6.11, 5.82, 5.87, 5.89, 5.63, 5.65, 5.73, 5.72, 5.73, 5.58, 5.53, 5.41, 4.87, 4.58, 4.89, 4.69, 4.78, 4.99, 5.23, 5.18, 5.54, 5.9, 5.8, 5.94, 5.91, 6.1, 6.03, 6.26, 6.66, 6.52, 6.26, 6, 6.42, 6.1, 6.04, 5.83, 5.8, 5.74, 5.72, 5.23, 5.14, 5.1, 4.89, 5.13, 5.37, 5.26, 5.23, 4.97, 4.76, 4.55, 4.61, 5.07, 5, 4.9, 5.28, 5.21, 5.15, 4.9, 4.62, 4.24, 3.88, 3.91, 4.04, 4.03, 4.02, 3.9, 3.79, 3.94, 3.56, 3.32, 3.93, 4.44, 4.29, 4.27, 4.29, 4.26, 4.13, 4.06, 3.81, 4.32, 4.7), .Tsp = c(1994, 2004.33333333333, 12), class = "ts")
usnetelec <- structure(c(296.1, 334.1, 375.3, 403.8, 447, 476.3, 550.3, 603.9, 634.6, 648.5, 713.4, 759.2, 797.1, 857.9, 920, 987.2, 1058.4, 1147.5, 1217.8, 1332.8, 1445.5, 1535.1, 1615.9, 1753, 1864.1, 1870.3, 1920.8, 2040.9, 2127.4, 2209.4, 2250.7, 2289.6, 2298, 2244.4, 2313.4, 2419.5, 2473, 2490.5, 2575.3, 2707.4, 2967.3, 3038, 3073.8, 3083.9, 3197.2, 3247.5, 3353.5, 3444.2, 3492.2, 3620.3, 3694.8, 3802.1, 3736.6, 3858.5, 3848), .Tsp = c(1949, 2003, 1), class = "ts")
ukcars <- structure(c(330.371, 371.051, 270.67, 343.88, 358.491, 362.822, 261.281, 240.355, 325.382, 316.7, 171.153, 257.217, 298.127, 251.464, 181.555, 192.598, 245.652, 245.526, 225.261, 238.211, 257.385, 228.461, 175.371, 226.462, 266.15, 287.251, 225.883, 265.313, 272.759, 234.134, 196.462, 205.551, 291.283, 284.422, 221.571, 250.697, 253.757, 267.016, 220.388, 277.801, 283.233, 302.072, 259.72, 297.658, 306.129, 322.106, 256.723, 341.877, 356.004, 361.54, 270.433, 311.105, 326.688, 327.059, 274.257, 367.606, 346.163, 348.211, 250.008, 292.518, 343.318, 343.429, 275.386, 329.747, 364.521, 378.448, 300.798, 331.757, 362.536, 389.133, 323.322, 391.832, 421.646, 416.823, 311.713, 381.902, 422.982, 427.722, 376.85, 458.58, 436.225, 441.487, 369.566, 450.723, 462.442, 468.232, 403.636, 413.948, 460.496, 448.932, 407.787, 469.408, 494.311, 433.24, 335.106, 378.795, 387.1, 372.395, 335.79, 397.08, 449.755, 402.252, 391.847, 385.89, 424.325, 433.28, 391.213, 408.74, 445.458, 428.202, 379.048, 394.042, 432.796), .Tsp = c(1977, 2005, 4), class = "ts")
visitors <- structure(c(75.7, 75.4, 83.1, 82.9, 77.3, 105.7, 121.9, 150, 98, 118, 129.5, 110.6, 91.7, 94.8, 109.5, 105.1, 95, 130.3, 156.7, 190.1, 139.7, 147.8, 145.2, 132.7, 120.7, 116.5, 142, 140.4, 128, 165.7, 183.1, 222.8, 161.3, 180.4, 185.2, 160.5, 157.1, 163.8, 203.3, 196.9, 179.6, 207.3, 208, 245.8, 168.9, 191.1, 180, 160.1, 136.6, 142.7, 175.4, 161.4, 149.9, 174.1, 192.7, 247.4, 176.2, 192.8, 189.1, 181.1, 149.9, 157.3, 185.3, 178.2, 162.7, 190.6, 198.6, 253.1, 177.4, 190.6, 189.2, 168, 161.4, 172.2, 208.3, 199.3, 197.4, 216, 223.9, 266.8, 196.1, 238.2, 217.8, 203.8, 175.2, 176.9, 219.3, 199.1, 190, 229.3, 255, 302.4, 242.8, 245.5, 257.9, 226.3, 213.4, 204.6, 244.6, 239.9, 224, 267.2, 285.9, 344, 250.5, 304.3, 307.4, 255.1, 214.9, 230.9, 282.5, 265.4, 254, 301.6, 311, 384, 303.8, 319.1, 313.5, 294.2, 244.8, 261.4, 329.7, 304.9, 268.6, 320.7, 342.9, 422.3, 317.2, 392.7, 365.6, 333.2, 261.5, 306.9, 358.2, 329.2, 309.2, 350.4, 375.6, 465.2, 342.9, 408, 390.9, 325.9, 289.1, 308.2, 397.4, 330.4, 330.9, 366.5, 379.5, 448.3, 346.2, 353.6, 338.6, 341.1, 283.4, 304.2, 372.3, 323.7, 323.9, 354.8, 367.9, 457.6, 351, 398.6, 389, 334.1, 298.1, 317.1, 388.5, 355.6, 353.1, 397, 416.7, 460.8, 360.8, 434.6, 411.9, 405.6, 319.3, 347.9, 429, 372.9, 403, 426.5, 459.9, 559.9, 416.6, 429.2, 428.7, 405.4, 330.2, 370, 446.9, 384.6, 366.3, 378.5, 376.2, 523.2, 379.3, 437.2, 446.5, 360.3, 329.9, 339.4, 418.2, 371.9, 358.6, 428.9, 437, 534, 396.6, 427.5, 392.5, 321.5, 260.9, 308.3, 415.5, 362.2, 385.6, 435.3, 473.3, 566.6, 420.2, 454.8, 432.3, 402.8, 341.3, 367.3, 472, 405.8, 395.6, 449.9, 479.9, 593.1, 462.4, 501.6, 504.7, 409.5), .Tsp = c(1985.33333333333, 2005.25, 12), class = "ts")

## ----etsexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using exponential smoothing state space models."----
par(mfrow = c(2,2))
mod1 <- ets(bonds)
mod2 <- ets(usnetelec)
mod3 <- ets(ukcars)
mod4 <- ets(visitors)
plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum")
plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh")
plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars")
plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people")
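## Illustrative aside (not part of the original purled script): each fitted ets
## object stores its selected specification as a string, which the next chunk
## reformats for LaTeX; a quick way to see all four would be:
## sapply(list(mod1, mod2, mod3, mod4), function(m) m$method)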
## ----etsnames, echo=FALSE------------------------------------------------------
etsnames <- c(mod1$method, mod2$method, mod3$method, mod4$method)
etsnames <- gsub("Ad","A\\\\damped",etsnames)

## ----ets-usnetelec, echo=TRUE----------------------------------------------------
etsfit <- ets(usnetelec)

## ----ets-usnetelec-print,echo=TRUE-------------------------------------------------
etsfit

## ----ets-usnetelec-accuracy,eval=TRUE,echo=TRUE--------------------------------------
accuracy(etsfit)

## ----ets-usnetelec-fcast, fig.height=5, fig.width=8, message=FALSE, warning=FALSE, include=FALSE, output=FALSE----
fcast <- forecast(etsfit)
plot(fcast)

## ----ets-usnetelec-fcast-print,eval=TRUE,echo=TRUE------------------------------------
fcast

## ----ets-usnetelec-newdata,eval=FALSE,echo=TRUE---------------------------------------
# fit <- ets(usnetelec[1:45])
# test <- ets(usnetelec[46:55], model = fit)
# accuracy(test)

## ----ets-usnetelec-fcast-accuracy,eval=FALSE,echo=TRUE---------------------------------
# accuracy(forecast(fit,10), usnetelec[46:55])

## ----arimaexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using ARIMA models."----
mod1 <- auto.arima(bonds, seasonal=FALSE, approximation=FALSE)
mod2 <- auto.arima(usnetelec)
mod3 <- auto.arima(ukcars)
mod4 <- auto.arima(visitors)
par(mfrow = c(2,2))
plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum")
plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh")
plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars")
plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people")

## ----arima-auto-fcast,eval=TRUE,echo=TRUE,fig.show="hide"------------------------------
arimafit <- auto.arima(usnetelec)
fcast <- forecast(arimafit)
plot(fcast)

## ----arimanames, echo=FALSE--------------------------------------------------------
# Convert character strings to latex
arimanames <- c(as.character(mod1), as.character(mod2), as.character(mod3), as.character(mod4))
arimanames <- gsub("\\[([0-9]*)\\]", "$_{\\1}$", arimanames)

## ----arimafcastsummary, echo=TRUE, message=FALSE, warning=FALSE, as.is=TRUE----
summary(fcast)
forecast/inst/doc/JSS2008.pdf0000644000176200001440000064345214634702030015263 0ustar liggesusers
[Binary vignette PDF omitted: "Automatic Time Series Forecasting: the forecast Package for R", Rob J Hyndman and Yeasmin Khandakar, Journal of Statistical Software 27(3), 2008.]
Û_ ÷5:ÉŸÀÝ­Å5º¼[7éÇsø­/Ó RQz¶9~k¹\³ûÁ*s˜{æ:v'4Ÿñ¯1Û†šæƒz&ßßgrµ 5Ñ‘é ‰$ÿþÏ>=>¡4óØª¸úÆ’ªŠb¾ùݾ¤:¡½æ’ËÄ•e°¢¬°ŒV¨ y=7™Ñ8ddx¨º8Oý*ª/,62ênÈ/,É«ËÛwîÀ>£ñõÿkàܹendstream endobj 359 0 obj << /Filter /FlateDecode /Length 3272 >> stream xœ½ZKsÜÆ¾ó¤xJ¥Š§x¶Ê Íûá›DZ‰dI±)Úª”œL®HXû wAÑüÉùéî`g@,Ž’ÒAX`ÞÝýõ×ßð·^‰ŽÿÒÿ§‹=~p¾÷Ûž ·é¿ÓÅÁó“½§ÇZÀ›*ð N>îÅ.âÀ‹g\”98Yì1)''¿BcëŠÆÒT\yèpr¶÷}œðŠålµžN¦øÛ o=«7íä_'¯pˆ¡\%—ÝßL¦ZëŠsÅžM¤«¸°–]eã¶«EÝ60´R ^²“f1K?œdï²–³|úuSüܤ.B³/«hÅy“áê›ådûû<í¥<»©°²’ÁL•©¼µqKoêOëæ,[Xý©ùµ‰ÓæKþ:¾æ\²÷8PÎÁQÌ&ø(÷Ý®‚‚¥°ùõº9¿ sRp4Ø[À ®ôFUìï7Ëìm¾ E½Lë1‚¿ê†ìg&Bð?Oªn»6ß®t¢ò\u¦{1q¶ ^i8É À9,dÖ?öOt˜ç`ië9{3k/VÜ´€Eá¹Xðº`Y½<Ãü@yØ=˜êòrž­ºI#:Íê)-p*-¬ÎÀù U$.ŒµÍj¹‰Þ[šKªJ¨ ºõW“©Q°ƒ`˜ZÃÜʪ*èÁ‰ÇcÓ^‚å9û]aƒ.Ë^­.²Kü a½o慗ݤy¼gM­à17ß*·Úl©ÊKö6 d`‘ž]ÇÞ\äŸäÑ^)0Ä'Ú•#—úñø5Ó·'{?ìñÊP¤¯wƒö™÷•‘m{ùÍÓ§×××ÕzõËÅÍò <ªj–WOñܧҙÊ—Ìâ=™å#"DtjWõhœ`rãô pt°b\OE³ºœ¦RÂì]0þ;¾ÓV±yM†F«Ëþ†‡«Ðû4û~"=•Bo-<  ÀMìÂ…`¯ÞtÝŽäœcà@,E—xñŒ+¹ Ïà«eÏŽ_¾y–Öøf5™âJ„-C¶p¢y“ûÈyêí5{¹$ÑR¸ˆ ¢¤tl~Eï±0c>ìäÒÞ<„«0é4¬´FZÆ]… U¹¤%mÖ Åú é!.’g?nF0Lì5¤ˆ<„?(¯€òÑ:dˆ áØ·Y8þ~ g ¤¼*­¹nãxtE~Ìz·tvvŸ”au™:ª‰@È ìO£¸)´¯´ë`ç%Ž …a–íl½¬ñÀê9¬Ó¯¿Z]­·/ŒÆåàs\J%à"–‚=†€;𲀉)ƒƒq&ΛØÊZËS†fÂŽA©€Ó¶‡R2rLo:¸}PZ^9é19Ëcò†ʇi ¼¢8;ÂGI¹›ÂLR˜yèñämÝ|ŽF‘ûSבúeÃî3èíàÖ·Ø‚&Hõe }DÆÅ›í^©a{‘{x%X8ÉMô&0iïNʘ‚B›d ãd;°Îìó6†°ƒ-›]ç½u©+˹ËM|L¥Ë¦LŽy‰­€2øÞóÁz?OÀUŒä”ŸÜ>´EKz"_&]ñÄ‚ÇyMùáC—°Ÿ½­Ž§ˆÑ­—ëÕ¯³Ó¶Z­ÏŸž­NŸ/g×›§Ùœd@˸|ò”œ‘éK “`8 ôìÕwFÜÓa¢¾KÊ–’Žù0vhÌ Ù³ç_§ˆÛâõMt¾È*ᑺzv|„ÏH7l$‰Ž‚PK¯ž|Ék@ü‚`X¨¥îë’‰Æ÷³îvê2¾ÎJâ—ÇPÒ¿Ö©‰_Vý‚M¦&°íÃy½ÙtËOhšÖuTPäzQÌt+;™L ,sqa‡×cÁ¹}IÐ[8MÛÍ•Ú"ÞÛ@aÿ0@8Ôδ_MÛ´¨¸ˆÏSíÅ,}Xˆj ³/fkbÝh@üÌÞõk§\£X?  Y¿ÍfE püÓ&:Ã(Ì`Zvæ.˜q•0[˜ £83’œ'j šØO©(HE¶$¥ð=Äí-Mª} ˜ÚàÀ(>UäܯW1ïeK´@S|eÑX˜Ff¡tŒ¹¡–ä’È ±~7Ôø³N'ìãÒyƯ[$ðçtV'dxC g§á ²K³5~S›å+ܽ䢣†?Àšp|”5È ^0ï÷ú'›B–\—ô"Qù ͈gœÂ'Åøðáá6NÑX6±ÀI°ÚDppI¤e@Ë]ȹ쫉¶‰‘í©ÊØÍ&µ¥¨Ü”ƒ§m&Tñr_[ ób) .m’ âÌy»«=õø©¯ëu Í[äkâq÷A.ï¼éŒó¦^Öç³ÅlÙ&,€}wÚÌ–=@ÌÆ=…óJø;ªáœ×–Ž0r“¹ŒbJˆ}%L§j˜bGS©*°TîC=§*&ò²wÝ쨀úÈB…vƒrqu™Ë.Å·è#b ÎNª¨ã’cà/nq2A‘¥„ê6Ûć3½f@…Ñ•ò.¢$@ãÙnå8‰òбgøèãØk‚>Q/ϯj $i%9 ‰6ÔŒcÍ•±ÌåçfÝùÀj=$64(¤Àt²”‚~ŠC«Ðdqyµ³š [YØPÜ`•ÜÈÞáF ÷w(“[}« Ö«BÛ×Ôãðp`yI›u‘òî¦ Êç][C[ÏÊù"Q”xÿ5òI§ùS3¢-T‡º(šçY,À S]m1fÓ8 ÃÓœLø2_ùó·Û©ià0Š˜r7åݨâK±y‰Oò9ªÆ½úD9[s‘éLv…©qšÿ_‘{n+4J~JeÜÓže:.rO3ŸßÀÑhNA~„†º‘ ’2Г“œÙç‘Ћ—@ª+Íûø¶4|à.«_RÖ‚¤­ õûhHi)\öPš›ý¦WÆ¢s-ºdáv$ ­¼¹2w*N𿞢¢Ò´!ŠJƒ»SnvšeâðêD§îq4í þ{¿ú¡Ã6ÝhJ7ÒD>a”Ø7@þGqÁs@,›Ë‘ú­×ˆD3Ÿá³ ,? A|…„j­ýcnC¨ dHƲ#$éJ;H!ÃÓ°Jô.!Yñä°ê]F¨œÒþ®ÔãºÌµÇ4Œ~?;5 {' À•P÷i|·4 äÄÀrß‘= ñ»/zFá¤:ê$¹äN ޲¢ÒÒ°¯C='ÿ?7 Xr ™C peüR`à»ÌÃG0-BcÁ>Û¢´F Ï"d(öšî´(Fïµb‡t¯PöN÷Z‘!†Ž‰º¡”·ž©¸Óâ óN°(•`Û\¬«c¦¦–<°DOÀt˜gâ=Öó›[B"äêp½êŽ'BèW›íÇ{TF\(lù,Î-Õä¶ïL¥{(êX‰ýëEÓ¶#|ŒÏÚN†—–¼¨sóv•u¬sÿ€ÊzW™+ ƒ¨-—{´¦¾½ÄLêú]Y…dM.{¥Þ™TL»Jå ”.>ìv¹„»JéÝQF¹#“º¦Ø+ Ò Þ’ǘGLMÎŽ?cÛh´wiËá )p¬1ìÉ & ŒöH=Ÿ €ÔnÚMbùeÞì4¯Xsþ²jƒ±¿-þa½Z¤íB .­iÓÞ RŽš?—~\Ün¬&6ÛË‹X¡Á¡­ëÓ|¤A]]#üav¾®Ë(;KºTšßgv\¯â—w†ºby?¶)ÞÇß`å•°÷ó·ÿãÍÕÃÅ%±[\ÚFYGÝ:¥Àp·ÒJ 79VDûmU^G.í5»™“ø¬,Û«÷ølèd)õ¨t3,¨àú‚Rš)Ob ‚z¶*àÿöM±!ÞrMHA¿‰åéýQ½èd †µ2ìMœó¶ÉEŽySÄøH¡ÐÂè|c=R°Þ.¯ï»±ô|÷%ë´±À-äü{œØìvâøç9sb¼¨ç¹~ ½~UÌ ³ïd’¹¦°PتÔï£ú‡ÒÍ|N–$ ]­~÷Ÿ-]žêGÔúþšas³X”4cMŒû[×gúÛºÃð—° ªÏ›ÔÛçüM·.O–Ð(|Bf*2Ç_²é’ƒÓ@R¾uõß—i–¦:ú¼hÿ)ÍFsšûÛâ"-‹¯y½Ë¿ÚÜñ×ýQÚŽP¤5;,èçºíJª]Œ¨ sjP-yiœe v1< WÑKpÖöªíoT)ã%¯Fá`¹¢û1Qì8^@NŸ}nºzŒL0ŽÞŸöŽoݨçû > stream xœZMsÜÆ½ë¤xмe¶Š c¾¾¤ø![fÌȦh»Rv¹¢î.ìR¶òoórÊHwÏ 0ƒ]TJaÁÁ`¦ûõë×=øçaYðÃÿ…ÿ¯W/ÊÃÛÿ|ÁéîaøïzuxrõâËKÅáNQ•?¼zÿÂ?Â?´Ú•Ô‡W«L”³«À`c³ÁB¥tðÀÕÍ‹_ØûYY”ZZ^V¬í׳9þvÜÇêÍvö÷«sœ¢J§¶Ú‰8ÅW³¹Rª(KÉŽgÂ%7†=$ónÛU½m`j)%ÜdWÍj~XÁÞ&#éë»&û¹ pžž9QT­82^}³ž ¿oÃ^”;äªÊoŽ¢‚;s© gŒßÐIónÙ´·]}ÿa&9lÌ(ö‰¦V`[nÂ<™æÜêB(E3•š ?Õ±_rV{㯅-Õh§›v»ÃµHvröæÈïµrŽ]´³¹p°mí`»0CU)k9# ‚Õ•fç'Ñ4–ýÊxe«_gEÜ­Éœoª¢÷Û›ûm³ª—ð,Ʊ¯›eêµE׬{£e›uð®ŠÇy ô¿dßÓ³¬ ʶ·&Tè’3DL#­°dcæ¯ëå2n»b¯’…¬“ëÛeöÔo8sU É¢•”M/o‚U%g§Ëæo•¹Ð¢°¹…46úŠ&s8Ù]¬Ë5»ˆÆ%Û:;e[k mTo•·Ûz‹>Ò°”Ò±·÷3Í)Fêë™F/²Ú! 
n¦»Ü±›Eêô÷P¥Xû>Î(cá/ðû[ðÖb³×[ܪÞ6rWv÷Éu×döÎq:ÿi†¾ç¥xYß[WìÄÿÍc„…¿e3&.ÐÂ\Ð]“´0ìCj„wÉzyã½SÁNN£§È=U‰î ²×Ëïg#á©®^ekÞæ;SV“†©3v‰á§+ï|¬ºý³Yóº]nç?ƒ!p9Zì¼}ó§ø~;D¼‰”±‡—ÅŒKã5ûÃ^8Jø«ªªèõogD_Bj¶†È^!·kü0‹âì¼}èðFÿ r“o-ç\Zû©R~=³`i'rœ—h‚:l庿"6ÞÏ&¨·ŒødG>ii• ) bÄ0~Äh’BXîlÜ®gÄ(«€9Wlö› Ðày&cí=˜ ÑjÖ>$ô½l1)K´üÓQ¼6ì¯Íç!a»ŒÏ+ 6MƒÙóñ—%—€[Q–·q+ØË«1Žp,¤!¸ ¨ P † r©dÛ:L x~0k[WWñ…g%;ËB7C|»J7¶i@áAHü÷StÒÖ>¤%`,Ù:ÜçH ¤ ïmˆÄÙ§}%áF’轟n14ª‰°°&‹ ]J­v¢ÂfQáoèÈÀÖŸ”N>‚€/à&dgŸƒý!£<|ËC3¾ú a´RÕúZð-ËH·ŽC€ÈôOZŸò4ÇLŠøW&ÂÄÛ¶[Õèd^ÁæØ¿ÃɹÈgoþtÚ¦f_OBmég€ÙÙeŸ?®âT•` ðA ¹;›# ‡åq‡iŽOâu*=•«…Ò…Øb‡Å‚3'XŒBßÐkBù VŒI¸ÿ]¯1SÁ»*/)ÁK®âÌV±ÛÎ#†-€„ÚõWÁYyXÓã ¡CÖçpsÎyŸ‰†añ7q´ÂÑ`fìsßy¹kh=osÌpöã´, mõH@dÕ®‚„•(]ªÇî6_ž¶Kb+a40ÞQHŒ`•“*«6¤H(¶½ð:=0æœJˆã`™áewÄ5„ËMh´ŠÐø}ßs\”•D;g5= ¥ûØlÂxnØåñpMjƒO m>„ö/&"Á˜kÈÆWä ™¼í>á(Eƒ§"Mr±Ø~~R‚ÝLèÉua]vHè'âx'Ĉy2Iù×ìo¿Åûû[ÿtÛÝE”ļ’iËÓzÔ_­G@æãL£[ šj¶¦üõ‘‘xûuÒÜ&[É:<©âäU¼ÖÄ>sëJúËcLa4§5ÓEo½ †‡È¨uãš4óZVëÔÛ‘|LM¿¡·0$çy_km¶µ,uÞ6Ì âÍÇžzǬꇸ€ÝáOÆ= RªÇqq–]±—zÄÛ›í`Ç÷I1<»«""Þ&æÞoÿ7ÊËA)¤ÝÖïhëZˆ¶fFL([6[ߟàPÎ;„³f¤èÖ~½! …k_3O꓇M³ØÖÇ/édõ,…ƒÍnb>Ñ€Ùu»¢ÆH¬¡hl Ç\ïpf*c„?¢”Kƒœ †g‚ÁG“ö+³®\´«r¶=ýz`ûÅ®¯Ïâî v4‹Gþ-1l©xÅÙFŽˆu®EEàz9Ý[ÈX+Ó«AŸâ  Nü64!€ G±Cñ¹Üž6áÊZµýJ5»È$ý¶I9pWàW ¾ü8Kä íP¹Q1ˆ™.gËM¨í*ûA©J%“¬í"ÝVX*Œåõã WO+ÜPÚ}–• “Ê'îX;UÛ¦FWò3ñMY˜Jäíi×n¶^–ªÎÏÏðÚ5Ê„¯º4©Œ—¾ÚpÖQ¹jr…žê*†D‡3Z4«ÇYß*óN 7Â×x UàBßù±Øl: ú€À¶]» ³ÁåÚ¸ý°W ¦‰Ä~FÛð0Ð^î­F;Hrʓ݇‰oè,ö wÑõ—±¸Êj@Ð}+9B©ËeøfÑ£¯€RPê-ùÃCÝî— …v­gò}À¬@žY#žLñ&ý•É_g‘NÕ„«d©"HÔ`R@zÖxÍ* D‰P9•ÂfTI:ðÌ×kP²Å{|(’3fÚ©’T‰âV±ŸgœS§‘ÒwéÅŠó„kÀ¢$V¾ - ¬‰r…Þd²ßw¨ñA—@ ±Okƒr}>¬4OÒ>ÓZü° ¦1Üiõ•ŒMZXÖƒï„HìÛЗF“·y¶€*ëã,.sáÌ4½[ÙÏ8‘ØÑ0¡Eö[³ý`ÕP»ù–ý˜FXã+vê—¼ÝÆP›"¨Êº¿Deá„¶ce±Øv¿Ã „ßþæ›âäÏUMÆÞé¾<à¥å1œ+œ0#]~öÈòî]Ä åoãÓÔQ¸¦ÞQ¨@G]»áAß´0“ƒîÜßF€Qƒ`º~M|T¿9G âXåûŸIaž’û´iúA‚½ÃÊÎU½Ê Ó¡Pœ<&Ÿq¢øÜ©´øÃ39°Ì›ßsá} `ËÑUжr¬É?Ž©ÉYËì6¨©qEu(뫬²î22Ûlz‚4ýÚü@\ÛðëÑbKóSZäU$Dxm}®‘>_%ãßâmbB_¶ +¨Eð2ú{è€R|M‘ĺ/Ó¶ ê*á[lWñìÙ!G¦+¾ÂâŸiŒ›Á´#£ÇtJ© Šþ÷὚ïIÐÓP»uÛ§TÁ¹Þ©}„AQá_bxŠŽª:’‡P=®ëA<"ÞÖñТ噓©ç F~; ‚™ ÊÈ+›ÏÀ”rœptN×TøŽ05\ Ã/²¾õ_F„áÕÿJÿ€9‡Þ¡·~.ÀN‚Uç,•,(`1Kb{ãY㽄¥ùA~^ù²œ:>ÁëB_á`Â&ͬˆ¬im:ðÓ¤–??†Z–?%2ËxzÍ.êu}»X-Ö[VUH=V‘o¯›Åzȉ‹½Ð)Ì…}~^òg4áÁ;å !X8–Ö\(hà­NW#1øÍW(þ…\á*ÙO3Ú¤?0ì:_!+$7¨=?›jâË šå™GCa8ÜÿvÏ ïy_M‡gú_$ƒr¤ÅÜR»ÑçsÅÇ@³ŽGº?Ϭõ î¶»£Ï0 ÷CKÿžuÀ¢ç-ÅÎæ•ÿî§bŽ-/‘§.©Ò#fza3ª7IéãN«p)ò® ¹Çm™iÛÕj?Bû=Î^ççïÍüÎìiõ%©Æ³f'î;élþ›ÌçóEQ*ÄH}3¹†Ü±}ŸrÇ.þó?ò÷ÿ½l¼t¤G”—U’N²ðÀ/”VÁ*¦TBù•ûZCéÇbé!ùSÚÀz˜â„Ͷîšz¯$sÓÔ¥}áEgåÚ\Ô]Ý÷Ä’Ö;ËhÿÉÓqÎ3¦+ý1ÚË]DE¬ËL—¢°¶–«t;—Çoö‚øYóá#ž©†ÈÞ÷ñBÉ᫨4ë½:î;KÙ2ÞŽge&_. 
Do …=Ÿ„ˆ¶ÝÃïp–ŸòѨüSíò²ö5ñ#b|²G9X¨;¢Æ]Ò¥L ¹œ´<_áñRte%ÂÌnR‘¥ÝsÈ Á“ã ÄÛšYÑ3À¼«T9ˆÿŸ°$ J9AXû~ø°Ç¯9@æ~“õô&ËÏJÖ×a{- ÷ÒR`²zN‡;<Ê¿”§²O©ÇjÅØ»<å]ó\žê‰*6­”>7à™ ºØ“ƒ² û8À$X^~‡×jhøvü±îðÑÎîÁÕ>ºà²,Ôp°šxøñáÎW¸YD‡å@À|ßµS-Òô!q‘ô6îÞùÚ8Ý|»lo?ùBÄ»s3¡I„×±O~fÛ…\6œ6àÑ™ÿxŽ“Ãgz8qàÕg’–²4Ÿ_H>òmMÚàUYƒWKy ¡€ˆßm²’yZ|:g§—ëÀÆyéxÞ'KÃ.AµÖ“uh|^*êk±ûã3'BÊTìåå4‡¤ÑÕ…Î(…Å",Ád¸ÁÏ…ƒ¢§÷‰M{€Ú¢ù}t2þ¤³¦mZœtUÝæ?7ýŠÄø[á‹v;ª>$²cßæ†þ8Dªçݬú¶ÿ0ê‚nOõ$]Ì~SŸ™‘àïÏNߌ½«÷a º-Ÿ®@‡¬™îÁ¦PåT?¨ÂIÄ««?À¿ÿ;Ò‘endstream endobj 361 0 obj << /Filter /FlateDecode /Length 419 >> stream xœ]“Án¤0Dï|0Lw"E¾$—6ZmöÀ˜ˆCÄÌöïSUdrØCYzØîªn™ÓóëËëº\ëÓïýœß˵ž—uÚËå|Ûs©Çò±¬UÓÖÓ’¯ß¤5[uzþ5lÿm¥Æ2ü6|–ÓŸØš>5Ç¥|žÊerÙ‡õ£TO!¤§yNUY§ÿ¶â÷q¾Ó¡nl0§CÝØ§t¨;`Ó& ø@´$…0öD”iT*ëpIRè&×w‰R!´™O*à2žvg"ÊPØ-ÀØ$)„ÈÌU) 3Ç.I@ƈ1IÀH„I”Q¤QDըʑ•{ Ž +&½Œzv¨¶iXÙàiò5ú> stream xœ•XgxTeÚ>CÈx€ Ì žT ˆâ"M@)ÒB¯BꤗI&“:½=Ó{’I™ôLH!¡$€ ªJ±.‹tÕU?÷ørù}ï€îê®îõí•+ócæÌyŸó<÷}?÷=jð ŠÃáŒ]¹jU´(q¹(:E3cz帄œ”è¬àGOãÆ <’ŒM?†ÈC!,Ÿ?®iT`Ô#è‘Èú05ŒÃÙœZ U‚Å[œž!É&$ŠÂŸŸ>}fDy¾G¾hZøëÑ1ÉéâìdaxtZløëÓVM _.&o Ã'¥§…ï‰KŒN‰Oß·%|ãú×Ö­_ºnÍÆÈõ“§ý~‘¿}7:%#1zOœ(:.#[˜’ž–š‘(Ì&¤’ËÈ)Ù1Y EQS¦-ZœñjfV¶(W¼g¥$&?vu\|dÂÚĤäÔ—ÇÌ?gÞ°°'†3ÚMQOPk¨'©§¨µÔDjµžÚ@m¤6S[¨ÅTµz•ÚN½F=G-¡–R3¨eÔóÔrjµ’ZM¤<ÔãK §æè9À1pŒÇ̱p¬Ô³¤ùÔ`ÊÇÙ1hè KȦ»ƒ•¡†:¸C¸š‡¶<ô9]fߨÍc{ùü·ÑÞrð,sâªx.¯[“–Y=ª«s> @£TÉðßî}ÀY[´K[¢—äÑ*‹ÚÞà7A#³Ú´-@[=Pé’”¬È˜m‡:p[]öR›Ûo!šFf®1·ºY+.’XZ °ÿ  ö3up^[Gnà†*oDj–pÑVôQèˆÿ´e.%õ"|ôcÙ…À[èIžSZh ÕŒxiÒúm@ç+5uŽZ‡­ì=€Aýæî®Å3¶­å3êS{üQy±2I°(4´Ì à4|VÆ{¥êH;ÐU®â¤\­´x3»h U"P $¥ò2w…½ÊÆŒîÃÖ:ß§|«Ïê³ÔxÞà—ù{?ûènG|ÒªmøÑ‚ݬ\¤/)½£-ñðÝô´!¥ÿ]J]ò¡N?g`JáÁÕµ§§¸¾¯üŸëð}}ùÆàO¸yÁ^8 ªAçe+t=ƒF¯Õ–àq÷Zù²Xb.yÄWU°h€‹žGCÑЯØxt‹7óÅ©xƒŸæžCG›ÑtˆÇ½]¹‹]ßáMz~ ÊàñÜ"ÐÊ 3h `-o£>ZËý²rán–T{¾›ÊñDRCJž³@¥Ö)tŒôññ T^øâ§_tô{EÞ XJÎ?™Q–Ÿ•Eηª Æ%­Ë„í L›œ¹%sSb"‘þÍ}6¿¹”=€æZ|ΰ *ÈJ$ ±’Ñ*wàõÊLeªNB5A-Ú”š ô¸îm±tF#YW³µªèmSãS~ÃNÐŽr}r–4]™OÊZqôô7ßñ†Ë]LýŠ·UÍð:_ßßz³ƒ#AkòàPüÎá~$%B6“Ùg›Þžµ'-%•¹ŽN„¸ŠE‹ºLF¡—êÉDéËŽ•2¥\¨Îefã÷J¶j Q0µwÕu"ù#Ñdôs_‘’–E%§&5\¯„Êòó¬ïs«Œ—àèÅî²z ÛRWüC5™~¤±xf­M.׫ ”Œð¹]˜‹ñ{JòX ƒïh7¹îÝl‡J·Ô,ž¤ÅS¨²ä¿ZZn-Àê}¥ðÍ.rvé‹eŽö¸}€ žºàê äá™KÁ6:8EYªPÎèÔŠD<‡¿/Óî~m9¶HuôxϵÞfË÷“ûUÝß¿NïnWsë/àËTæ±1$ë`ýüùÔ3­ÞÆF¦xsɶ˜M™Â¸´((‚¦üÎrð@©Ûép3Cû3Ì2QRÁ¶­‡âÏ}÷Ýg(´1È”Žd´¨õv—³ÿzH`ÑßyV•ìJBÅÌ„9PôÊŒ£ýû«þÚzŽõ÷Î T**ô©nW,À Ëy³A*Èbò“_Ö TR}ž’ØÛ”N¹AÊ®5*|P6ƒÅäBü@߬E†Œ÷nñmù&\&ÂÈðÀ ¾³«Âÿ¾‰¶¸ •Áíy¹:Ç šããô\EÞ½Éyò[ÜÙY¶—Û×oT_n¶œRúPÜþ%8?;/žxfîWð·™xOâ`³‡Û‰¦~””à iÂà‰w]AÖVÞdøÑÄO*|œÞÏÑ[Ÿ‡Ž<Ì뉪Ù#\)™”ʨrµ²l<Š^ò‹é©d/q_¹ÇJ¦äOß  dA^EÉÑetÄ—™–·óPöþ“]göU0-çkÐpƒ­]æßúe½å³âºNëu®ÓæêÒ+V·£Ò[V fÓJÉ., iQEIe}“o3b` ñÞûêÑy¢yê@ïÝå×ñX¿ûßX07˜X4ÚtA¶ ]”C^|:TXƒ•=‡L¡h>=ØïÑ`vZ-bÅ/Æ&lݹF±‹ q÷-T÷³-ÿ’ûQoÆ’`?èþó-_à çÚÕto…" ä®`w7Ù– FÒ‘XCîÃ%°ÇOM:yõÌáÃ.'kÐ'a¾F.œ4ydHw4;\õà¥Ë¥•9É’˜=â/Ñ#Woö—1M1¼íx½*S”•“'È•¤HÒ w;6´áÛpô`× /•¾¬V:h‚tc"vñ¯ïÔ¡‘¾@Si|!±)M…N­Ò1Ù¯®ÍJ$®¬°Av=w=ƒ–ÚH£' OÆ)8‹q~ ~=U×jvu2šUdÃÓÉ0ï#4ìZábLD<³ã¹ê6ÙµE@oÚ¶u³ØJ|Ù<¿ð&| o×£'n êÚ·w{®ý͵yx‹õ÷ð P¯—i2¥©JÂí¢d8ص¯ëÀÊìÕAzeÒ?ݺŸþ&7nJ~uïscùÁn4 NÓc¼²r{JT$0a ”WYäÍ¥ŠRÅ¥5 õ L4¾È#6¯”„¦¾è¶­×ý¼øC¥©;Ûsö$Ð¥šè9(âÞ¼Û×­ Aâh¶>WÝÇJVZnBLWΑËwz¿® Z1º,¨'ýŸºtïà›§ïCد\¼H´… ;¯ØÕî5טËY³—ÐÐ W6Írù+ZZjjôö< V²óz·ŽX •Qz£n»V %÷í6áig³hïÎô(ŜŌ0.6&®ðûÀËüÿ¢éž2‡ÛÊXÝh•µ®Uûa,%h´2]‰.Ç#»bŽ77”wø˜ºC®¾ çøqП902ˆŠOþ3**@!Ðw¯lÙµ)žÑ{´­éµYÕÉ–,˜ÛVä‹Ô}ÑÍ;É'‰Å‘¿—OÊËþS>ù¿/d)Ó©l:¦eñz•s™ÀRãÜG"Gа©3õB³¯Éœ¨WoŸþ2¶fï=Ñdk´5Þ7èÕäJ‹àŸá?u4‚BW‰·Ú]a22f‡çø>4­%£[S—¡³»¶Ñ±³#ýÐo×÷š1íèBü8æMê´þßž¿ZsöãÙ´7¥è^ÿÉr/íäÍœ?_J¶ÕšÔî GëÕrŒ=|íD% P_;/Öóƒêüb5…æ¢>ô“ÏSƒ,5£ÂsŧƎ¨B«yŸ)h†ºdb‰·¨,ßå´’?{Wsó~W7=úG“Çèv¿´IâËh‰®Ø8 ¦ãÇ…x<žvóf ®Ì&ƒù-ÔÀ÷®í‰y úàlåñ}GÚ;Ž•öÒË-¼’¨œ±‰qI;òˆ–-ÞsòJ¹±œ¸Ñ~¤nA£n"‚Vh,*Ïtšò! 
tùê|yœ$.%M¡”+åj±]dɆˆe†^½ÚêêÌŽ:Æ#sÛ èÑ/8sÍ¢¬q®NU¦Ñ«P"¯P]¨-"2 f“c/Ô’ÌáÖ¹´ÁDte-•ÓƒÖúJ}h|'0üžUmV©µ­Ž)eÇ“'¥WÏ*»ÒÚØ{¶™Ý[ÔD¾Mןm¢).7Q—ÏŠÜù•Ð@ŸýªýÓòvHïb\ £Î´§ÔSZ¹zßRü¦–-ØÝžeÖ²‘IÉë!Š~½oõˆêv3Íâ*uú8R_ äYjìÕ/ÔY_¡&õ =B|AnàëôS]H +0ûþO…ZÉÌæ©1ÏÄ<­÷ëoÄþ5ûlÌÑ%Ĺ<òžš"s€“5[:ÑCˆcp=ë‹ç®Vç+Ål΢TLC&-µ(í»oŸ‹©ùØÙgö=FÙÓòõ4¹ë=›Ûí#Y†°þ:…Wæ¢dq`K.ªsŽ^ 0C—QÞÛô9f§˜o*%»ÛY"Ókó5 Ñ\CÐTÔ…DÌdü8Çq½ªÞïºÒp´ãaðQëŠuÛ[²'±òdž4*wVÜâÄ¥qÛv”Ûtç€õtýWÁêýT‰oàEçÎE”R þÌ3iÁßxrfÅÊøBâ+w¿i5ö˜,¬Ñƪª> æx çh/œüÓB<[$c¶cN xØüE|V« ­¶*¬“‰„r»­ªÒnwÎmZx º ­¦º¬Ôy¸œ´3pô{ž‹oßjtDY}hHÉê® [!¡€‘C—ËåA©>®ohÿ0fèà v… °a>ƒÑdrËK e†°°:C+‘]+Y¼VcØpŠú?uÚJlendstream endobj 363 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1705 >> stream xœ]’{PSwÇo¹¹blIn/`´7±ÒÖ¶„]+R*,®Ï-­òTZD@ Õ&ùjF1‹43[g\IûÂÀb(äºfšåoÜ¥¡ánŵ“ç;ígú.@# ÌÊ=ÅG’AOì7ç¶ï¨îýÛžÓÞ¬Œ]ź±il ’l¥É8½ †k¾<õÅçýð¡(¥`Äè’D+sb Rzò-šâ+ÐK<½~ÓÞy@ùÝ”{`×âbk™;SSä<£âŒ¥35ón ü3 ­4£,òhI•¡žHÓÙ$A^fªž¨«¬.¿&¯wgiyžÅYLÖÇ_ZÑCC0PCÛ„€Ü[ÓéKèuåVU eÐL,™wâXŒ¥ÛÑ {úØn»+éÀÁð§àBr^Cn}IéÚQ‚Üu®ýŠý–ôá룞«Ö)k“[Utƒöhñ×û²•uK •²ˆÿâ¡çÝ}J¿ƒþMÁÎ=Q;T¬ø†ySÕÙqBzaâÔ7·.Fl¢ÍÏ$Ô¿×Ó¤»OÀù©çÆ'ºeh…oýÇéðÁòEs]³Sf尶Π¬*8&ƒœ½) Õ²ÂIwÖiÕ›¬+KÍ*à›/δÐ:!k`‰ØÚHcW:MþøÕQ£õ˜´SÓ™£Ò'l97^D.h%J¡ß˜¦ü÷zøD«šz²h’Ðô€íŒ”tqèoïk=ÙÝ8—¡c›ñpSTé» ã€ñý1´lù òÆç#ùóˆ¡Ì™Oÿg žŸ M#˜dóbÜÔf«t4ëjb<_ÇÕZjKpS Wû]ÊW‹¾DßéxŒtpAÌH™³ €f‡ ¯ƒf²¸ÈÆÚ„Yœ@3ÔÉáe6CslhHX×¼ ¨mb6šúXenuš\J;9†©EK@´ÔRºpÊËKË*ÇD"‹ÑÔnü´ÂXV^þ©h†ý•ÀDÖendstream endobj 364 0 obj << /Filter /FlateDecode /Length 205 >> stream xœ]±ƒ †wž‚7£†Å.mš¶/€p }û§vèðÿáîrÿýp¼[yñHA¿`åÖy“` [ÒÀG˜œg內׃Èõ¬"+ú›ŠïOŽÀî|W3ϪܯʽHKT’ò°NÙY+xó÷Ôî£=~V•$ κ5aMØJb›QI¢Êh$ ÑdIBĦ”$!бkCÑó`çyÆœö Çõ–ø•VB‘sRçá·µb®â(öÉjendstream endobj 365 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1369 >> stream xœ­”kLSwÆÏ¡PXÑmé&ÓSu:™Áé.™’øa¢l¢À¤ÊÕ*ʭЖÒÒôB[Þ¶´”Ò‹Pn´Au¨Œéf2çLܧŸ¶Ì¸,óË\ü‹; Ùfâ—,'9þÿ“<Ïû{ÎóâXl †ãø‹{32²ŠÝ2AµèÈÖälayCµ@2³>º®Š‰¾ÂÒЇîSséqÀa'vfûèóHõªX†Š–c,çW+SÅuJ‰¨¼BÆ{sóæ·’“™÷6Þa%oÇ&^ºàH•¸QZ%â jKyé›26ñ2ÅÌ¡ˆ·A\Ë;,¬T—ñÄe¼\áÞþœ]Ù9¼÷³³ö˜“´é©Þž8Ä0lqíÎzYiEeM †eaë°ll?¶ ÛíÁ2±eX<3%‹]À3ð»1Õ¬õ¬[±UqʸnNxcÃZBQ*„G_¸ê¼ÆŠFÑZ®Çà0ê¬Æ )Ý&(¡2xÜýž¨ÀøZŸßñÇRSøùÂZÒ8[2zÊA"SUK [ø„¾ÃìöÙÚ‡:HïtÏç'rI”­ C%%O)£_7«M0'Ê‚_OÀëí .wÄÙ7ðÓ GÀ3îø?Û@8<}fê c}òÂ\ÍVäSÚJ0–È>#ž¼3x>2á!«<‚A8úGŸ³ÜbÍýÝÎEÉë~§“Hú7¶R ºÃà2·µY´ýòƒˆ^h5¼‰ séƒn ýÊFå¨ #)µeŒ •úÝ·éØ’œwöÐñ@ÐIl½ÝÐn»ÛîBdôôŠÎˆÍ1ZÊF‹o|@'P ñÅF`N~ÏŠ¾A§rýЀ«ÀÕ ÑÊhÄÖns3Ñevëu‡KÅ*²¹¿æT#³„~‰¦è k¯d]?;1<4B•ÓëÕû mo[¢Ž™Ãä4x|v›šaß„‹ˆö©­è<׬K“‚ŸYbQ$;yþ̱{ýSÔð¥é“§¸Ò½Ýh¶ÛZç]êâ^Fy8ú™ÉºKo75·™ÕF²±ààÎL ä:8èèrø¨oÐdÜvÐ A¿š¨Íì1´†Ñt$ö©ŠþBÜÛ¸ú$cüo™‹åÕAiM½¸^P Ž …†H}™ëðÚÝ fK>Þ·:ƒæiç›Ües:È‘«ç΢ϫ©ÔX˜‡J~¯ÎÌOg¨«@Ýnµ´ƒNbPé“×ÖI*…'$.¡˜qÄ™¯éèµÇkf.ežà}êI‚[Â%˜–]žQLnîéáCdªrS_„ ¡$ò&³rü+Çáq Ú=vgøòT82v‡€8ÑÛP$5Éš‹©š£Îjm2Ö@ë[Z‹TK½ÅŠ'3yíU  ¿X,T2ˈû몤õµ¢PñÑÁ¡a2AÞMõtu¡º>v(þÚ2>6WÌY œ%Cv»Íîµõ:]>;‡sÜé°ÙmN‡ßæä,Ű?â72(endstream endobj 366 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 2554 >> stream xœe• P“WÇ¿’ïC…ø ý’¶®¢òP«¶Ög‘ú( X”·<@yÅ@( #"(HŒÒ4PÔŠŠ¨hÕ©uûÐu¥ÕVñnížàeÚýÐigwvÎ̹çΜsîïœû¿"ÊÖ†‰DNËW¦$E&{{yÆÄe¨"S‡¼­®"ë8ëx1~ f`›ìÅ`oÛ5n˜Å 1m$†Œ¢$"ÑšøMéšE)5© qñé //_a­ˆÒ(z*–FF+SÔiÊEdòÅRÏžŠRÔ‚3Aá–’¬ˆŠ‰TÅ*RbÁ1k«ƒ> R, X½2h²çÿUö—ce|Bp|Lz$EQcRâT^Þ>¾ÓgÌ|gÖì°¹n“C¦†26µ’ò§VPŽ”%£FS,5†r¦\¨±ÔÔŠ£ì¨aÔ{”=õ¾¨X”³€‚²¥bEkD6#lÖØ|-Ži[(q’h¤"i­ô$mGÇÑgé»ÌLfc—ÍV½Îdõ2ˆ{ñó^±U‰‰,ŽžôA§q$ì¯SÑG<{‚2nV q°:#<#,Þ…ù¥Þmó¾˜ó­útÁ ›®î¼ÿ`fU±0–è–iÝ“Â`¬4iéz ÎÃ%¸RÑU‡Ã¿ße‚f¸¢©s« ‚y°|¨M`¶¶š/©E¸ñ;±õ1f%í­ßÝ*·ž£÷AI:·b™/(QÑõBœSû™Á½tœ>×›Sª¤‡ø;`ìÒ¿Štýñuô{ñÌìÔùâÉ ¸ç,{i] ÙTéö¤üð­Úü¼!â/ýí_è…\OôE©÷Mâ@$ ¦ùu_{¨ªm¿f|^ë9ãå®ÖsÀÜûbÎl¹ì2¼»î½à$˜$ºètPiŒ•–±k¼† N0â°gM"ôìîÜ'¶†â<n-¿ñvíç¾Ýòíž:q^À¼DÃæÏÌõ†Ï:?-g>q¥º˜¶3Þ¹E«6…Ë“Bô©ÅyÅiEùWœ_ []9ÔrÒ[­K&“q~ÉáókÏÇËm7à0Ó²±>9M™îuÿC¡Ã½{ϹWTñ ³5CÀzúÚÐûšÍš'YCˆeÙÛô›Ç‘žÁ‰ ë¦ÓqEº\¦JÚÆŸ‚vÁNC›ž1gÒ Šv·ÈïЋÍ,ÑKQoµ{5'hódj/®3 sïOR3 ¼Ú©óiæ/3Ÿ"û‹³lô(y(‹Ãg÷*$";>ŽÃ5´Œi(¨oÎorÅ‘ßÜ«.ÓïÌ—Ëì(=l׫·­Wû 
ÏÃ†Š†ÐòOòøBaØm+äeËÉ Zy{ÊKø²RnwMk×?á˜B+45±%`ðHÒ"Ó6Än ……x:ë<#›?¿˜ç‹w»6ךöíË2%æÄç‡Êe .3®NA[dž?DåÕ]¬o{ïYLNíwqÁÝ”»Î²A¡k,Ê|ñ’ص!jne8ªåä¾ö‚§†0Õ>­¡æó]Gz¿œEFWÂ’M¤™Å÷å²3(~ø]äÞ;XuYqI¡ÛÓa9$õìÕ‡›ð)s£©ÿnÍ1Xaæ>…µ°~ht‹3­·¯7 ÝZ, ŽfÀ…%…RâEÕ…uLª4‘DILR¾f×WÕUåe7 Œ–[âãM\‰þ6v+„;G+¹“4œáÏTt•÷Ü®貫 J yuã‘&Q{gC'ê;‡&ô=ZtGcÚ77dW.³ü½õÄ)¸Âôùœõš4g§¦$¬ýnwnkª)kUòŒ pcÞ|îÿ%{ÿÍ-Æç,n ˆ'£nº„~^xj™ —Û¯}{iÍ"®aeÌ_3{Æ¢‹½çš.ýtœÃ‘s7O‚ƾª£¥ïGãW&èCoÓá!þ wq”À¿ÿ´PÔ- „þÚ'x½&,‚‹ K„ù qüy2Ú^hk?gä”4Ùòv説Õ%7rŸÕšÓöfd'mÛàßõ²‚JC¥|Z;+aŠ/'èn6„‚ê+M+tÂjoh=Ðy¸úƒæ„Ú0FÆPUÊÒå¡o¥YfâbµÈ:Ñz€%î8“¸“™Õ9(G å„’8ü!¶ë¡p¬=-¢îñ@ ö³õ›!‡|$ÍÙ juì•[H÷ÖA½œ|l+œi¹ÁRíë³=rë#éž¡3A±ó¦ˆ&ó¥V-zzNŒ?’|x(ɪ%Žè¤ÄU€€ ãB‰ŽÕC]ä™òB¾`«÷"[É­%6¹D ïÀÜÝdÄ"é!#o/­¦¬”/ÿßè˜-„oGž…:Œ’9ãòV` ôPT§‘?q;K|Ä™+|H£ˆÓ–\AãôLAiaYåݯQvž;‰6U(†™×Ÿ á-/Ý-"fcáYË”—î-düïZú5¦™xÅ‚WšË(þ5(Ò„ÍÒ=P§VÃfa×Lšéœ?á °“æÀæúz¨ÛÃa36ÑÂú¯ŒÃ…Œ¾BÆ—Z2>úñïîS¢…Œ6×f ÿÊÔVLt2‹ºûqz/Îéxà~%Ò{ĽŠé¦ž‡\³Y§ŸQ‚yBϨšé‡°%^D­b¸Áûàd-D'@Ž;—¨’¶ð}Ð&Ø}h2‘v‡êCòn§ TîÃ3†<'ËXòVæ9\„S•‚ÖL©g°DpB€VA¦j«„F.»qÌc^#Ê0b÷Ømu?>kÂÓÙÇC úXŒ S…á[J–2D‘Hô$^è‡Kq*úb7e © oÎòp#ãoãN¬DùGÏÐÕ‹”q~,NHM =SÉÓAìI 2 ÖEµ˜¼»Ì 5»:œfnoöà ;Jv””ì(/)á/ØÛ›Úø²’2žçw–•Ú ¨ÿð8øendstream endobj 367 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 346 >> stream xœcd`ab`ddôñ NÌ+64ÐõOÊÉ,,M ªÿfü!ÃôC–¹»ûgÒO/ÖÆnæn–Cß }üÄÿÝ_€™‘1<=ß9¿ ²(3=£DÁÈÀÀXWHZ*$U*8é)x%&gç—gg*$æ¥(xéùê)øå—34òó’R3sÒòÓBR#Bƒ]ƒ‚܃üC‚5õÐÝã3000103012²¤_Ã÷Ÿ©õ*²ïO|Ÿq˜ñõÃï¥w¾‹ÜaþñWì·è÷[ß™VÞ½4çŽä‚w†~+wsüneû­ñ׋õ!Û¹ïY¿±}çý}‘õñ ¢5.v¿¹€*TØŸÁú]…í;G÷€¾Ò…?æ|_8{!Û ®'Ür\,!!ñ<œÝ<܇z€ ·ˆ¦öñðl^ÙÛ××Û„}?æòð20l‰ˆendstream endobj 368 0 obj << /Filter /FlateDecode /Length 6144 >> stream xœÝ]Y“$·qfXocÿ;ýÆjk§Œû C+eQ¦$Ëš=ÌÚáÞån‹3ÓËî&—üÕþ ÎLU ÕÇLSr8öa{ªQ8‰Ì/d»`=_0üÿópÅ﮾½âôtÿ{ó°øåÍÕ?ÿ‡ñð¤÷ÌóÅÍ×Wá¾Â÷B‰…Õ¶÷R/n®n»ßl¾[²ži®¬ðÝö‘þÐ^9×­î—×RŠÞ;Ùm¾ÆÏ²÷Šw¤6Òræ»ýj¿Þí×o–×øÌqg²÷|ѺÈÞ[ ×3Æe÷q)l·Ú¾ÍzøÏ›ßÀ"Ï!¼è­v°›»«N.oþ|u­ ÉµÔ½3ŸÞvû÷4„rƆ¬”YÝíÖ²ÁïÓWÊvq\/aìî‡ð…âÀ”6ø—Î ­ð£àÌu0ßžIXÂ*´Á…î6áUïàÕ80Úú1ûã]™‰îU·ÙÒ_•¾È6 þjÚ>ä„êŠp@T.tw—=~-‡™ù®Ø¦¢£ûa)&öòÅïÂx6ß½o튷ë×8Tï…·U»¸jnº_oî÷Ÿîâ`ö¢ OxËcˆ)±Ž.û¼Z6²—½V^,n¾ººù§ÛnSÂ2­y°´L„ \@›­:"Y$Pq…dYδrázoŒBÆÔ²‡ÄâZôŠ;«‰?ï?kÖ½áÖÛEÖð¶ ”Æy0¦S:Ìp ´ä¾äà]\“Ó%‘ÞÆ1T1ÄêáCèG»Šm{¸_+£»p(Œ"4‰ ŸÀ“ýòZ3ª»iMÓòn•·Ï?¯÷ëïé 2k†c]ÿ ¸ ÇÓ@Ë}ÁLÛݧi+ùÉÜh e·ÞÑÖ_kßsÎ`³‰¯œ {ø.LÈûìTYe$R‹!pØkš—‚ðc¤ æ\±]ö1¶âšÎãKâzÍ \Lbî.Ž¡ÝtDZ=ÊŒ0;eºïr)¸_B±âDeÜ:3ö¿$3²GAË@fþâd¾/HŽdŽ«¶M¢R?Qu‹¨®%ä"ŒDµDÔß¾ZöIÅé #ê…ǦV/ãFo'Ã7÷”–§ É M6ÛmyÎÓÞÞL¶N›áÝ~“&mH•eoô+©°yxˆ5ÉÂsSðÀ¼¬‰êCÑšü¹Õ‹è¥æ^¢;pÖÎ,²9ÓUΓò¸Y:Ò˜¼K[¢s}_(âývEâ™ÆXä@㸜wé¹R'Yæ}m  ž} 6¸Ôy»Pf)1>¶ShIÓ]„+¸˜ˆ QÛï»'aȶ8†µ Š›ƒô²FW{b¹æ'2Š*“ le‚öq•Îûö»âÈ® ø5+W( (vºeF—*áúÍ÷ÓwÀrGl&œ#š? )[ôôm’ñCvP=r‘É¿*è˹DÁŸµt¸s6ž¬ÿn>K6жB”"(žö¼å/š‚@¥æ&û·ÍÎÐô<‰ŽWÝ D üIâ¬5!K-ïIå-?iÏŸ>&)¿1w eh0_333à•@µ.©ÝOŽKÔ‡Ò¶…³ñnƒ/K‚+?9Е'Î(iÐÈx+ë©ÌRáÀ."Jˆ»¸œÙG×+72Äù|x õ,ùÌÔ¹¶~¦ñ~~œ¯Û“7¡ìi“tê“çNÐH Ђq¦§™EV~Ø„ß;€¥ðJ@[ ¹§{I>´ý×íf™$ìÇ(Æ‚M2#m³…ÆŸL¨©´1Vœ*mt"êßµÈn!w¡d•>tö³YÒr´È›ORekį§Š'gÎOOö|†… Y¦§‰$ãOIîr[MÌXlõr^y{Œ·á¬\N‚Mˆ^m§–K]Ѽ㹑6žpÛ mã wÑÂýãœÉ²Ú@m5‡/4¬™ùE‰Gû4¨‘Zž5^mgz÷÷¢X£^À<ÖH-Ÿs˜Õ8ö"ª±œ|®Ϙüq‰tòRåÜŸ&‰ÜéØè4¶›,Ü]â@Ì£H³’0m¸>áÈJyºg˜i@=}Ö š÷ÖåöÄ]n€"n ]MçÃ|Gý²ø`²¶ ’½³nØlrËztߨiÇ+J_¹/(…/ ƒØÌ«è²Æ®´¬â4FJѹõžb«ÎzŠœúà{U"«þ]ÿby­˜§ÆÄ?E“ÝãƒÍ]qrfœß”£*{߯‰û|âªÃ@%yº ëbÈD'§ƒ×ÞõÉk¶­œcâb–…Д× ]?Æž$†:¤l¤’l[ôÎÍx«ÀXåÇd¡Ø©,)âZ¡6"“–÷ž‹¨Jô!ó~\Xõ2âÅTÌw˜0–– ÆŠñv[¥”ÜÇ<¯ÃFiA6íÓKÄ‘á 9¹ö½;]6£¬9÷P ƒÃ!6¼}RÄ#‰F. 
Pvï’ߎßÇ¢ÑÌB½ƒu¨§@¨£kæÑ1¬y0;ù#2"­ÞÇ|†éZ›”1¤$ï6³©[c’Ü‹6Xâ (­UÇÑ’?ÅÎÁP#^àJg_ Ô8AKv4üŸ"*޼ÎgfÐ'ÇËh è—чâ Ö85væ&!‹&g:× àCqVø(Œ €1öSæ¬S¥P¥t±]\3«¹L›2€‹ˆœ—É%Y›öÓhV»Xg‚|¼¨rœw‡8ÂÌËÁˆ¯Á,òƒ@³ìIîqñk8˜qÈézÉÐ^ņ3cpY\çM8’E62låÏöj„oŸ4‡„Up58pÚÞYÀ-RŠaóÂ&ƒ:6G7D.”È,ÈãZÊ«×Üö„Þ¥9ynÉRV‚NN2”«éÃáözƒ˜0¡0KbˆÇb’3²¨™dDÛê£YƒC°,;€>°i8•/n®þp²·õb;Ÿµ]M8fm+L|¹¥¬…;ÌÚî„ØíùýáA·yèt Ã&SLX—9w(sK/–qÙÇ9¬\üq’R¼ò€2´Aºc¾ÅwS90¦±:Nªwn¥çz* ñ¹µeºõ:$i§¤Ž‰#‡ñl ¯cš/°j·›%hT O× OòRC³ÖñÉà ©á‹æ™†m!­_!]!t¿ÖÙek0t–4`DIÆ@]AÔgÅÌx[“‚ºìâsŒÓŠZc€qXà3³Ž¬MxÐìÚŸºÙѱ7LêE0;0•¼Ú€x6ª|­”ÇÕ@ŸÄê*^:ˆ–KÊN Òi’ñ@§FvyÆïj»ªDZ•YÐ>`ÎN±¾HxJ1×–æ!×6»ƒŠ5vÀxÃ$¹ºþY›Û¥Áñ®³–·uºôQv8zé--‡)œ¥!·G)"ïËb¬(õ%Ü™ÁÚ©i_Úo!׃w×)EKKË™zU•½Û¯VûÂÄrcJ~ÚnRbvÃ"À¾µÇlÑRõÝn¿<Í$a%«œ›USæZÆ8òµ:˜D³/2{¶‘xs­4£Œð‹¨PŒ>e þ—`tÊã9\ÎÆ8»s…ÌVö>|b .Ô+…ç¶ë~ýî=™F5¦aÓWL”Ù:_ÄÊ`ÆA&;Ýf.¥a§i8J›yáA¹OS:!ó(ñ=/Y¢tÉ ‰Tƒ'LZ—¿!ö’ŠŒ ³— '‡!L…·vÃZD!ëžÏ,$(™Z(î{'â†þvõÍv]ð÷7ë$±r´éÁ¶M¢á-a,$iÇ÷ûåsXÎÈá8Ãh<ó˜Ìé“E¿Tä3À&wРž‚˜ᲆP\´FŸ7qÁ w+ËQ~¹  بHtÌø¡ÐÃxOÎÑ…“î÷ŸþKÁ´S?ÇpmŽÞ2”9]kua5yÊþm¸Ç’™ E79k—7:¶ÏáÄÎð‹3ŠtóXÉ(vó9V Š £ jвUg¯ç¢"~²®[·S ‘òжÄ×"µ¸µ*"†+H MÍ­ ‹h*”$B«KEFÔ3ÁcʾÑeçÃ2Ä9©ÚÆd½Õ¦Êr†ó@EjyJüpˆgWˆ1 ´Ók@^ŸšBuNÖE½àù¬‹Ôò"Y‡éœ.Óä'Ë~ú)7éÒ9 “íº@ŽÂqcµº¼Æë,@Ä»Â_f¾ö!=/ÙîHHSxuRÌæ°ýu ñŠlYíë}Þl_˜>3iü»un°¡ñª4)Ø‹‚)X‘Z k™ˆ‚(gõ Õ%8JW™w˜jt%4–8ÈîÐfÖ~µÎŠ.´-nez©™«âÔñÒ¢ú/+{‡ôÕKkô‰U·èÅEOa½ dg¹ü‡²À•µÇd=]üÿËËú¹|™ÔÍF†úÜÉŸŽÐäçSìfÔ]SÅqÛë±ÜÈ×3©GJë!sjŽxV1Q?­ªükŸ©æXeVò»æXHṈ̃ö ÷WUÞ zk]u; ʔѺËUâFùp#O…º +Ô˜¤-¢]Qú ÓX@ [7ÒŠ<÷·Ë¸ð}|Ýu?õ>šhž7Uõèg¦®+Ó‹P®tá*7¶òÏÛÍ*ø€üñ>­±.œÁB\ù¸ýÖöƒÏäléžÕöÄ䆱7c†í—Tƒâ2A>€AJê…$diýWŸj[+&Ò—“õ;J%˜Ž›0?>D©†›~î6CÞtVHæG%¨äInœxý«·ÉeTM(UÃ×QϛʯÐÇgøWå%ëõ?®Îü{×=¬¾¶éDéi쓞ÛnGü¡Õ®ˆkñàqÓi É‘Uf< I}EÁ¤!6å)6Õ¨*7ž×m¿¼väGQݗȆ…PvÜ›˜€  ^J¦âðj1¿ÎHCA ÌSýHÔĹyÓ½Üâ›Î`’ëºÐ/¼,üÌë¢@Æ}.­>MóôäòžÑ¦Á bò†0Ýj»ÈÅÐ(ÚI£«i|‘fhêˆR³FW*Ú¦Sœ!DM‰$Eu¶¸u’¡››%ÕG<µzÅ‹M»@¤3©Ä–Ó‹è[Ï Ÿ†éyÍjccÒ …ýªDõRáäØs»KÕb)5¥I†s§!½ ™ â3uªü¸Žú ŒTšñ‡1Nõ%°œ#€È}>ž…aÛÃ{Xéâpâ#SlˆêMS®óLÍ„acL+õ1U;¼‹­àÄ–E39]¬P£TH-€‡(†ŽT§AŽô¦Â/Å™ú!ÕÍв¬9Ö°($¦¦†œ**5“P¢ˆ©wÕsÒ…Oªvm„,ë‰R]*?Ä€ÉLp²ŽR×ù“9I»Èj Qºy.Š+—»¬øk’;NU³ÇHظ×9……˜ß`ƒ_îâÔQn"ùPή,y´¯Þ(Ž9©žŠkcžSÛ4Ö¸™I3 œT:),ç\ˆñoR•‚üa_îãÃÛø][‹:å†d•r+(ˆèÉnÌ®ø¥Q+Íúê©Ô×ìõ–$ìúX™,á1µ;Ùm›„ÿF’+¿@† îïØ\«7~0Cãt0—¢aïT5ÿ™ŠŠ÷±=ðôz7Ò¥Þƒ3Á×xëg?¾¼R„¾o_¨ÙÆë—€­¥ªeÒXö*ˆ¥Ï—1úc%6Øhš‹““Èб8oÚÞôŠ]gM³Í‘'o"M¼63IUå¡úÎî>7lÒ&ä I\c†"Qå6  Åk± ñØN®^Q[—¾Êˆ>IRÄWÀÌ:ºøÇ ø3Ž}/œôî¬_QwLfã!ÊÒ¸Þ2ä<‘'˜RVí+íñÛi7¥¹!ÍcA¬‹„%Ä‚±1Œ÷0M<&B• y]¨]i¢wØDdÒxú 1 €šÏ •Ì.¥ïèS+ž·n!W`L®¸3ô?ßç&MK¹'º_uë¾¼Ž8s•+I¾¹ƒ¯™:ÐIW‰þï\¼Cå£ÃÁ»/^Á»³óêXÏlX™˜q/·¶å6ïÅp¶½Ó]t‹w™8Ö„ªñôǹÀ O—9èÉŠJ¡µ[û êcœx}öw¡š7=àX–Ž®–!å¥O¨àW\"ˆ:)‡ã´×*à e%¤ÞÎ݉qÅ}.ÄØdõw¿*ˆ»÷#ZÏ`%r<úJ:úM׌ѧW¡ Þ›‰¢!‚¬}^ÙjŸ !º}Yz°5¢9©`õԴ×á܉ñ­átª è¤ÏŠWr¿ajÈp§©ºÚ71ûe°HÆ_i8p¾j–»s¸|Ìñ.•_¾/ñÊ'« õâé¾è¶´ö»”/eA´‡×&wñ·KˆÛ4t¦r‡á “ÜùÇ„¢%^²™"”Ùf€PR¢²¬¨Z Ø* õ«£@ÑU¯þòñq³”X•Ùà¢SN|ªJ-]Ìj e©ƒTâ\„\'X¹è>¬Þ¼í=%¨6 »·÷çÕ§.,&%Ln1EvŒa;R·qÖWBŸæa€®Îñ.(L¡Sk¢S¥x ¦u‚œˆnB FäìYðØ‚ 0²Pá^‚x¶—PÂæ¯óî^\  =ô&³^o»_ÿ8ss¬+R©›yñh ó24ïà^3ô+nŸQÀ¼“GJ\"2‚ÌÁÀºÌI!àŒ¼.òž:g¬§#ïœ@qêé?¾Bþ~ÏÁL?¢ƒ$§)Ú Éöïxn"ÌÁŸ‡I0G—>Nöän·ÎsîÉ]…W·-‹Z á':D~ß³QS×3²áb8‡z}ß çŒ?è€þ3f‚\Ã#ìä<‘îw/RLÑNêPG¶öRoÃïh)ƒ…£¹+zî´žø'TÜÛyRå«´j9w‡z✠¯êÃÛòŒZ¿DÔª×ívï÷ý…›×@ÏÊ} ì>fZj’Üaýô@aý:øQ† äþðú5•iýøËAŠJ̺ðŒ5Á9ÕÞ0" êcn T}ùuœl•Ò°dâîÈÅIë²gݱŒËq¾tc¶Ž{"¡ˆv¦¦·ÕsC'·Øõ‡«ÿ®1IÒendstream endobj 369 0 obj << /Filter /FlateDecode /Length 5185 >> stream xœÕ\K“Ü8r¾÷˜“ÃQ7³jšxvø0ŽØÑÚ;Ó>H>pÔ­­ª.M‘þÁþìÍL$dUWOwÄb@"‘/¨Ÿ7M-6 þ‹ÿ¿Û_5›û«Ÿ¯=ÝÄÿÞí7ÿusõo?hOjßx±¹y†ˆM+6θÚ+³¹Ù_UBnoþ^¶Ž½,MݨÜÜ^½©Þo›º1ʉÆW‡ãÝ»í5~oEkÛªÆíÿÝü§ðùÊÕÒ´2MñïÛk­uÝ4ªúv+]Ýk«ÏÙ¼ãaß=L­”‚‡ÕM¿¿‹_œ¬~Ì޼˗?öìë‡]ý~ÛÊÚ{¢8¥¤¾ØÎßïã^8ï®…•µôvs­LÝZ¶tóêVú™(§­ªpAÕøº±¾ê‘¤ÞÂjø±¢Tõ¥¿]nÉ+˜¡>eóÙŸºÛ8ƒÒÕÙè½¶Õ×ñC|¬U5~ÈæíÆHˆÕ·?¼þþÛD@µ?l¯¥ªTµBH`ÆnH£]ÕM{28xÞ¡­îÙ ‡“'Õí"'¼LTÆCxH»²|ȯŸ€H”U-ªÃrb+­•V”mÝha«±k’Ú#ü©ª:°¯é˜…ªörsó?W7ÿú¦Š‚ø~_o¯ô@¬I–¦ó´þ`çÏ­eû> ÛAdãñøùÔAãJN°%î¬@J5nñ¼ÕÕÚe£LuH‹z.Ë»nÊ@/Êêð>~Ö2= 
¤±œ=nAëIÒZç«_¶‰Ÿühwݧl"ÚWsb_0%¬»ëOÊIwL Ö/" qg¤Ñ,ÊÃÌ'ùÄU=ãZu¿P˜Ý.ýE¢J“¶©yOAÞå_"½Š?íâ¡ÚFK5ÌŒœ6“vã×Ö]zÆßŒ‹óAê%ïç\‚û_¶Æ ÄƒuÙ­h¾ñº-ŒìR(ý¿<£²Øø|ù$âñ´oã–-;D<ÁßÝ\ýé*xK³9>ÕKjgj¥ÄÆz]{'ÐU¾©¾ûš JNð¾+V‡y-ó pò4áõdtÐ…Ü¡ÐÆ’¡€ÃŸe¬Þf{x*ñBˆZH¿±­¨µ”äçßVèçŸÇ!€)ÀŒ|^Ù4m—Mýä9µ¬­Öùœoª·[0!ÎmõÝ!P…hµåJô ɶ-Ÿ`¾[°“¯€ÍÖæžg’x­ÑÒ n¿ó/ר*­¢sÝVå^±U“Æà~À`«`ªÌN¡©gÔv*®¬.7è8?ƒ´¾hdd!ZŽ»8¯+XºCqmÐnió0-^ƒ‡cú`‚6D¦7·‚‡Ï™vd|ã~(÷BÝq$)jª?>à¤À 03‹ãÇÅ´.8Íg£Áv3æqc€RŸ„M!«e“¦µMôôÅ)2%Ä+vóÊŒ7Ù‘<äqü&HïÈ£K0ôœ<ÖIɘ]>-o °3c©¶MXŠÁáI`¯‡d1u-•^ ÊW÷ºB¨Q‡]•œà—o•u(9Ó‰­È†(Â{‡†ð„*ÜÐ+²‹& cn#så­Y—:åø>@–ÇÑ]Ó\Î7• ¬ûÎ"ª Ï“ ÒÂNó…ûœ;; ËÖáþõÆîøõ?R(ƹ ²§–á f˜ìMAœ©ú1~AÓ4ŒÇBb²Óg'Aƒ*Õ”ž›x8ܘÌ çuÁúàž…­U s”ù2”âÝòíÿš ŠÆ9]HbtAãÉʧè­ŽQÐØ‰0/õ¦Ñ:Æ\îQhÚFñ¡Ñ`K ú†¿œó«HYÏÔl[†ƒñ´ëï㦅vQC¬à#büæ!†âˆ±ø’»~œ4èëdÊL:ÔF#àÔŽü¸øVrªº}4wÆ~ª(}ð$˜†3’œÞý|—·d>~ ’cÚÒ[½ÝÑ9_`Äi;`–¹¢=:färªú1¾Õ1Ì|TtWh±BЃÔ)ç]jxëbðòÍ2¯©éÉc¼ Û;#m\×K)XK\ÂÃ/ýI–œ<á³’ @ž#8ÄËv…Ç$1h¸•¤ ¸•J’¸xGœ„Å£Úñèd–ÈÒ®ÝÆéæ)~°WºÄ¨%Ê1ýM!hñ¹ !-nqEäÆYQCªÀzsQ›` „Êêõô’ i˜¸ï¸S â7 ýO¹çJK£{>Ä¢=-tÿt‚¾Üœ$‡×µj¢ 7kA^Ь]k×$œ„ ‚ös €¦:Ú2 ˆpè÷ýŽ¢Z mBVÂsÍ?rÓ^Àä~ L+ÃÚ…4!²&¬]d~cHã<¥Ê†”pÄS=—~e6ß«òÃc™âÄÌÍ÷q¸}&™23—ŽŒYóãLœ’†&¾]Ú‘7]‡ !ç"Ñ…¾ |aîns|ÐïÃÿ1áL"Ñǡ ’¤^â,€¦§„—´ÐdR/rÅíJv™ÎjÓþ§qøqwwc<›´0Údc”Â4íIˆÆ&%Ið,ÌÏ2bÃÿ"h|̧6áðŸíц2m,ñ‰Cü‹&sFž'Y=ÙMÁm%C^ϯ©IgjéÝF~“‡òÑ3JGð½32Ÿï Bò¶1½¼_ìd­'/ V¨pµÈ¥ÅrCuð¹q² Ó.¡0®‘c/Ѫqo•8Æɣ³â9ïg‘U1-²$¯ª·í ¤Ó“Igqè]Ѹ©!(L žÐ#€´ÂÞÂ}V8³ ;+ÂGë)÷˜ð¥˜¢Fõ·:4¼~ˆã)[(çÔžm ÂcLãïç¸x*Jžì×ÈKãqè¢árÈݸžWÛç»Ûu Ì „L-/k½EaÕ}.6Ïp8£~Í˾ˆ£·¬òl^pâ9ÈD΃s%EO –úétcÎX:úód+@®¥œîÈ!H:㵊^ZâïS9ve…Bêó.Ò§}DîŽ,qÒjãž8Ô–¸Ê[Ц€–4aüœåýBJ\k»HbtÇ=µ¡S+N[c7‡Ðt¥+œVÑÒ8.{™Â*šîdr§Á‡kw2±Á;À™ÉFcvŠ>c Ñ·{*£ÈY.ƒ#š õ­ŸÚI+þ™çÓV²ÍÔ{¤mÒVǾheÑäù¨¶H¥BÐò4Ò˜¯}Li$³‚eé9—æ*º}LÆãf©z¤B+A·Û†qÞI"AÊÚ ¥Ö/”ûƒh¸ÛÝŽ=ƒ/d~éz‚[°WË–Ø{Y¼±[XbjÝ7 œ;ôòªÞÇ!.é—€žHÄÇêMÚXú“ÔNÚð¥Ý±'X’¤€H…ÝñØàا,ëÂ㫦ʀát3(Ý2žòô'–?†2U;Bd6Àݲ¨y­E-g=I þ_>§—"!¯¦`Oxí3ÉGYíþC|  hŽºHs¢þþ]è–3ÍFø/âsLãóùPw˜Hû±Ç„ðÓ©­˜&Ö4±|1¤Ñ`Ù„ _0áü37€EsõC;po䬬 eôߪ~€ti×ú1d]Ü1aørá'¬‰ù¿1tˆ„äÙ —Y\ý*´’+:U)ttþ²ײWÝTVjEhQ ðëEŒ£bS03õ#úš®–¼]­ÊÇäC—_Ù)7NטØDR<:$XB®¯DÀJ,!B[PÞ~¤ÔŠíCHU|Ï1º„wn}Åd;Âp²8F­ω0sŠ0÷ÂjÏrDh°L½ý—G/ j7¢…ÐÁ«0Vh9T>«ÛÂUHj8‹—eøa‡Jê&~!}>ƒ—Ãñ˜ ÒèÌš @ŸõJiW†Hy*„3Ì95Ñœ(ïÓV<[„×õczËUE9pOð;ó½älò6O$\€ 쇌b¬y¨¶vf œÚøk4†£L(ìú.ÅÁ‚9}Dرa[O€ØØ  øž–pÙr®úöSf°ŽýŸ·Q³æ„%"4C Kdµt˜-Ä;œfJXŠ”²·òzÿiw·¿{Ø*ìá (¤0E04äÓ§Sn <Ÿç_݉î~ ŒÅü&õÖ©&Dü‡šÅzúi ŠsF4ôàÖS¶¾¸Ot¨)¹_Í:I܈§nS*nSö²fr5u!…´!ÝG+úü)œkó{‘4í|/2VùMžø–¹´‘R…á—§Sãjâô¼ÒáwN¨åmØ6ÉÿšpáûoÓ¨¥¬•”ü)ߘg·jJ‹&yž/6½ÑIO)ÚhHϧhcó³JÄ=GÑÄ/`‚]@ÍíNöiwœzW;`u7oÑo¬xKØœ4J¬¥v—ióðœUR‚%xk.f˜é³:Ó ¼–av¢óy=7Á ´nþy±³ æfú庌îÛE¡Rb¶Ãž¹Ç‘Y9b7Fžø¶('E`ˆË€—9¼ç*±8^nK¨'Ÿ/»w—k\y…-’•¡Ýì+¿ÌÏ/l'šÑƒLC &u Øã]j‰îÈf?:ö™²ÖtuG‘’‡OcúŒkßÏÏ{^þûø“-ÎÌ!érÑÙKÑóçW¸O“{ë± ! 
µÚb:ïÿkÜ–/zŨóö«¹A têßr3dÑ£'Ãoq«)¬ÉX$°;6Êÿ—zÿy|ßõï¯w5pñáö?o~øóïVÑ·sp¸&NP¡$Ëû§«¿rØûendstream endobj 370 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 403 >> stream xœcd`ab`ddôñ ÊÏMÌ³Ô JM/ÍI, ªÿfü!ÃôC–¹»ádžŸ^¬=<ŒÝ<ÌÝ<,¿ú*ø=ˆÿ»¿3#cxz¾s~AeQfzF‰‚‘±®.´THªTpÒSðJLÎÎ//ÎÎTHÌKQðÒóÕSðË/ f*häç)$¥f$æ¤)ä§)„¤F(„»+¸ù‡kê¡» Æg```La`pe`bdd™ý} ßó Kжæ¬Z¸¤{ë÷¦%k—|O[•³UhíÃïSº\V˜ðcƒØ*v÷¦I{ä|fø­ÑÀšÃ~~JK”üïLö¶‰ql»9~ó+hþý-ø^í;÷•ÛÌ•÷fÖ`ø.ÎÒ]ÙÝQSÆ!Ü——[‘,м¸pká¶ÎcÝ«»wM<°tóÂÕ[VèÞÚ½¹lYêôêî®î ¾Ò…?œç|Ï›6q!Û*®§Ür\,!!ñ<œÝ<Ü‹{z°bOϤ£<<Ëû–LžÔ7½¿¯ÂD^tcŸyendstream endobj 371 0 obj << /Filter /FlateDecode /Length 183 >> stream xœ]O1à Üy?€@”.KºdhUµý1!C_ MUu8KçóÙg2ŒçÑÙ„É-zõ€„u:Âê·¨O0[‡†µUéÃjU‹ ˆ ž¯8€ÙùU.@î-£µÕì&å5¬A*ˆÒÍ€zJEoŒ@àôŸÔî†ÉüL0Ö0‘)™ž2åY)`”µ“¢‚u¯Ë5åNI|Äj‹\ªoÕØ%­uðý<øP\8½´â\¤endstream endobj 372 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 561 >> stream xœmÏÏO’qðïW@JŸÈ4¦¬zø`Ù ÂòP¶Õf«6T“]:< (>†L.Bü<ÄÄÔ ,X­u¨·îýÚºvý>ô¸ˆŽm]Þ‡×ûò~c¤ï@ãþÉ©)Nð¹cÁY~)|Ó1í]ˆ,q¡¿•]½€Õ‹ê%h™¦ÐÜ4£FÿM­õÒÄ9ºz–†zcOp-%ÝãWb!ÿ‚O ×]®G;o‘Ùs’qnn‘†ý„[~AÆSNòˆ¶ÑOùe2ëõqKó„Ÿ'O½ÏÈŒûþ´›<œ~<óÄ}Åùÿuÿ*¼!x1ض•?èEéHçBè9ªám ÈÐþŠôèò ï¸Ÿ~2µðejnF1íièè-`þ5b,CE "²¿GŒ"DªU¨”YSK7:h´›͘«Ù“ãN1‚P²U=î,W j5µ:úÆÊÑa3ìoäŠf? ¶£¢²“ƒK.ýÎÇjöÁ]:$ïmUy°Ä@ŒgÄS ã®vU3ÿX/$!ÕÖµøêN²˜²RÛ¤f›H¤Ö“°jIÊ¡Ï,µÿŒk.Ã+cêeA8„(•ôžH‡¨ñöû˜ y°ì¿y»Ûž‚É0¢5U6àƒs‰ÀÆ«õ<ØN§â–õ›¯sŠRʲtB•³Ù”,¦Dñk¶Ù[ÿRélt5ºÙ.çü]æ40ÝõŒœÍK9EÊJ%†©g¤‚"í—”¼,1gú‹{êendstream endobj 373 0 obj << /Filter /FlateDecode /Length 228 >> stream xœ]Mn! F÷œ‚ ?’EäMºÉ¢UÕö c¢Y„Ad²èík›¤ªºxˆØ2ßp:¿œË²éá½­é7—27¼­÷–POxYвNÏKÚ&kºÆª†Ók¬_ß5=ÀÜý-^qøÝ(G¶¥uÆ[ [,TGcà˜3(,ó¿+û¨˜òŸ§ŒsÖ©…Ž™ö¬:f²¤–nç‚èéȺtÇzÁ9ŸH=µaŒ÷Ò@ÛÐ;Ö=.#DH½|ã9/ÿˆ³yF¡Ó½5,›(q.KÁߌëZ¹Jê:·s!endstream endobj 374 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 1129 >> stream xœ…“{L[UÀï¥ÐÞAEGÖâÖÞE#j„à–,º˜Ì m¨s®°R*¥…^ T(}@ôë½}Ð^hÝ€ÒñÚ@¶ÕMc"`Ш›3ÑÅÄÇ?jô\âþ0äÜr›éåe2crrNÎw¾óßïœKOÃpÏ-+/W¶6TššjõZêÙ¢‚ •ºM«4¬®=Á=Šs;Ó¸]"à™¤*iÏ©¤éŸìÌØƒ¶£©‡Ñ¹G0Ç«uf»x¥Xßl2hÔ ­äž¢¢½Bÿ9_*èlÑ›ÍÍÍe.¢·×c³ ìQ\wÿÜùÿº^Í|ýõ¦¯z.1Ьb\[騲"´—o’­ì—D`Èh„³üÞ~‰:†‡a(²élçú7Õu‡3̾¸K á”Ï`¦úò™ÉÒP#6X×1~t^>R3V胂$› †¶ ¹ÙrÕÊgjà%м Y m”Ádj¥»œ— „5 ỿÍݦåH\‚wÇ&"‘ññ>"ô†Ѷ/0ôNO–8Ã`–§®‰Í`4ADÁ]G†`x#h,s QR5¼žÌ_BcèÒZÒê\!̧ÄQ.À$¬ÎðÓ’‹¡i~ŒŸ¾¯øû‹ó»Ö þ”Á Ýgó§æP$·;Øn ¬v»Õp:\Œ÷¦FyÚî²» ;ϲ³>¬œ›GÑ/쇼T]º€Ò%O%Äfã=f¨pµáIw|CúÎ&WTÁ-‰£«É©ÜBK⮭̨*}˾û|/ƒ^ÆHÅ7—EtØ×ïƒÁ<Ö²ºûÀa•§ø¨Í¹¦–°3äâæùh.ëþl(Ä*øªÿBy‹({_œà–'„·A¿ÊFœ¬Ýæî´yäÎÇ‹ß8 eP;{ðVé§o â±òÝÏPÁž Ë 3 ?"~º²7à}õwGnV|Ðñ}¹ü‡"ÛNø“9WcâDf"Kž)ªÞq@º ¤Y1&bè°7ÀФÒÃôû|l„¦“¥aØ?•iu½endstream endobj 375 0 obj << /Filter /FlateDecode /Length 178 >> stream xœ]O1à Ûy? ¤Uš!bI— ­ª¶ p1!C_ I‡>Éw¶åcýp¬‰”=‚S/ˆÕ·t„ÉXÂkªŠ;+SÍÒÖߤ<Ð$Üø]ÎÀžgÞ”ßLÊiX¼T¤€tU%:DAÀê¿Ói3Œ¸+yRf`‹H”‹l­kQ®—’vørp®x4¢j l,”ž¹ž±ð{Õ;Ÿ]4|zìZ¶endstream endobj 376 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 834 >> stream xœuR]h“W>'I“ØFm…@ÕcòÍʰ[ZªN™zW©íœ™]›0½¿´ù³mÒ&_­õ'5g¾¼ùi›ÒB·X?XzÓ¡ŽÀ@/6…Á¼ðBT2Z<_{¢Û‰”Á*ž‹ÃyÞçyÞóðòb¤Ó Œñºýv»(ùZŽKî@Ø lijèp{{ÅP™ýP]U¢Q7hÌŠ¥LZ0éfëÖпjèãÕô÷jT‰ñWýÑÌ•«3÷>Ýìù½>IØÚÔ´­¡ß;×°ÐÜ(ì»z‚Cá¿ º…}öFá‹à/ú…ÍÁ€àrûÄ^ô÷AÁÙÙÒÑ)´vp¶wÖ7¾7æ;„+ĺ¥^·Grù½Kèm ûÅ;P¦š—Þo‰æÿtKÔÿleìë%Þ¿Ûîï‡B¶ºfcÝA„p§pgð(2ò¹"úµ£ßð~,áo5éµUÿà‰«ˆ°aeqNÁô°¢U-™f[œ£6…9ûÒ°LÕ¬héѲŠè®3Gµ½™c¶.®Ò8?EdaÇ1b0 aÚöH«Þ! 
›!#§ä´÷ºóg;Õ0D¹€Æ@m§khî¡-é4Œó1‰Èpî¬Å¶S`Ö½Ì{„ÕJl;°]À¢?² º…ú¾û†)œÌÆnzkS‰|”['õél2g]žáÏðÉë—f J½ËÑÁÓ—<7ñ1"`Œž…è…Ì…”löÍ:ŠLCs( €Å ´—™ØeÖÆ>—e9qc4ùoRÍY^üùŠZPï Ý0IwÝnäÿ¶:_Æ"©Ú]Äô‡¢V½Ï¡¹È"¼B#Ev›ÌfX®“‹ZúKYGªfØm;ù»,b纊©DãOˆîï[˜¯ø›_Í!}wÝ­ÌÐwìëúæcé¥^SŸgS|~k`4–/yjjÕŸôt<=“gÏ=9ñÇ¡©“÷Oï2NéÙJ:]A˜K/A"?ŸˆÃq8£~÷ÚW[ºXš‹ŽÀy€µgrpÙBtWô«¢yuÓ¨jͧ“z¥òA•¥RçšV€©jòRj<9–Ì¤Ç 7L&%ÉŽóS(äè+ÓJ„þtª§§endstream endobj 377 0 obj << /Filter /FlateDecode /Length 4899 >> stream xœí]Ýo$9î-ð7 y£GÇøüýâ$@ @â.o9¹½=6h³á’ˆÿž²ÛÝm{ÜÓînÏdr;Ú‡M&Õv¹\?—kª¿Ý`D6Øþóÿ¿¹¿Â›^}{Eܧÿß›ûÍo®¯>ûœølÈæú›«ö²Ñd£„B†‰ÍõýU÷×ÿZ©"ZbР诿¾ºi¾Ùb„×Ô˜æáñí›íÎþ®‰–º¹}zÞþíúvÁ¢Bénˆ_lwœs„1k~½¥ a"eó—)‚Móüpû|C3ÆàÃæúîþ­ÿEÑæ‹€òm8ýã]ôë“„ðæw[M‘i9IRîï>cÿÓ­å·×W½bÈèÍAÀ¿¿âÆõFaaç›ûþB…F°à÷W_\qÁˆ5¦ás!•”À ©ºO,Õ-5ÈH*­ˆñf; åÀ ÑN’TÙm½é$f•½Ä4a –üð!øÛíûaÉoíªÖüŽN$b‘,’u/àŸ D©ØHcÿ(Û\oµÝH¬šÇˆÿù¯‡Ÿ‡Ex'"_Π0Ä¡[ÿ²Ý…•…ÞÍ€Ñ?o÷¸DËÖ‹’#fBNý'!5éH«’©¢¡"1Z4!,»j°$P')íN­š´Þ‚²‘È(àˆ ¦¾án¨µøæÙS -a<± É~Õ‘…€DÑÎwüŒ,™sÖüÏûš˜B5L0 ¢ÖÞ4Ÿnw”˜6_6¤›†ÁŸ1ó 5?Èñì~ÒÆ“ü8¿zŽRžäËmn†´â´›i¾¸ (ý™°-D@;B%éx"9–¤FŒâHfãŒR² sô@Nˆ4Xá—͘HYÌ@ƒ²œkmgÊr)‚U ¢caŒë!~Ö§ü¬<-ÛÈ@s~,ÞÈûœ°R™DùA Aû)hŠ*2ˆ4òFWÞ[oѶZ‚8+;Àˆ‚7-´#¾ov feObÞ³Zn÷ {Z®žÔr#êh¹øÞiyN|©–+ÆC-oÑŠ‡Z‰á‹ƒD¤€_Mà*ØF,ðÃHS‡0þ20å' 0M‚r–`?£Àð?&nÊùÎyµù8‚Nà6iÅbʆ™Q˜å±lÐõQ5¥ YÕÔt°r’G\êXìÇŠeÉ]ë¥SëÖ£\8+ð“Q8ã`B,g5,Á_ŠlA¼´1L´¬ªD³Š¸£–ª¯hÕUýTÊÑ1eRåŽnÕµ¾Ï9µZE’!¼ÅñÎ…7`29g»§T|ÎöTALòTe1Édc6ˆ › €]˜‹Iωv‡‘´ȩ̂Tž+‡·”ÑXÄ£žøÓ¼D ‘¸Ûðw¹}±;®ß“ü+7 EL£ ¢ËŽ£àJøøœ6Ë)¹Ü„„änm¾ÌÕLË=Ðä5rçXžPîÓbï=‡Äî`6‘Ø?õH}Jåpl¢È0Ò¨¸ˆ¶N!¬0# ¶Nm82Ö繸aˆ2«2ÑX&" «ÍõŸ,;?ÛW`,€MTºªƒ“{m ìY^¡ÀÃA+Uà*Ài%¤« œ‹ò;©rºùÖ+§Ý–Iåt*ÙÆW‡øêƒb_¹F¹‚øê? ïP˜¤Âb"ˆÔ" ž(ˆÁžhŹ `  3“݇ü\p.ôdGHvO9Ú>Ùݾ\²;àRúf¤»“õVH€ì3ZÔÊÃÓWyÞ©’cXäH§bÓ„ÓåQ:5H­'#zš£é§Oc&œ§@žÞéx/¸¯X°“‰åf\1½sF!»‚1Ì2cFu%3Îùð‚;Å=364cÆm R‹t\ín€«™Æ°#"€Ì †ž+Ã4‡s oe~=°äçÐwÇ—€ïnÂ|oÎÑ<ÕOrû '&‰ ‡ãt''KøI~+…†˜´Ù…”ÓiëùÐkÀàBü2hþi¬vp|¥!ìøøå{\pã3_›+"aÙtJ’x›ëÇï{&ÍLLÚ™½î9¹ª|ƒÂ|ë^dfcÅã×ýœÔÐD¥ˆ^¡JH€¦`)ËŒl47p‰e¯.–¹{µ žóIJ°g‡c|ìà°ÔÁ:8ìâqÙS ÖS­A±Š"›8ŸWZÄ”(±üŽì|J‹RÆGÃeGX¥´(µLŠxO““¼—'…¦8_{ÆN“~ºã{¡q}³nhªR¤Û‰1Cq^uñÎÏ,‰ba-c­­aÁ"OÛðÉK¢öxË™íñëÑ ¸b¾¦q‰̜ѳ´òòz°ÀÊ[;·ÆÍ;ç£vΰýó¤8«ŠÏ«;‚É·8‡ÿGøè !I’WK EH‰Z,¨ƒC:âv!õëÁlâ6’èù׃u,¿,wÆ¿ êÃï²>8ã7ᬆ!Ãeœ¼®TÄ3µÛgXÄ3½Û•®²2Å%W6[Œ\ý®Ð±¾øÍÖ ŽU¿)ƒ´u4]"å°áp6 ‹üUâk6œ°œU ¤h1~úc3²ß1žÌP–›Ñ‚bΕ>3S7=†;äõVTô½‹3­Òë°Ê€zbÔâÀ6J†¨ÇUéq¦%1• Ÿ ©lä©Ö$Y¸Bë™uzÕ’,Këô2†¢пî:½ç·µ;Ýtžç§cž‡º"Ó°âJ¦›n¤|\˜žNvžœWçOV¿S/¡8§Por³ëêNËú{÷ãÆ®û›.Ô{•Æ˜å” ›2c'i{u°Ô;^cŸNJûèeH§˜äÀmÃI» ñäÅúuG»¢vG¾Ä„öÚùŽšÐð¶¦sêv؆.À a.ăi™ }ôýö^¢õ˜øeŒ¨f ZÚN¯Ã…=XMЫ«Ê~)?Ì~¦ð5¤ f‚9— LeejæVÛÕ¶ì“ÕèTk?;«Úîø ':¡-öÐÇ~‡èq½L­âšÒ^KÝFAî"ÚZ ±ß7 §«a¢ó{J¿”•NÔ¼æU¯¤;§¯9 ,¨ˆÍØñP&'J,Ù•ÉɳEjý÷ µXÀGð ÄàÀEy±\F‰+÷±€§ °€§ZQ(^Æg›.-ñŽÝ/õhºðpe›Ø…à׉B"ðˆ¦­šyν’¤aC)ذ…âöÁK÷»—x…åAW¾·¯ÙÍTˆ9næÇ×Ü®¨-×Ts;c'cª°¹]‰Ãë›ÛèÕ®uëɦ_îZÓå¥ îàq¥Èçåê8C39Mkºýî&/Ùš.y»q‹C÷Ž(+-Ñš¼×°rsy•ìõ9×mÿM0¹w»¸ÜáHiå½+GU ¤®ösp#G,ã«BË¥tm–íò:ØWô:ØÓ—qÆZge)÷ìuÌH+–r•iÑWX.ÝáŽ[Êu^ÝáÖ'ÔuÌ'¿EÑ™ í:i‚‹Ë¹<<Ðh‹C0Êì÷WÂvn)ð\PÍÅ@^º‘D"ÌPΕ´së¯eóíÜ:Öp›ÀÝ%è¶ãmVÝWuWt¦÷퓎««ûêW:Y÷õ2݈?κ¯=‡b1¸C0x½2°0¼¯úúØÇdQ{P`º‚¥F—Óe`kLêÕ—Çèz¥`ÖŒ$-À—R°K)ØËÒŒØt¬óf°£_‡v G¥^üRÀ3¢K€'l²ÆâR67OõÚËÁ%FëU‡ ìRv©›e´—ê°ÃÀ°Z<+¬KlùR!6fع 1Ôxãø1gàñBxàžJªÅ=U<Õš 1l[aéK+µúå“GpMËr¶¶Blï<øÙ^U€2;eƶãR,´pÈ]O ¶MGT™¹ëKX©ÊìDÕ45«Ì¦•£¼Ê¬šnds9Ý/XªŽtR7ÀóÐV5, 7jñ•oµWIN7Z`Àšzë{Ö׬«…ú5k“EºCP;+«PËy¤‚µ=ä•13mw§ÌÌZRkf¶§ö(›ð“ ”¡>—ÊÏ¢vezʈf¾µƒA ò((T$n>Ö}–º¸]E3ØÖ"P‰(†8 ,Å@€ÖÝÞļÞj+N¡šÛ¯ÚdÖÖ›÷o[+ú‹íŽS T¬ùÝVsûƒnï·ð¡Ñ‚6ß…OÞ~ãþ ‰–öû¹láïçoÜ/ +’ªhœÇ§»ÿ¸ ŒVM7$°ùh ÃF›æöý›p¨˜‹ç»‡ÁOnr­¹ ?þÚÌdóoÛ±ÐæMš‡»[ªaÔ€vÏÅkyò±qûôü„@bŒÚ§þðÁ Y-üöÍÖNÇÍjI¸HGŠžøyÐF1Ø_X}±WçTÀÞat…‰÷à\¶/K‘®µa`Z±²Hð`ܯõ„7 ¤A`G¡$ž£?õòVÍó»øùV9šxqwþW˜Us§Š脎™x?ì쳓æùî>¤ÊJŽp†D@VÌû¢–ä烃‹dÆ‚÷f~•ß8á´Ùó‚ ®‘åâ ë¹ àšš`XóôþÁ+=W²à:³âÚ A‘R6@¨¶¼X!3ÁŠ…Ì4’Cý !ò°ŽNÆ ¨Lt“†âà·mV߉Û6iÚ¾UšÙ™ÂÅúÞK%Ñwr´Îð)ò·O‘º}ß c'ðpï÷HHÑDDõ‹Ù<·3ÚƇoìÏžÍ0Ål¹zÂTg…þ×§n(å·Ý&JLѶSauLoû ì;L ßà¢wÖ˃ù“p7ò3iLÆ‘>žˆØ¸'û¬ÊòíÙZ+|X-3Qºw;B¤—ÐE–Øåuí6 
$-xâ5évD“‚Tû<ãÛ–½ÿ¹)1åMä n™¶7*–IEYj ˜ gQÛ¤¦óÝš}±eoàÕ¸ÔöûDFh“>ÉM¸²½f ) ßlj±_\’FQéóm­ê€GÆ]AÓ²{ÚsCÒðÊ#¢¥ K²Öxã5R™#a²Œp÷H—€ß ¬ÃÒ7ÍýCë¹5ãÓ;bAvÿº•ûœxÀ–í…kþ$Ë*¨‘-ø ý×nìdM‘°8TòN%PŸÀÿëÕÿ­Pyendstream endobj 378 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 908 >> stream xœ_lSuÇïm»å:Êt¦¼½ !ÁÈ&Ó¨‰$H˜4m7J‚¡Xײ²ÖÒ®k]ÛÑvm×sow;Öõvµ[»Dº0t*›jî'µ8ôÍMü“H4’ßÝ~&ØE_4†x’s¾ßœœïç„LB$ùxGg§Ñm=â6Úmæš4=½»Ñµáì·“â‰ø„°aýÄZ{ È¥ —}ºnÝŠ[åadx„‘äq»o¨Íáô¹l½V7óÌÞ½Ï65U狌ÉÇ´63íFsŸÃ{®ÏÆûO3íÍÍÌQ‡·*ژݎ~ÆÔc5Ú-ŒÃÂèzôL—öFËÖëRkŸlþÏlÿVA2·õ,M]Äâ(™$jªd„Œ“¯H´’ŸÑåúûäGœ/‹›P·—ëÐÎ!‰PrÔ¡z­¨€°+rU]©ùµvZ€R.~ÕžÚwÑn–çxS9¯kÈÿz”Žº„åS¸•ÚU;O ÕOµj<<â ÷BP©¾Þ{Io!F ëïKÞür㺪ŒLˤ¸¥Â­J׈¿3’#ÃqÚýÒ)Óéj†x®8Ë …UÙK‹÷àäaÉ2çús'¡ºÍô¢`¹T€âã!ï¾È¨‹Ž$I 9È.C4 Ó¸ñ[1Ä”CYirš/NÒ©ôÄ_œýN‘šÊ¼·ÑÀÎÌÏ_»~ó n¤^;wÌ€¥oUÃŽxüT÷Õ3ß½Šè©*ƒôÞò_ äÚ™ ÷•týÄ¿ &óƒZ@§î3F¾=˜×Ã>ÐêÏš£Ÿ«K&0€ÛtSK@1*˜ƒTŽM]§§V¦¯½Ô[ãç`Øuª<-}ø±xpÄ ¥§p>_È <ÍO\˜ç‹¥ïéRºÄÏN.ræËW>úd¨ÏXý + hUF¼?æ Û ö€Í€›¹ô¿ž!yç¿:€tsbãó’_ÜÛnK×¶£Lüþô*®çWPD!„ 6”Œùã´ûð¡~ ? x›pÍo‡‘4înR„N†- B¤¦ØT‘§—Ð~t`µ ­€v(±ˆ£ /\‚áîÔˆä)Ô™Àäsx~>ð‹Ø¨ø:Q1ªÞ;#¶e2Y䜩-×U6Ñu2CþÈ7•ٛʲ¼0žÎÊåosÜe¶ZiN`å› âOäÔÙ'endstream endobj 379 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 502 >> stream xœmŽKoQFïe€Æv¤ÚH”(ÃÝ4AqAº2ÑÓZ|0Vܸ”:ÊŒÂX†ÒZ¨zaäc …´WºÅ`ú\7qÓí#Ò¥‰›/ùÎÙŒ!<;ï÷sb„•!>.ø\ðÒË8—EêQë/ïX w…$eæw,‹…n戴eHe~òª=¢X{"–™ßGfYÄI¡‹[íÈÜêâ®Ã24ÌÞDýNiäŠwäJ^Ñ]çˆó@­4ʸmSWÛé7oßm¬1:ãùH.•Z¦fn+•Êe²Ì¶îÓí‡ÙZ¡¸‰méUI–5ï ÎyÝy[Îç X²ÉM©SSJÕ:Cì‡+ºÏ”mdvv[ÍÊkâÂ29³'•pÛ:Ͷ갬տ–Sû_>™ûãý fœ ž¹FŸÀôÄ~¥X,Õ‹ÕrY©Òôè©ÛJ­R§Oð¹˜ÒAendstream endobj 380 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 402 >> stream xœcd`ab`ddôñ ÊÏMÌ3× JM/ÍI, ªþfü!ÃôC–¹û7×Ï]?uX»y˜»yX|ÿ!ô=Lð{ ÿw_fFÆðô|çü‚Ê¢ÌôŒ#c]] i©T©à¤§à•˜œ_^œ©˜—¢à¥ç«§à—_ÌTÐÈÏSHJÍHÌISÈOSIP v Vpò ÖÔCwŒÏÀÀÀd¨ÍÀ ÄÀ²ŸÅoÎ÷5|ÿ™, º×ÿX²¾{ýòr¡ï“îˆ ïø‘+¶>•]½{ÆùûØ—uöäËyût%usŸHg_Ü}¶ûÐrŽ?“سºëÔåRÓÙ¾»õ³òýbÔ*ûþfÏ÷7åŒ?Œö0ÿÜõ½OtIIw­Üïcß³Íè^\\Ô]äÿ}œ½®¸»¸hQ÷Lùï@[]wñâ%Ý‹fÊ}?þýûŒÅÝ‹åùÊçÿpžõ=oê”ùlë¹nqËq±„äópvóp/ííííéëÔ;iõ žå{7õ÷ôOêŸÜ?‰‡—Žá¡cendstream endobj 381 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 329 >> stream xœcd`ab`ddôñ ÊÏMÌ3Õ JM/ÍI, jüfü!ÃôC–¹û·õ/³Ÿú¬=<ŒÝ<ÌÝ<,¿oú&ø=˜ÿ{€3#cxz¾s~AeQfzF‰‚‘±®.´THªTpÒSðJLÎÎ//ÎÎTHÌKQðÒóÕSðË/ f*häç)$¥f$æ¤)ä§)„¤F(„»+¸ù‡kê¡; Æg```Ôf`àa`bdd û¾†ï? 
ó†ßje?$ÿ,güQu˜ù—Ù÷>ÑåÝur¿?|ÿÀ6³{Y~~wA-÷û#{]Aw~þ²îòß?þþÀVÛ]°|y÷²™rß*Ùg,ë^.ÏWºð‡óœïyÓ&.dÛÎõ•[Ž‹%$$ž‡³›‡{Uo_Nîë™xœ‡gë²É“ûû&÷ö÷OšÀÃËÀ^r> stream xœcd`ab`ddóñõM,Éð,IÌÉL6Õ JM/ÍI,ɨþfü!ÃôC–¹»A†¥è§k7s7ËÁŸòBßS¿'ð``fd Ï©tÎ/¨,ÊLÏ(Q0200ÖÕ’– I• Nz ^‰ÉÙùåÅÙ™ ‰y) ^z¾z ~ùå@ÁLü<…¤ÔŒÄœ4…ü4…Ô…Ð`× `÷ ÿЀ`M=¬NCd```ÊÈe`ðdðc`ú‡…áã†ïkøþ3[÷14¬ù!¾†ñgéwÑ™UÝíU]í­rU!qA‘Ý%-³Ο¾bÊbùi˶gì>Àq!f¯ªF OV®ÜÍï§úgv÷vOìžSÓ]Ýõ›¿I¢ª«³«­½½­£¾»•£jv÷Ìîyݳäæuött7wwtw¶Uþ–û³G¢½¦»¨¢dnÝüù g-ž(×7µæ¬Ós/HÌ:ÞÝ»|Û–“ÇßêæØ7//!8A¡6C¾&½£¹»Š#|oú±¯‡¾‹-‘ã“a9Ó°æ»Lù÷Å@Ç¿Ç |TøÇ†GC|° ê”…òs6ïüÎÖ}ˆãjØ=¯ °d¹¦““º5»ƒýÒSÛëO+êæPÏû­ß];»{œމ3úåg]ttk7ÇÚIÙeµ©­…ò¥†%F­E ñÝm’¥sëæÍZ8yÉ4¹y7ç^˜°pâ ‹${û×_Ù³möÌ­Ï@œ_ژܜ#ŸòÛ ­¬)£»E×ÚïÒݽ½ý’3–]þ®ð¯›cÏ‚‚¸ D•’ ù¶ú¶:¨¿Aü[ºð‡ó´iÓ¾ç/d[ÃuŸ[Ž‹%$$ž‡³›‡{MOoo__Ï´y“úgòðlÝØÛÓÓ3¡wÂľ^â{-™endstream endobj 383 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 588 >> stream xœcd`ab`ddññõM,Éð,IÌÉL64ÐuÊÏI‰«üfü!ÃôC–¹»ùLJŸA¬Ý<ÌÝ<,~¬úž.ø=‰ÿ{œ #cxVE­s~AeQfzF‰‚‘±®.´THªTpÒSðJLÎÎ//ÎÎTHÌKQðÒóÕSðË/ f*häç)$¥f$æ¤)ä§)„¤F(„»+¸ù‡kêaqªPIFjI"S…C$c3ÐG , í¾¯áûÏ4™¡yý‰õŒ¿ó}¿öùÇãï¢3kº›ê[ºëÚåò œs“º9R+ç®^0ãÀ” òë¾ËÌ:¾nÕºnÉ Ý‹ºçפ7Vfvq”Î)^³nÊ=ÂÖù¥%TÖÉm žÔÍñ[Èý7Ví¬î)Ó'uÏè—_ñìòÊ-ÝçVfWÔ†4¥Ëgþ–­ñKÏÍì–lë.é.Ÿµ~êܵÝK8V-ÎÊHË‹ ;qâȆmsgÈ-I<\q¤›ã»Ðùïükän7<tû÷Ý»¿ï^ϸóÛ÷ÍߘlÿQ'úÝEy½Ÿzøo¹Ýœß2ÝV¿U¯{>=÷xÓÇ r÷¿ó/ûnÜ}ã·ÂonÑß©l3.}¶rÿ¢-[ìîæØ:7>À§Ô¤!Q¾>ZÔ÷÷ó‚ŒîÚî²þÚUõ˺7wwwYøbÚ6¾ª9?"z¾­ïëa[Ïõ[Ž‹©¼ž‡³›‡{}oOO_ÿä ëgNêááÙ5sÕ´þž ½{¦Mãáe`èü,endstream endobj 384 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 283 >> stream xœcd`ab`ddòñ ÊÏMÌ34Ð JM/ÍI,‰ªüfü!ÃôC–¹»ûÇÊܬÝ<ÌÝ<,¿7 }üÄÿÝ_€™‘1<£Ð9¿ ²(3=£DÁÈÀÀXWHZ*$U*8é)x%&gç—gg*$æ¥(xéùê)øå—34òó’R3sÒòÓBR#Bƒ]ƒ‚܃üC‚5õ0\````ì``(a`bddÑü¾†ï?3çq†Gß·0~ïü¾…ùÇÊïÕ¢·¼ÎØØxyÙØœñºuëÌ™[r|¥ 8Ïùž7mâB¶U\¸å¸XBBây8»y¸öôöôööLêíí;ÁódKßÄÞ‰}}}&öóð201«g>endstream endobj 385 0 obj << /Filter /FlateDecode /Length 10740 >> stream xœÍ}msdÇuž?oüRó%ñ Å÷û‹§ŠvÙ®’#ÇW•J‘© ¸„HXÀ‚@2üùÙyžsNßÛ˜Y»ËÈRI;s¦owŸ×>o}ñ§;øãí߷ׯÜîÛWz庳Þ^ïþöÍ«¿þ]逺ë~÷æ¯ô¿ ¡B »šë¡Ç¼{sýêËý¯o~8s—}ª¡ïoßÉ—ÜSkûó«³×1†Coqó~އžüþ «w}~ywùöì5aÍ·2=×7C1ÅôÜYhç|Üÿtêþüöbšá½ù5H~F"ôp¨¹‘7ß¼Úûxöæ__ýý›W¿% °ÙC컃?ä¼ó)ƒNy×r9„v·»ÿ±{÷*ùÃî'ä×øß¿‚„ÿøÊç®îJË ì®Éþà1]kéÐÃî â!ïJÇÄ-)¤¢ßUçó!{Tl¢’â!V´zHÐ-Dô Vày(òTq™Ÿñ'kŸ…ó4 Ñâ¡bï ø&PC|*%ùÈT}ȇ¦³`§C°Óg€ƒKœ×sWé.²¡Y€‚/€„rh] @!€8½‚$ºPˆ Z )°÷X;|µ")B0ÈåC”µÁˆCÁS­GRà@6Ôn(T P ãE'Îò[i ±×Ò ’€”È]R÷fë$@ÀÉ^²Ñ®Ͻ´¬ÄkØ¿'Á]5:4 @¾¹`[iØä,5’>„ý'ÎÒ+yCöŸ¸6x«ôåÆsš±n@ p ¿Î*ç)éP¥V1#ç’A(ÓZ¡Ôaõf´jØpƒÙD­ã“¤¼=Ôƒk3:ð²zh aÞ ´[ØÝ:þ’Sâñ8˜žêÄÀŸ@r°G„¤‡<ó …Ìi¢k… ƆÞEÝV^çÒ+Qàã¡ÕÍ —þº$ɧ®)’DRKóDƒ£5À45êØ?´ÂÑVö:éà€ø\ r|” 8 v šȘ§¦®Ú<Èœ"\d J&¤¯DÉ‹ÜQ*ò0!ÉQû@r VÒ](ÜiŽX–¬š ªÏyZO 5cžO3»j[0c?˜ A†/â°ÅCêóSøÍX¡¨} XÎŽgY=‹Rçý`Qe‡A¥6¨JË‹é%¤T² ÷P w!HÜ)!ÀÂ¥YPB§*Ot@"BÎÊ4HP²+„Ô+HЭ\@‚ÇH-…ä&HÐ’UÈJ é"Úµ Q¡-O›Õ e‚QŒ[8Ð’ÕàhŒñB£ªUˆ=‰%Éb¢ˆ$Mž+Z2šÅ$Ó&`@KVcUM >…黎RÊBV@SV!K  ½Ú΂ýÓ’áP…ƹ T°[¯,@CV\³Y°}Ú±âƒJzvQÌXqQ.eüD3VB¥â„Ë€“¬åÄ4ŽjM ˽í20 +8Ž x(Sxä;>Db5Re @©æá*8DÇgÛ²°NÍÏIål äu¡¿$»Ò6c€Bå4ð5Ë àl[å¬Ä.V¬Pºd¡’šX±Ü»Ù’ÈmÓ;4×òúðjbü BÎ<µ@Bw*^_¬îË:j@æQãð\G È4j1Üë¾d5lÆ:j@æQ&²ë Ìc`=Ñaõ~9ÛÈÃ"gœ…zè¯e3œƒtö'd/ð[‘o4aüÖÖßà4¸Í·°ù–6ßÊ*kÈPÈõ©žÈWåÅ‘Q¢˜ÈfT‚g £>2Èf½ù²eÍ(³òÓ¨ÅîO£ìžF-Gò:ªÒ±ró¨ÙŒ2¼§Q%žÉÕU{RÊ:œ$Ø:a+x—“‡ö>®Óú1¯Ëú±®Û1ž ú†É|^tTá~-ÿwäö[hÎ _ô¨^t熸1*q\%q$Š÷ÄwÌ}œÁbÃ1IÆQ#AT¨Ü Ox#NÇÐqÂ&s²xÎ%qˆ ï·&9Ja–$B°-:™€œ-ô]šÈbÐè1ÔšÅyLX=émÏù ó:)y° ó˜àV9ǰ ó˜âð€° ÅI<eÏtøçß¬Ó úŽÉ9óSC³Ñõ! 
Aß1ÒOU ï«K‰J– ¡T`pNß1æh>¨A*Oõ ì)xñM#»13SvêÛêÕá0Þìr7Õ„ê"¢Õi`%†„öêÁ* —ºQFTHHLVÂÃÓŠãæÀ ôæ± Âð A0&ã|¤/7 B¯ÎpÂÒá³µ2 móȆÐuì½ÔY0»³¨qÞ‡Wy1¼ê h˜Yc†¡%X¨CcšT«hãÕ+X‹C¡®aO¨ÁFáªdHf…c€´U¸Ì™[*ɲ˶éuÕ6£F­ot®ø*Ñó }£sºæ¬sma¢±#Ãö;ŒeÌ…Yårñ&¿Cår6㿨\FÜ j9T.ƒ® •£Ÿ¬.éP¹²™’¡s4iV¹ f˜ˆ›ÊÑnˆ³¨\‚˜0•KQ4†*—ZBo8‚æ®§:ƘòØÌLM4?¯Þ16Õy‡ôrÃÑW…7D™ÉÑXl£§fÜëi1.FÔÎ\› Ï|Uœyüˆ ÏÐ8D7·Ngt–†·ðIa+m,8slŠ(Ò ³Æ´M}­)¢w÷»‹«‹·÷·—o/ïÞ}‹(αâW÷ï.nÏï/oÞO±–ª?;S嘑T#¬Xç>ö{yu…vü黥PøÑÉ Õ*Ž$H’´_Ø$ƒZ Sªd1W먙G ±ŽiÔ¢›ë¾d5”b5 ó¨!Šë¨™G=À{¢ÄÇ¥ƒb¤uy”ÊÒÓ:K†øV6ߪ~Ã>*¾µÍo}|«#U4~ λ5™°Ÿ €qÌ_/x5}áÆãQ¬J‡°T`˜Á:È›A•á~™`3ˆ5Ò2/g€yÐØç2hÞøK³71àŒgÁ‚NO°Ü«úáüJÇÍ·Ìo©ùTjæ™^ËÑÔLÊq”ÉpÎQY¶oRršIJM÷‰UG3™ -fê8&¤ÈÌC^,/SVâÇ;‹Ñ®ÝÊ70Î’¥‘²Šž8o¤2Àú’œ&Ö«ju0û°ÔìÇRÕ·9tÖêàDØ^tåÊšeMÓîÀÝ0v§TžØeF’WœÉÐsQ'{! Àzh j2äR ÏW¢qugªsòº¡:#Û™èdnÚ‘nÚ}Ôô¢·(ÿNDoÑê™ ÑYjPåU?ªžÚBvÇ0rKv¶·Ô Ù½¥Þ–ýõ¹éu82ƒêÀº¥ Ùñ¬nf!;X^úLö&±ò³C´õ9x1’˜^ùÐ&>ô¤¹œe‹n¨Ã‚…뤟Á°z+ƒì©è3Vá|`Øígéï°f--®ÉHÅ&n)}`£NF$G^J£Ž¤i2#kÂa’h—©dÚŠ«*A¯¶tå&n3k» è"ò9fs˜}ql”Â4pÔ$¸÷<°ñÌ `ÐέxgV¬“vÏ$iÉém°ÁþÀØÔCÑb /ðâA¡L¥· *[EPPG‡ Ô«fï,ÃìI_0¢ÑmžŠ£H¿¢.0¡Ð.ø„£‘ˆ-6‘g*pŽI¦Ì†maØÞ­4šyŠü&ìÚ—v;¢žƒ&¡|mâÉ& ¶ö„X";©æì¤…FâÓìc±ÆÍp¢³o¦1ø Ÿ2¤A! ÃøP2~Òý…×™}¶z°g ¯:»’ÕK 3 þ‚¾5îR´ùĉ”eß½MÂæ¥HƒøM;_˜Ù %âèçaƨr¿nLÛ“˜@,ÿîiQ¹;Ë:À5·+3Á« JìÙAø–!Ajüà¢HG–¨F2ÎIØœƒÏfÀ9 Ìôd¶‹hŽ â@áw£èí@¤Dq £¤M¾ÎS¬s,8ꮈ[¶ì(C´‘gÊþˆˆƒ„l¦h¹ÀóŒç$kk <޹[»YðØ„ºé,5èiõ¨ì³.–Â4xi±6ð­诳5»X¨›>jŸU€h‘Ö€Œ<¹H3Äú-‚rüM|– Z­õ…T‚]HÔVnž2)°:bB5U”¨2Lƒ%B(REÉ‘y¡h@–îRfÌ,?ø²ºlhad>š[Š -Ëgó1Í-´;–ÆòÇ’¹xËÌÅ?í¾?¿»»xgù ˜±ýÅíîúæþæv÷ãÅw—oÏ<Ë‚µî¯.vßßê·Øö7ßüðötNƒiÔõ~qû k6A èû7ßÝüpwÎF››?ìÞžßÞ}ºÔÆpC†gv½0i@¤WàѨ2ñ‹å¼F-uÔ82¦È4ÊLÞ4jL£Li§Q dõ£ÇuÔ‡ô°@ :ÌgÂY=I ¦"p Áj²‡¥eùF_ÊÓ·é7ëh±çð-?îyHô!úܵ’éj>èZ™GiGǀ̣Æ9ºŽZOÖiTÐÄí4Ê ›QŒãç °Ãþ\æAÙŒÊI’ÀÓ(ƒlFÖÓ¨‰Ú³"­Œ0ÔÚ³"Ü(ôÕ:“òÖ/ìÓòKÈ6 s^oMiNÒÔ0´’î`G|¤"¼Jæ5Zž/q”UioÈ—X—î iùÎôH¬Ì[¤åPü4ëv¨QKœWj÷­ •Ù‹Ûg@õ›'˜ù·ª´Í 7ÄZClaš§<ï­Ú‹lÛÏ=Œe Cö²Œ– ¥à¦eÎcmcÏ iÑZ‰Uœ•PmµÈêSÛ«›7¿ VÏšN ­üR7WÔqª‡8hyXÎuóÌbŠ6LŒ§Ø¨jµl›9Õqò/󣯻Ì[Ík!Y²Îû•d1ŽŽ£EmÎϯ /dìÒ2‘12Iܧ$8,yCFVþú›fBvšÆ2ÍcD[æÙ&?(©0JŸ¢'MÖ7I3¬A; • vk[¤µËARüpnïRh=2é„ÔÙ:VRéÐ#h7ýŒÉ oa=Æ<‰—34Ò¯Zeå•>cõX¬›uRúb0lÐß©¤|öRwË® ‹Å£øé‰OYçVLY\mÙ­í_ûÚå)qi#íʤC³Ó&îªc»¶>¶õ¨9úJOGbˆ]SÓàÇ9oõ]ðNéÙ›š¥H# a'»>Õ¥*)i Š 2)Ì®+.K´‘x¹$ëÆ1^W×¢köQbfg4—ÖæE‚Uë#Ók ^]lVàÍ 5ÿ£d #ævu!²d+)"Œ,º6ÝerW¯BD#t¸|†*?Ö.fÒÔ]Ž Œn0 œµú ©uQs2MWorg-3+Ðt^ö •É5áVF`’V½¨©©¦X$¡OZX[ Ò•,ŠÍ\¼$iÓ •v/Z$xe-¬‡ \Gdøã…E»A"‹ÍA¨ìLžX8fÐDªÚ~ Ò¢ Jw©%3ÕÄ6…dS¢d±t”nMqd²i kƒ´¼.du~ž2ö”NSG6*Gå:!±hî02Ò }³óDÈ?ÕO¹ÒÑ”ëU·©fØÄ³ÎÃ^~ŽYÖ¨ÆSG¨,Y+D‰¦I9in62©J®³ˆ.¦<Ö ÅøÌjÀ€hBgš!9©d€Qú]DÞƒIFMI3ƒ ÝY·(Bf;À¥ÅÉJ„éQº× ÉÙ4²͹l™…X«W­^!ÌÆñ¢ #{–¢f[•`lÕI*Ý6¢Ë‰—yçªD4\2—jCš“ƒûs휰—â¬ßÂIr“är:ÆezµJDdšÒGEJÛQšÞN“¦Œhz•#óRØ€IÆñ—.•„G’÷SËÈbG±¼•¶Ôð¬UµO­Ï,f]˜}ÕiŠÜÁJ– DîÁ`ÇVŒÂé$¾&âM§©’¼—Ô¥¶ËðΕ°¼ë59Ø¢!LÐCNê¢d‚…íp¨dýªušH=ARÑ ìЄ¨<Ó½ê/vsÕ!¹ž¦­&<®“‘Ê Mƒ®Žó€çj"¤ ȦUz>Û0Ï·"b“Æv’êÓ“jO¤Ó” N»c˜0#ZÕz!¥FÊ xZ¥ç J,lõ*Îqö”T¹ª1‹«F‡Í¸Ù«Û "÷{„ j¥»f(±g§I@ CššÓØšO’–lÅœ­|`”©¢’Ìn£ˆÃc‹ÜzU –KM˜IL,‰^›3‰Æ›¹×ëÒÏã„ ìhºVR b¿Ó†ž¬D´rG=éBâÑã㘲2—ØÚ‚ U Ó¸1qUÐSˆuñ*6–iTùª¹SD]ÍX]“Lµ­ÄJ‰9áÝ‹f^iÊ¥ÅÜ5ÓÇm4Ú]$¹cÞÌ–o^u‡7,µ´ë½éN±Ü^b^ÞbÉÜHRPŠ!›˜²ä¤¤jÈ{¬=”\Iš§¨RˆªOôYóÍɳÆ"*ÖFÃo†ILg­äyƒ‘äuv-:q«A Ö© 'Rk ¹d¼èOIZñH¬Ã»G¹‹ êd+Ôú.½y±=wM8¶¡‰ƒ&83¯€í¾rÒZ•ÙFPuŒOZ½ðQ  ’ Qãx*ˆWVYC"픪fõSˆšÅ'U’öu±S3«`ÛhV^zÞ¼ÆúÝNÄÄfîGE>´U{œÙ1v‡«¹éREÊ‘!Ʊc˺{T("¯×w5ÅÚžoaQ#å Ã3U#+ï%†gR>c·ŠºÜGÜ®]Ɖ­¥tœ¨zÖpçä“T´'ÏY¥ÇÇÑoçU‡È`mšc l2c­›IAnˆBó¼Mùà!š¹Ï‰ñY­ªfÚoÀnS=c‰Ý§ÎøÛtâd*Ä{úTNšŸý¤‰¥•¨b«EwiPÕl)É_¨BÓÒ[Ò³Ce„Á™¨ÓÒ ±(Û3¨ÒÝ5¹}ž)ß: aJÖ\"niíÈrÅ[YÉéÌUMÙ›%ó8RÖ¢d&=´="hyAÔF§Á R°´4¢ô=5»µ¥2.k&öEgIr•]¬¡Š4/Tˆúàá¢AL@ÈaÒµ#gñiD~mi5y+¦*¬ªÓ7ý‚mUŽñÛ¶Ê1z™Ý >N͘¹4{-ÄÇFiããl­Óý¾ÂÌUœç1Àfm`4íèC»,³i§rCÿÝ* ‰ï²)û²ü‚/íX»å“å†gfJ·[6Y$uƒvüºªQ†· ,I.Òuõ8º¶“©›ˆ£ÂÊΉ‰!-RXb!11$qåâøÉëšukB³Z>ûÔ¿(z–‹g §]Ѿ+á­º,‰U§/hÍlMãôny¼Ä›¨§e­Ò4l« UÛ#hGœõÀé¾DÌ õ¢ä±¦¸&!6èê] áôbsì"H•LÛRåËW$” ö ½òìçñåJE ªŒ¶Úe¾í%M½w™WQŠŸa? 
ž@6)Û+¼,Ó§Öi;Órβs97ËŒñ’f ЙÉe&RJÕ\µAH¶fX¿¡&n4~CbÞÅ`Zò–Î[K×dË|Þ¸W¬‡€¤h±ö"DÌ_†8 Z*K¯¯ #9cµ ,/zÔY¦åæG˜å>±cG§1ÝãJ¥Êô‡Ò™´ÙóXƒí¢f½©¡YÕŒ‡V›—cOT 5KveyU³´l`¨Y3—w ÇÝì…@,]òCËøb’™Î‘ /[%ëvS|á×Ò‚´*YóÃTßYE®ë]S‹øPöM|LÄèÈid´(™]Yu¬Ë7‹’-E˲U -ëÅ|ë¡e­XÈcJÅ WQZÆ P˜µÌ[!fÑ2¾vk³nbW^š÷Æ·µ™1´ýSŽóŒ!ûœg*ÄÞ‡Ê EcTãgjÒo‡¢å67 ,¬TÆÅÚ–æ_S´ÔŒ2‹¢å2Xi2Â2©åCŽøž´‘µY­TY8ÓhyQ4vSnÄšnzLM£Ø„YÓ$vŠMËæË?¿­zyT{*gí+#@^´o¼±mÕ¾%^´yk¿A—I¶­úk…«úµeæAÚ°°zè_sv†‡âxCßÊEW­Ë|p:°¸Ñ¾–Æ1hعµY¨"=ó¾ <:R䫽’Äã žhÂD‘o"þwcÈ–—| O_8ô7Ü4_²÷Îøñ`N6Pà°´IutRªG²ŽÁ±ÔqŒ×l–Ý}vV­ËŸý_]Œߘúé1pcs®Ã‹<Šßaê pø÷'p`–á>Í…ò1\Cþ÷’wì~t&h PdºèPbîè®Ø˜g7Ü œßƒpkO"¼PqCŸá¯#Ì‹¸ùS üÉ8Œ+=‰ðWgÿYÇ<°é¯YOÊð…_/ŽÊÇõß|öùd{:eÏg?eõµæo}U{ØØâw§úû1þ¡pÚçä æï—úîwðy?N×ößn&8¹¾½8bc¼'&û¯>Øb?¶OX;ÿÿÏ^3÷”½ÆQýKÚk^½ûEmOLQýãªOGi¹QýÓq”q®´Ú^‚2\¼ö‹Z» ¶áY(„÷§PŽíÆì„Ê/@˜–þí·CìuëE}´ ûÏŸ}®ÖŽ>£?eíþ4‡èç·÷[³¡N§šËßÿÓø\ör±Ë"Ä.?.–bóø&`{ûÐîiô•¶ñˆšêÖ7ïãÐxcÖþ Û²â?Ø–ñÒ-%Ù$éö„1ã+ÔÚüsŒÙ…M¬œº_<Æ¿8ºp;´¾zžîèÁ¯ŸDÒ×ð”½[|»êý3]¹U~ÿÉì]ÈåCí݆±_"[þªŒí-ø }v¼%W¶èÿÅ1B±¤BÏìÓQ(œpÅ<¦ðB/ƒˆl:I E‡NSÈõô S(|…N(Ö3)4Ù¾áŠN‘˜Oч=>u19þÉvLö7GÍ›ï}jO˜‘Ô²Ié(¶;ò,²¸õ¿^'¾cœÌù}7TÛÈß3b1#Þù©ˆçOH »º^bÙj®OY'iàl홎Ítt=ÆýfulâG%Ò>—ª€/e›ÛÖÞ«‘lŪ ôJN¸:·wÛÔÛÝpŽâÞ½îΞðNªœpOø ?Ÿîrk¬•“ ›!ÃË”ô¡FˆýLq¸O'‚-Ï®òîI Xs\`™óï÷?ˆC ÃjŸp²€D õù#q˜K߃û.¬<1¾ß@u|Ê®=J Œ{8qú±•¥<›Æ?ÔFoQ>+éS |ãg êê2†ð1ÇŸÞ?:žáùã>_À»?IŸO¦È-ùÓãKNm;v^D§Róét\Žø÷œ(•ÓlB'^oŸÁãEÎ\^ G­æ§3§R-=L¹¯_†>ý„³â]mþyòÉx•äy uœûàíužv¼€<í$yønÃSäyŸô¤ÙÝèë¿iúÔ?/}Z8áM<#n-òØç“˜<“§m±ëæúû5±{­ïÛSe/ý[®G¶$0ÃçÄÕÍÕÚ<«]Ç;P?ÃdQ™qyo»Çöß® |çËz}j/˜¦¼)ÿŸê:»zØl*Sæ‡Í×ç·<ÿúAC‰ÐŸfR\j#%»~ÿhƒök„lÜ\›>½mèHóë-päÕŠ—î åè`…>³aÅ–ów2“ç¬æù[áA¬¤“®‘•óò¹PšØ5É?f$T%رmМ >ßÜ’¯òÙŸn$Ñæe™µÇ©óA75yíÂþ§1V½Ÿ!QGZ¿—%–;uÒS~›o/k4×ëa<ßûÒä~ªØM›öÅó^6îó|_îï­›Ø üG:|ÀÀ¥M×Ìa é¹–Žp”¤ÏÂ4*±NT-½´¯Ù?­cî6?lÕûtg¦4[Êïܤ5Díy2Ê^äµÀì¹½?¢r£Êÿ=ý—釿;Øó®¾æ¥—H_7\¤Ý‰/RîÉ¢V&]1¼°•ÉX:~·àž4¹= ñÊ}&ûSî+<>êv8„ÿjÃŽíæîîm}(ÔùÝøü¸szeô혹€üúÔ߉:–K}s¬34£… A=¿úùþrc†Øˆ–D^ß,ÐQÇnè/FJe»Yp=èùñ,'¹Ë  ²sµµ"’óþüòjkž/d˜4x<ì3—åzxD-ÌL*­ø£cXüùͱÆlj†‡Ÿò¼Æì˱ÆlÝÖ3/@Sº¶ƒïÿn#([qÜœLä[î\íÉ6#‹1ËÀù†øvE|[#ã‹ _b‘ó5ÿà0ÎÇýù<ËLNñ¨ þ}]y0§íýõzL+&8е:bvek§{”ßßN+Úš^vQ@RÈDTýÙ–†Æ½ð’ˆ]ý×üà|¿½…ô ÕôøNu­mËíWW7Kýñ=<»OôÀq¾Þ4XÏ&ö°¼í¤nbµv¨ü¤òú»ÿ É?ª!º¸¿û£y)œ«ÿòzóÛW+aà/õ·¯þœËªßendstream endobj 386 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 2282 >> stream xœViTW~ESÕ¥¶ ´%"XÝ.¬ bä¸à(B³EPQAÙ•¦Q@11æ — ‚# *‚`+naQ0 #" Ç3Ñ™Ä3Ƙ¸/·zΙ×`LÎüœ}λ¯«¾{¿ïnÅ s3Ä0Œe@|ʆøŒäØ7]JœéÊU²c${3i¬ ±Ë˜ÅJ¢a… +ÌÚÕXà +Øg #9ÃÌð_Tä¾(ÂeÒ¤ÉótiYë““2ÔŸxLõT¯ÎRøGퟞœ˜ªv¤‡ ñ)º4m|jFH²vufº:,&5]mòýûw ÿ!$ÎMÕ¥­OÏÈܼ:+6.>41)9,h2š‡–#_ä‡4È @„<Ñt4… +d4EC‘Šj‡Ì‘7ÊEWg&žyd6Î,Ö,_¦”ʪe¿˜{›W²ˆMf弸hî†ÜFî!?ÀÛò³ùOùM|!ß5Ƭ%XIzFÚNBqbÁ#{ŒZ–xq¤ä½–7®èuX d÷N²`½äÃRõÖ †U ¶Qžï” B¿š}reßµsgz;Ž­ Éôæ?rPuÞgùºXQ9óžœBd Ï€eêéE –A›ô@f9†øÍ ‚À f½>ìüŽx« +ôµhÆ9FøÍùliïë×zïª(P: Ÿ` æ …Ê–Óhˆú}½©çêB›t=˜‡Q¿€̼›y7±E¥\z!1 .ÈÎG¦&‡òÊ>ªn6úOpŽô÷óêyñ²±ç&…·¥TÇ€jªÒð‘fƒ\¹àþéÓ÷î/È/a¼|[a>ÞŽƒñâ˜^éñ–’lÍ3xfL}a1%)í”Ì„ÀMë%˜'c§kˆ,±8¥|ƒª|SYÞÅl˜1º>·$gòkâ6%è¾>%æo/Þ^MŸæv“a?†ÀL|_.m¨m¨=Ü‚[ñ¤ó¾ÄïÂ踒­{q%¬VßÜ}<}y¡h?d¼…·™ƒúV Þà@ÔÄAãÞKþÉM?óSí¡]{«Ä§ò-;¾,ÈÅ|B^q“ ?$hØ$½”?(ë@Ž7I'GI ðH®<¼*Fª­íá9Q÷+äDÑü­æª¿5ˆÊ¥Á&öØô:¯FAˆd`‰3G˜~"©LgXØo`$b§ê%O`­ÿJý„ÂXš>©¢LH?´­å¡Ufs¾%ŽÄñ³ùÄ*O;®èÛCéÅ|ÛѬĬ¼œœ<Õ¦­kröŽVgE,Æ®¼kWÐ³Ž¶#[h$X·këÍ ŸÓÔÍsœEÆöòbpÖ©^n|¾É Ã lCÂ<½÷•H¹}ÄãÝt¡,€ìGpùÂbW¥"\¥º"¥·Ú^jª¿÷¤5vî‘v¯)¡/ôÌ]p…S> )Åþ<ù´•1nn+Ï<¥<âJm<«raóªÖȾuñCÜWÕÚÔ|®ò ¾ÏKyƒäL3` ;DkâóïáW^¨ˆŠ*4gðg£¬ïøÃêëî>qª£±:-Z$ý7×O4ünKÖò7ÁW ÒD¥‡Åˆë.„W`º+æãØ+ÂøîÁ•¡Ši³šº*ÎF•1Iø ­^°zñ¦©˜ˆ˜X—N« ?ãß›ð÷áΚsí —ô0Øbµùaò¥˜a&Õ˜5?«7–%üY»÷Sì‚g雺9F›‹°®<ûÄ–cÛnã'øE/®>x¶æpæ¦6y&ÇyÒ¡ mèeîòÌ>ÑÄóî&è^1.ÓÓα”Á*“HrâIÇ qÇdJ±jóÏ95¬^2 Μê€} Ñþ{ªÙ0F: ç„þB:Ë ¹h0ÝgA¢ƒOØcpÇ0%ì µy'îY}œÐOBgLœ’ ;ŽP¸ŒC’a”¬=ÈÁˆ¡ l±BªÅp„þ Ù¨endstream endobj 387 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 2679 >> stream xœUiTW®¢éªŠTʲU°»ÆADA¢HË"«ÊÙD¨q×ð\&N5(¸¢FeQ„(ˆ5 
(vƒ€tƒ¢2Ñ1&·8¯=3ÕÌœù;çÔ©S¯ªÞ}ßý¾{¿K†I’c<ã“3ã3c£õ+KÞ„äM xSÂ)ÇSÅüôq'Bã O›ŽÝg MaÇxˆš@ˆHÒÑm‡EðŠU³f϶Zº>5;-1a]†ÜÖfž½<&[þǹ{|zbBŠ|¦ðŸ¼>UŸ’á—¨ŒÙ˜._’þßÿ_(‚ ÌRÜÖ‡¦¦¥glÌŠŽ‹_°.1()Yi3ÏÖÎÞa¾ÜqAøæD±$VAD0B¬&ÜwBA,#<ˆy„'áEx„áKø c‚%&1™$1…˜JL#ÌÆCaS=I‘äa²ÙÀÑ Õ Ä@#ÊÕš¦^1|'Îßw‰‡©9ÔRêõ;-¡sè_™Ì—ÌæÝGÒ ÆŒ…KF|*äWY^QRVX‹î"mlÝ܆å^ºR×1 Õg}%îjØ· Äàjj?ä`3xҵ讳›ÎeS¢X»E¹!+mSÊ®UÂOž¸ƒCE³ï»«#¹„E8Éð*ÞJE–h N#‚³| ‡-ḭ̀;VhÍaXô¿‡Åàíð[Éö»r/\ñd,I : 5‚KxËVòY ø`Æ<ÃÝã'œ .Ii>uâÀߊ¥íô—û¶åæ &a{^© pßuBõù7÷À¡žÅvŸ9™/Z€kh¶J¾Raï}í”§è,hûæ•ÿ”²î*T}¾ºqJmí9${2Ìå‹Äs(sÕ ÞJü)õ•\x°¦ÀJW$~I½å­ÓY‹GNämTd…Žöˆøïª¹_íE{“ò×ãçdpô¨Áœ«ßƸizêvåW«˜^êðeÔˆyr}}¨l#›½Õc³9{oÒV¿ ɑȃ±jñÿ­¥ît}£ôëàsõè:r ø0ƒg‚‡ÖïÌIËHLŽùkb¼ã/ÕÝ))8*ëÏ/8T|”ùN—‘’à-·÷ö¶ËYª}.¸ J™Ù`Cì†]ä`;;`RžÌ‘Úâ¸&Ú1ÖA0$õêçí•1ny#å¯*ä}ÕàZh|NµBóVó;‡•Ü|†ZW·²Xß^¦¶˜ÄÎXчI0û¹æ|s•ŒÝ¼ì1søhnàî"l,c/ãí¬ýž‚Ý{ú\:ÒZàÙ ö*’σ­êÚ}ksybŸsí,!æÌ9X„—à%f`ãzZ*pf,‹HðD!(êÌúʬ‹».æÖ2Z¹ÃCzó¬ÑkŽPà— …°&H­P>G„ò±ï¦®žB3b4…PÑ ßt“Ï¡H>pŸƒlê.ª*(¿ZyíäMôé Ux†×PR¼‰!|!t]¸³sH¸­ž1œÒõ  |UWUÖ”·`|F³C¢#‹½L0k‡ÇàEvEË«Bdå‘5iõèª*®laRi,[eî¾¶¸i§t>õwK•PH…~<{¥âZõñfã!œ7Í&ãt_N[³OÅìê%ö¡ÀÁ¤š'}²?WÐŒlï WÔ.öAÙ#Ø­Þl‹ÏíÜ\(=“—†Ö2£¦«õ¹óé’¨ŒåaR¸HAÚH¼yoÜÒ»@P|,ø tE=ûvXièhÖKñ ˜ÝM±Aü>lÂÁÎn]›ìú`2(|Ý7| É>ÑWŒÁêi(p©ø%ßkð/±œÂi8M ðíQÞ•)€ÎÌ[_´áZðÏÉ]hó7¿‚%Èç¾Æ¦¡±[•1²bíQ E á‚À¯ L¾‡ÿŽË1Ÿµc¹ E|´w”û&;„%¸ÞX^p?Ypf0z ŸÀÔy/1«Ú/Û ¿¼E÷ÑØc F¬Îܳ:ÿÙ6þ^ |Û^ 4Ý×è½g´÷~R)Ч‘bϳvÁ»—a‹¼…yI•CV?¬{HdFá•?†µqé€÷ä 7 ;Ä›ð¹ÇU§KP+”}'6Æíañ²ÓëEJÙw cbœ§á)CÖDù?^À”®ØF§ëRv3PÀuÜŠZêµdqxU{û­ªûß5Ô6:Ú¬°qðoìík¼Û?êí!ð£"ˠ쵨œ¸<æ‰Ô‰ÚË*š+*Ô" 0ÊVEÿ×è{ÉE°fË™XާÌIkÎÜ©’åbÏ•3‘/ZsmK;“ø\÷ýesmý}íç‡4½~ÞØÒ-3Ê8͇ùIǨ§cǪŽד7îc‚ø7޽endstream endobj 388 0 obj << /Filter /FlateDecode /Subtype /Type1C /Length 3244 >> stream xœ…W X“×>òï/7%#¡iWzQi‹ŒÚÖz©ÎbU Eà­´*r‡@ B$„$„| wä"7A´ÖµjÕ–§mp]oíºÉ\«íºvÕ®ëv~{ºg;‰ˆB×燇çäüßy¿÷}¿ï| /$æ¼'Ï•ÇË’r \°15}·,Iáþd/ð÷zñ÷y+H1ÿÕX!ø{ƒ¿Ïé{g[Dx,kfá—g#o`‹¬d•<¯D‘™žQº02ò‰ èß%¡»JB£"Bc“’³åÊ‚ìÌÐ¤Ü”ÐØˆ¸ˆÐur%]Ì “ç†îJÍH’¥…ÊÓBR·†nŠ_½1>tíÆõ›6Ä?ñ_±M[D=+ÏSîN*INIMÛ˜)ˉ|üɧ-}fùаGÃ_^Ðz´ý Å£´ mFQèEôZÖ µè ‹âÐ:„DHŒæ `$E÷¡Ÿ£ûÑ]È=ƒÐ ˆf¡)[Èu¡ë‚¯¼Nx¯ö>ç“&|AxYÅúY;—í`ÿqטï=¾•~wûuø‡ùçú  H è ø.ð—êÀw¿›%Áù}àÄãΧèŸ.œà æúù¦±uP*`w0‡qYë7;Ž>!Ѱ5sw1˹†¬æædi£d†œ^hö5f ˆŸ`š ¬Sáðj!×½åÒ¡“ÒÓàW¬O%JIÊÔkû=¯-˜|¥èôNü;§€,wâ ׈¸AX *"Üïñ¸ï¯:±ˆ]['.‹X¹òYx^ü°ù‹æÏ?zӱe‘î`DáÄ¿pâÜéžu)\]:×w4çÙˆïô0+êë KRd,÷0ê·BSêSåLhá}?™-ËéР•f,åÆ5;˜¼k8éˆvB°°ðËṈkæpQè1ÕÒØÕÒå ¼b?ÞxAÂiÞ©ÿôéEx=ªìIöiðÓ.¬þS0ÆãýÄæë‹a+D—ļ`–p®AkU}¦”x1en<9Y7´{É#ÉÒ鋸‰iž”$Ê€%Þ ¥UÈ…ý¥åïp€ÔX°WÙû «Åºÿ6ýXäÄeNo¾‰[ÿN‚:eÖ˜FI®%¦±î°¥wtðKø#8rLùÕ¥¦ÒAK§¹£Ñ%©m±ôä@èÕFÝ®ÎP d¤/ѧ«~ñméUõñÊ÷h7ëÓvfEÂK°©¡üc¶º­º!2 ªÈ¨T=*1ªªà€–š¶Ñ£é§«û/„×ÿh?> íJM¶kƒ+ÁõU±†ÏÂ"±¹ÍÒ™Y`(2–jâ6T pœ²ô°ÜÎW°VˆSz<¸=Ém ‹Žmš0·›÷eBHéÙtY½Døä”À=ÐL·^½Âì"FÕcãé ä"\9Ú|õ&·N"¼Gtáˆ`.o[òA ìvæ-¶Ö¡×p—äænöG»w‡ê±÷ èÙGàhý‘>–‹Î®ê„6é¦ÅÒà˜sN·“ïtƒPP•“@kÕ™b™¤á\òeò0섹PeÈiWuÓÄúu£µöššŽÚ¦š½”íà˜¬ü9²ÚH$ÛT±œfæTìߤ£p¦îÔ Ë­”µ7’öQ—Ð#Iø·bcaUQR ÝXš²*¥q1l;ì8ñçQ<×â)»bç:,Úáä¢+xI ¿‹èïú‰uµ@ÿ ÇÛhnàÃ?,ž†Ü5ݹ} AnºXTœ¦-)—±&¦ºÝÐYÕÎrAh3Ö {în4]Û D£p>HEMh -B9*¥”³žd&È s…Éòt©"5!­ºÌPç¨ÝK³D=–²6…”{-4”%Uùº¤ýÉ'à:¼%¶¬â% ‡fß<ðŽR¸¼ùb,[ûrAOQTTk”•{ò^zÁú³ÍŸÛÃ?ÃRè„Ã`Ò2ZÕÝÐ ]­Ö~¶ñÂC°ÈJXBî-{BIü7‹ßo|mn÷':Eç]w]¡¦/›æµ?ñ¶ŸòZY(ïOmÙ !D@[h8mžT12»èâ£ÿŽÁ±ýCý,•®¯V¿rÛP„ÃO‹¹‘xí†Å+¤ÏAâ¹æ«-_]zï,üÇknb‚w?zßM-—ÇM%^ZcEqÎÖx­VBÌÕŸ°ø$Ó í`mÇLâ×d? 
d~™År£$PyyK±Æ?£Õú~PLÚ™Þ ¼uNÁۊʇ§:"¢Å¶ÙUêZóé öý4˜ÛÅïçß׿»Œ¾ ½¬~R»Y2FùFÃpVwOœG•ïqV¯ûÜ=½'Îc¸èSx\ˆg´Š(ÈpsíÇö~ü> @ˆ©Êœ2ÐÓbí›Ä–å~DuíuaÆí²ºR©DW5üÈbëPh  AŸcªÔ¦•‚ì¯;N³Aõgj톮ÊþâÎò†ó£ýκ#MŸÄM_Z«™›æL¢‡YòƒZ¨˜¾ˆý¦U0q–ðc¦¢Ü/o/ªÚ­K¢Ý ib 9MÙ,÷¥&§±G5,=#¿­;ÄÎÇߊ+_LøU$°«±¥¯dŸœr‘¿»Jn·íîƒ>èÝw‹Q^MKJKKJ60ÕæÜas²pÆ%<ÇCÏXž`nê­[õÌös?ÁÜ ¾Í¾Òá6HÌÔ¡ !vîÉHÑ+Ú² ²ÕòÝ,wiÀ¬„iöTªphš’t(º=Á\™2”;ä^+áN\m>q숴 J5±)D-)¾säùðG/4_¥Ûáÿlwg£qòv: íÃo‹ñºe8˜xy&¤Yäž+#ëþ@‚Ý’àYøžËF7ˆ$lñ]Ëa;öíÁaÝxÞ'×.Â8ýH™?I} *EøyPÁÇO)É|Ê·š=—{H1T–ëÊ·,Ú¤QVÓáPe*ª6È«­†fZê¯ÚNz’È~µLW9ùûèA—ÇRÇ^3S=Òøœxë´åÌ;ø;Û½Ê-ghS©’¼B¾—hö˜ªJ}–]7è®nl¥å]B0nÆiÑÓ«#ŽábÛ0¢õ1 ‚,ƒ‡ñß„Bñ ?E9]K}Wƒ7Š[¿Y÷5-Æ Ø®Û¾{o”`Ýôd“Z­!{i—Kª ‡  ¬VGý¡_·½ p J«^¶týÓÔèqöâ·lVG¹ÃÚ¯l)¶ ²µúõâó«Ý"f>kÇÞ7µÅ^ãØg/¡hÖP4€7‰©7£®R‘·@Bé–]‡ðM\ahÒX zƒ^OÏ/hVöR|ôR°ŸÈ{óQŠð $ “lO&×£®{õ¹Üýé,ª~ê—WA,䜨pê»mݵMõã®7>‚Kðjôžg="á ƒƒüÞG\7šß› ©#3Èti½ùCpém±•¼óÓÒ(¼òs±¹]c¯¨#õ8F‚Ïb:@6ÛÍí!–®ÒFhÁhRiÉ¿I²¤B_Vi,®ÞcÓ8tø$I•ìÕÖšè –.w,¼åìÄY‡EbÛHšûö#×5¤>CŸq©îß3pl#l> §›² œnúž)§Ã¡>Ã1êi ×èÐ> Ö¾üޏv¯¾A[GNâTI¹ÃX£¥ú•W‹*ëu¸žÄHÈYS®sã ©*l-o¡#“¥¦ÙÁ#œ,©·5Ñ|< ©^+7‰k›õU#ñº\€CÚ±Oëu'¾?òy[ƒÝRÛ\c7uäC.""¡ã’ø >¾Û‘¨÷â»íGGÝëSk>×ýdŸå½±ëBìXmJSeu™Cg5âyDPNžÒÄ–Î_Oî'F–Vh tœ©¬¥ã<Û¸KŽb‡;Øb燨r7O5Ôp{ÝxHˆÃg¸:fÒÕ„ý×óƒÌR1L ‹¹³v©%ð·=âzgöá òC¶.µGAY‡º{m~Ü6Uy•ÆXl0*«ËK[aˆ~S{³ {Ý‚'‰1{ãù[ =fÆ–p†xýë!!9}=†9‡uÈÃO7µ‚[<ÛH†GåsTå F> stream xœÕ][Çuv·µ^ò`#€ c<¸'ÚítÝ«løAä(Ž¥ÀñyX:ÈK‘cÍìXÓC‘̯Ï9§ªº«ª«ç²KA1ôÀ™ÙºŸÛw.UúvѵlÑááß—Û«nñúêÛ+F¿.Â?/·‹º½úÇÿÐ~i]çØâöë+ß…-8w-—|a”iP‹ÛíÕ]óÛÝÛe×vŠIÃ]³ /ÊIk›Õfy#oÍîkü,Z'Yój# ë\sXÖýaýryƒ¿YfuÒÏeMaˆ¤ß’Û¶ë˜hÞ-¹iVûWɼý-lB²tÜñÖ( ¹½¿jÔòöOW7šÜÕZ­ñ×»æð†¦–ÃÜqD#µhzøÆ o;ÃhnšÀ›]øC'›-}†ýJÕÜ'‹M×ÖlzlÄ`w¬y½þ‡q~hÛ;}šoß&k‚öJâN)öD3Ë ãËæÏ~=š5»µo€DJ¤GºËΰÉIÒúkèfarXÆjN*ô|›|yý& kLaýi6ü¾²zçš­~ì7ÏÆÍzXïüA:Á›°iÕ©æàÐ\kà—=œ£‚I„†½ôíòFr $4Íí¢«²I7Õ/‘§˜h•t|qû»«Û8Æ-¸ZäöNkÏšôE5¢Ã]±“õ®½@?˜ÿlÎòì|£>;®ýú…X°¢Ý}A "œ¢—]ØYF÷uÕåì²Yõýúg‰*È™‰ˆ4ÓúÃÕ­pÖÆÃýÔÂn‰ëOgµÙ|XZœOéÙÑt¤\JBp¥ÓE“N²:¼-؉ZiÒL™A±|wÃ@,?™ý~·]œÉw¶Û"e5Œ%@*æhð@<&´—UŽ(zÕÇ D¡öŠq|+)›a^+ó©ªíQ2^?Ì‹þ}B9:fÞ¼8¶¢$‚P­2‰‰Ç§DåXÑç{;Íq=KÖvŒ]{úÑJ8œuºÆBÌ—¡QzÊëîd.¡Ò»Öý@K"u®v ±ôË[Ý'¼–é¡*sÊÑ!‘$JÞ¬²^``Ìí æx¦g6‡uªõ6ëRpüL»qŠ {&r¹]}CÜÆ´.õÏdd7v»@sÏHù õ€»p~nÞ"Q[ƃE â´Ï”öÄ"UÇ‘†°N§9…z—t°ÿ<0RP)ûŒÐ©¬¥vwƒÝÑð:Ú }v¹*F“æëÌn÷oB/QÈäûp"BËãŽJ€:ðvöÂ@XÈ0 Ó ¶$SÎÙb–é I×é<&ºk¶¯ovKwöÖ/k8J²Ör#bŸôÀö»mXYW‘yü]«ŒÛÓάýæCØŠÿþ:ÎaRU±:Ä9àçT„V™Šª ÄìD`Ö*£7ƒ¡“cAýpïþÕf‚ Ê3¬5–Ç# è¸àAøX*› @YîR}Q uPómv´9ˆóã0Žl=,sÝûŽ«dé«Ík°ÌYç­o•ÃIÿ[†â"Œ19ÁS¦È¤¼¶‡# V]¤>|Lè¾° ½r¹ ´ôü€ZIz[ç'L™{j'2’ŽÌ=nÑšþË<b®„ ä9Ñì Z}”üZ•‚Q5Ó,`` ¦mX†ö0*y,h·$Ÿ˜¿rS}XlúÕv\]ÄI€WˆŠ`¡ j l€{¾Ô3Ó„M N©sUæÀ¿ó‹E¬`OP(ß­çtÍXEW7² 'ü~µÍè¼FÕ ¨h¥¶ù†¼úig” ?¨ÉÂ̱Y¶Í’›ÈìD>åKy^úRÊ:iÉ2÷ÝD§J"ðÎ5uæTE“o"Šz~@@»’Q’¡Â‰”ô l#UÑü°ÎITE·K+<ÒC<¢ˆÒ—˜ðå ñƒž^û”^‡Pò_MQ£y‡º”¾9%#ÀœZÅ`¢Šó€ohëC °Ý' Ì0Ça¢ÌiEa€ …{¨rþÔÁa¿Â·¾?ìWáO°¬ÜNŸ°(cÁÓÙÓÃ1I<¡ À”¶L&wY¬DÈŽ°E¨ð³²3ÃÈ&æ¬Un>…p0P®£ßëƒe ´&ñƒB®¸îž¶’¦iûœZù±„… ž7Ÿ§>Ûíu8{¾ Y!ú_'}r ÷ã¹x§iìD³Ó2ÁÕ=øås­˜acÓcÆc4¶æ3#shUCÀÁz¥˜÷ÐÿØ cDƼ&¶ ^6K_á†I 'D(Ý3) ‡‘ÿš¸ }ËV›ut ]óÁ«.‘ Æ*òŸ²À%Žêzy€8[§ÍÙfÚg”¾ÍèÜ<»~vý’›vÎJT ]Ãïg+Z¸ªœ3*i‚RÖQˆ„@øæMÓ`Ún°fØ7 {ØDÎÆñ4²¥†>GJ?är(Œ¡e]@ŸRŒË ")atŽ×|+$å¨õvûtÿ¤­:A- KóŸá…/v›Ã/âߘ•<ë®ö¡›ÓçCyÚi03؃5Ç9—iÈK¡ýÊRA »+ƒFZdì¾Þ®7«ý&ˆœÔõ ©UÔÓž‡%7 »û/Ï›/¯¿ Nx™` ´–äwÚVÃø‹ÞJ昤0þ=†ñKïKµ ÙY$ ïšë/I,:EË­9ý‡ Õ ì<•P Òg4øQ’þ9[pèžÔS°'˜gdI6™öSb¡(œÑ€“,6]à*U»eä—6 ;ÚÙ‰êÃîæ „ó¤Óƒ¨wèàþy#…<ß(Ùr¥XáG5ML‚+D+Ý…§4Ar$Z1s¹ÆÖF¯½Ü–ª]8ò°jÀ/dòˆ.BÀ¸ÈC60mÙ4UãXšcš\û&Å0y›ùa¬þ(ÃÄ&w7NeÎfÑÿ&%^:0ŸˆÑmdr]¸õšˆ8µÞ—^‘ÃMãÿ41ÆdöSÙŽ™¤°$'²`±2פóòØÁ»5®½cÏ$Ï&!§ð“â¨nêzVò¼ Ñæ1)%iw}ôªJU’‡b~6G®ôlÇktŽÿ=ÞIØJJz"ø·ŠL+)U*ìH\pÛ[oM­OàõáTd‘FÍή¶ÿ—4º†Üy{»8´b]Ä!ü@”›>Ë5¥;øZÁÒ¸é ‡7g¬Ò/ë¨P÷ѰÞÙ.üYLð?ä¡‚]\²0ÞûÁ!@VÒZ֣̠ùƒp úý !ßΑ*’‡±0$XWY^¨*Ž]_píq|ŠO™i¶ƒ>@’”às‘94š }E°Jcrpy@ä|ò~µwÌh *SÜ­LÎÂ×E¦”RÂN“_–ë!ÔcAg‹æZ–% ã c¡oò¤St7Þm2ÍšÄë2p<û$?$ T™RwäXeAÐþöÚ‚£æsDcCÅæõLn¿c3Åöô;²`L 0–Kð‚Zf‡lËO±»Ô ³±µYÀñ˜ŽëV`4„»œ9‚Àá÷F˜©¤%¥Ø, 
î81Êb"³e\d³/cSƈSŠ{™‰D‰PÄjÄ^|³.0Àj3íÅG&™!šáÑì]2ûîΫy·pæ%¡LÃÏûMø2l|¾N*%4ÃZ8ÁáΣ/…X¨›¢«³ ö™÷âMÿ0ÔËH-à6 *¯ÛrdaÔ±½Xô :DW*ôÓ?#àu¤½ˆ5«(=¨ÆVÛzÜô&؃o^n£ä›".Åj¤Ùô¡Xõ$´‚¨Dd释¬(î‹{Dqqו«±ÛcaÇÂöa xµÎ± Þ……}‹N:³RÀÇ7¯÷„¥ôäÐŽAÅrÛ¨dM[çÀl€†€ÑWœ*aRæÅzd^Ï’÷ %*ÌkýøxöÐx3ŽªR¥ZQÂqRp¼=¦ âw‰Hý¾¬6X%²Zný0t¾þ‚CäƒN,KµÐÄPì87=ZûÇ‘ã™vfdùºmo›ééi]Ío®» È¢*Ÿ:u8ò¬c¾ß2ë¹sÍRÞ”ˆÕž4¼½ŽQÄ2þá¼>´Q÷/3ÀLÿSÁ† îñÿÓ¬ Œ·ªMwˆèŸ‹HfZJ¢yZ&Šóãë:¹xð§óeòàr¤úpÕ×9sæ0p:ß=s¸jJ(“pQ]µ;7?Ÿ„ÄEf|·Û‘›hЩêO§Ïs†¸ì»CJ5´÷4ùrðc ?-±×/W‰´~K<9ˆW¥ªÖ0¿ÞbsSÜ™1„ÆBlö¨Ã%f4/£—/¦.,;˜ÍÓS÷ `%މŽÛ4‚ÂU•{0| 0¿ñŠþqÖûãnË)µ'þgácfýDJ$½uÌ[媗1 ¸99ì¸ošf‘Ì"ëY‡'p?¥†w‹ì%\lçeѶ«LùÀHXN³‡ö_OI̱aÿEÑ Uîžûú:Q…° XVÅ¥°ÛœÞ4 ¹£”’‚Ò¿× ý ÉqI§<öãÙ©Àqíoš!ùãÓêÖׄžz¤$Òa”qIì„ ä®C žM ÿn6^=›¾Oþ½ýþendstream endobj 410 0 obj << /Type /XRef /Length 295 /Filter /FlateDecode /DecodeParms << /Columns 5 /Predictor 12 >> /W [ 1 3 1 ] /Info 3 0 R /Root 2 0 R /Size 411 /ID [] >> stream xœcb&F~0ù‰ $À8JŽ8ò?ƒðæ5@6ûZPJxö}4%ŒTò?ƒæÍ5À4p½v4 ŒTò?ƒi&( œy;šF*ùŸÁq ¨F—¢J RÄ D*ú‚H¹½ R¬ DrŸ“ÛA$Ól)`"Y€$#w6XåÉ+–­«|o‘ì, ’Q D²m‘\¡`5`¶„Ø Ý`pš¶V@l°£.Ø®#`öoÉ2D ç‚ÙÏÀ*62-‘ÌïÁä2°®R©er-—+ØöK`ײƒHɰßÁ™"¥åÀ¦-“z`2Dò,û±LªƒH¡`3o€Õœ‹‘œ 5í²û¢DrD€]žÆµ8o endstream endobj startxref 214256 %%EOF forecast/inst/doc/JSS2008.Rmd0000644000176200001440000017272514473635572015255 0ustar liggesusers--- author: - name: Rob J Hyndman affiliation: Monash University address: > Department of Econometrics \& Business Statistics Monash University Clayton VIC 3800, Australia email: Rob.Hyndman@monash.edu url: https://robjhyndman.com - name: Yeasmin Khandakar affiliation: Monash University address: > Department of Econometrics \& Business Statistics Monash University Clayton VIC 3800, Australia title: formatted: "Automatic Time Series Forecasting:\\newline the \\pkg{forecast} Package for \\proglang{R}" # If you use tex in the formatted title, also supply version without plain: "Automatic Time Series Forecasting: the forecast Package for R" # For running headers, if needed short: "\\pkg{forecast}: Automatic Time Series Forecasting" abstract: > This vignette to the \proglang{R} package \pkg{forecast} is an updated version of @HK2008, published in the *Journal of Statistical Software*. Automatic forecasts of large numbers of univariate time series are often needed in business and other contexts. We describe two automatic forecasting algorithms that have been implemented in the \pkg{forecast} package for \proglang{R}. The first is based on innovations state space models that underly exponential smoothing methods. The second is a step-wise algorithm for forecasting with ARIMA models. The algorithms are applicable to both seasonal and non-seasonal data, and are compared and illustrated using four real time series. 
  We also briefly describe some of the other functionality available in the
  \pkg{forecast} package.
keywords:
  # at least one keyword must be supplied
  formatted: [ARIMA models, automatic forecasting, exponential smoothing, prediction intervals, state space models, time series, "\\proglang{R}"]
  plain: [ARIMA models, automatic forecasting, exponential smoothing, prediction intervals, state space models, time series, R]
preamble: >
  \usepackage{amsmath,rotating,bm,fancyvrb,paralist,thumbpdf}
  \Volume{27}
  \Issue{3}
  \Month{July}
  \Year{2008}
  \Submitdate{2007-05-29}
  \Acceptdate{2008-03-22}
  \def\damped{$_{\mbox{\footnotesize d}}$}
  \let\var=\VAR
  \def\R{\proglang{R}}
  \def\dampfactor{\phi_h}
  \raggedbottom
bibliography: JSS-paper.bib
vignette: >
  %\VignetteIndexEntry{Automatic Time Series Forecasting: the forecast Package for R (Hyndman & Khandakar, JSS 2008)}
  %\VignetteEngine{knitr::rmarkdown}
  %\VignetteEncoding{UTF-8}
documentclass: jss
output:
  if (rmarkdown::pandoc_version() >= "2") rticles::jss_article else rmarkdown::html_vignette:
    fig_width: 7
    fig_height: 6
    fig_caption: true
---

```{r load_forecast, echo=FALSE, message=FALSE}
library('forecast')
```

```{r load_expsmooth, echo=FALSE, message=FALSE, eval=FALSE}
library('expsmooth')
```

```{r expsmooth_datsets, echo=FALSE, message=FALSE}
bonds <- structure(c(5.83, 6.06, 6.58, 7.09, 7.31, 7.23, 7.43, 7.37, 7.6,
  7.89, 8.12, 7.96, 7.93, 7.61, 7.33, 7.18, 6.74, 6.27, 6.38, 6.6, 6.3,
  6.13, 6.02, 5.79, 5.73, 5.89, 6.37, 6.62, 6.85, 7.03, 6.99, 6.75, 6.95,
  6.64, 6.3, 6.4, 6.69, 6.52, 6.8, 7.01, 6.82, 6.6, 6.32, 6.4, 6.11, 5.82,
  5.87, 5.89, 5.63, 5.65, 5.73, 5.72, 5.73, 5.58, 5.53, 5.41, 4.87, 4.58,
  4.89, 4.69, 4.78, 4.99, 5.23, 5.18, 5.54, 5.9, 5.8, 5.94, 5.91, 6.1,
  6.03, 6.26, 6.66, 6.52, 6.26, 6, 6.42, 6.1, 6.04, 5.83, 5.8, 5.74, 5.72,
  5.23, 5.14, 5.1, 4.89, 5.13, 5.37, 5.26, 5.23, 4.97, 4.76, 4.55, 4.61,
  5.07, 5, 4.9, 5.28, 5.21, 5.15, 4.9, 4.62, 4.24, 3.88, 3.91, 4.04, 4.03,
  4.02, 3.9, 3.79, 3.94, 3.56, 3.32, 3.93, 4.44, 4.29, 4.27, 4.29, 4.26,
  4.13, 4.06, 3.81, 4.32, 4.7), .Tsp = c(1994, 2004.33333333333, 12),
  class = "ts")
usnetelec <- structure(c(296.1, 334.1, 375.3, 403.8, 447, 476.3, 550.3,
  603.9, 634.6, 648.5, 713.4, 759.2, 797.1, 857.9, 920, 987.2, 1058.4,
  1147.5, 1217.8, 1332.8, 1445.5, 1535.1, 1615.9, 1753, 1864.1, 1870.3,
  1920.8, 2040.9, 2127.4, 2209.4, 2250.7, 2289.6, 2298, 2244.4, 2313.4,
  2419.5, 2473, 2490.5, 2575.3, 2707.4, 2967.3, 3038, 3073.8, 3083.9,
  3197.2, 3247.5, 3353.5, 3444.2, 3492.2, 3620.3, 3694.8, 3802.1, 3736.6,
  3858.5, 3848), .Tsp = c(1949, 2003, 1), class = "ts")
ukcars <- structure(c(330.371, 371.051, 270.67, 343.88, 358.491, 362.822,
  261.281, 240.355, 325.382, 316.7, 171.153, 257.217, 298.127, 251.464,
  181.555, 192.598, 245.652, 245.526, 225.261, 238.211, 257.385, 228.461,
  175.371, 226.462, 266.15, 287.251, 225.883, 265.313, 272.759, 234.134,
  196.462, 205.551, 291.283, 284.422, 221.571, 250.697, 253.757, 267.016,
  220.388, 277.801, 283.233, 302.072, 259.72, 297.658, 306.129, 322.106,
  256.723, 341.877, 356.004, 361.54, 270.433, 311.105, 326.688, 327.059,
  274.257, 367.606, 346.163, 348.211, 250.008, 292.518, 343.318, 343.429,
  275.386, 329.747, 364.521, 378.448, 300.798, 331.757, 362.536, 389.133,
  323.322, 391.832, 421.646, 416.823, 311.713, 381.902, 422.982, 427.722,
  376.85, 458.58, 436.225, 441.487, 369.566, 450.723, 462.442, 468.232,
  403.636, 413.948, 460.496, 448.932, 407.787, 469.408, 494.311, 433.24,
  335.106, 378.795, 387.1, 372.395, 335.79, 397.08, 449.755, 402.252,
  391.847, 385.89, 424.325, 433.28, 391.213, 408.74, 445.458,
  428.202, 379.048, 394.042, 432.796), .Tsp = c(1977, 2005, 4),
  class = "ts")
visitors <- structure(c(75.7, 75.4, 83.1, 82.9, 77.3, 105.7, 121.9, 150,
  98, 118, 129.5, 110.6, 91.7, 94.8, 109.5, 105.1, 95, 130.3, 156.7,
  190.1, 139.7, 147.8, 145.2, 132.7, 120.7, 116.5, 142, 140.4, 128, 165.7,
  183.1, 222.8, 161.3, 180.4, 185.2, 160.5, 157.1, 163.8, 203.3, 196.9,
  179.6, 207.3, 208, 245.8, 168.9, 191.1, 180, 160.1, 136.6, 142.7, 175.4,
  161.4, 149.9, 174.1, 192.7, 247.4, 176.2, 192.8, 189.1, 181.1, 149.9,
  157.3, 185.3, 178.2, 162.7, 190.6, 198.6, 253.1, 177.4, 190.6, 189.2,
  168, 161.4, 172.2, 208.3, 199.3, 197.4, 216, 223.9, 266.8, 196.1, 238.2,
  217.8, 203.8, 175.2, 176.9, 219.3, 199.1, 190, 229.3, 255, 302.4, 242.8,
  245.5, 257.9, 226.3, 213.4, 204.6, 244.6, 239.9, 224, 267.2, 285.9, 344,
  250.5, 304.3, 307.4, 255.1, 214.9, 230.9, 282.5, 265.4, 254, 301.6, 311,
  384, 303.8, 319.1, 313.5, 294.2, 244.8, 261.4, 329.7, 304.9, 268.6,
  320.7, 342.9, 422.3, 317.2, 392.7, 365.6, 333.2, 261.5, 306.9, 358.2,
  329.2, 309.2, 350.4, 375.6, 465.2, 342.9, 408, 390.9, 325.9, 289.1,
  308.2, 397.4, 330.4, 330.9, 366.5, 379.5, 448.3, 346.2, 353.6, 338.6,
  341.1, 283.4, 304.2, 372.3, 323.7, 323.9, 354.8, 367.9, 457.6, 351,
  398.6, 389, 334.1, 298.1, 317.1, 388.5, 355.6, 353.1, 397, 416.7, 460.8,
  360.8, 434.6, 411.9, 405.6, 319.3, 347.9, 429, 372.9, 403, 426.5, 459.9,
  559.9, 416.6, 429.2, 428.7, 405.4, 330.2, 370, 446.9, 384.6, 366.3,
  378.5, 376.2, 523.2, 379.3, 437.2, 446.5, 360.3, 329.9, 339.4, 418.2,
  371.9, 358.6, 428.9, 437, 534, 396.6, 427.5, 392.5, 321.5, 260.9, 308.3,
  415.5, 362.2, 385.6, 435.3, 473.3, 566.6, 420.2, 454.8, 432.3, 402.8,
  341.3, 367.3, 472, 405.8, 395.6, 449.9, 479.9, 593.1, 462.4, 501.6,
  504.7, 409.5), .Tsp = c(1985.33333333333, 2005.25, 12), class = "ts")
```

# Introduction

Automatic forecasts of large numbers of univariate time series are often needed in business. It is common to have over one thousand product lines that need forecasting at least monthly. Even when a smaller number of forecasts are required, there may be nobody suitably trained in the use of time series models to produce them. In these circumstances, an automatic forecasting algorithm is an essential tool. Automatic forecasting algorithms must determine an appropriate time series model, estimate the parameters and compute the forecasts. They must be robust to unusual time series patterns, and applicable to large numbers of series without user intervention. The most popular automatic forecasting algorithms are based on either exponential smoothing or ARIMA models.

In this article, we discuss the implementation of two automatic univariate forecasting methods in the \pkg{forecast} package for \proglang{R}. We also briefly describe some univariate forecasting methods that are part of the \pkg{forecast} package.

The \pkg{forecast} package for the \proglang{R} system for statistical computing [@R] is available from the Comprehensive \proglang{R} Archive Network at \url{https://CRAN.R-project.org/package=forecast}. Version `r packageVersion('forecast')` of the package was used for this paper. The \pkg{forecast} package contains functions for univariate forecasting and a few examples of real time series data. For more extensive testing of forecasting methods, the \pkg{fma} package contains the 90 data sets from @MWH3, the \pkg{expsmooth} package contains 24 data sets from @expsmooth08, and the \pkg{Mcomp} package contains the 1001 time series from the M-competition [@Mcomp82] and the 3003 time series from the M3-competition [@M3comp00].
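For example, once the \pkg{Mcomp} package is installed, individual competition series can be extracted and inspected directly. The following chunk is a minimal sketch (not evaluated when this vignette is built), and the series index used here is arbitrary:

```{r mcomp_example, eval=FALSE}
# A minimal sketch, assuming the Mcomp package has been installed from CRAN.
# M3 is a list of 3003 series; each element contains the training period
# ($x), the test period ($xx), the required forecast horizon ($h) and
# descriptive metadata such as $description.
library('Mcomp')
M3[[1500]]$description
plot(M3[[1500]])
```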
The \pkg{forecast} package implements automatic forecasting using exponential smoothing, ARIMA models, the Theta method [@AN00], cubic splines [@HKPB05], as well as other common forecasting methods. In this article, we primarily discuss the exponential smoothing approach (in Section \ref{sec:expsmooth}) and the ARIMA modelling approach (in Section \ref{sec:arima}) to automatic forecasting. In Section \ref{sec:package}, we describe the implementation of these methods in the \pkg{forecast} package, along with other features of the package. # Exponential smoothing {#sec:expsmooth} Although exponential smoothing methods have been around since the 1950s, a modelling framework incorporating procedures for model selection was not developed until relatively recently. @OKS97, @HKSG02 and @HKOS05 have shown that all exponential smoothing methods (including non-linear methods) are optimal forecasts from innovations state space models. Exponential smoothing methods were originally classified by Pegels' (1969)\nocite{Pegels69} taxonomy. This was later extended by @Gardner85, modified by @HKSG02, and extended again by @Taylor03a, giving a total of fifteen methods seen in the following table. \begin{table}[!hbt] \begin{center}\vspace{0.2cm} \begin{tabular}{|ll|ccc|} \hline & &\multicolumn{3}{c|}{Seasonal Component} \\ \multicolumn{2}{|c|}{Trend}& N & A & M\\ \multicolumn{2}{|c|}{Component} & (None) & (Additive) & (Multiplicative)\\ \cline{3-5} &&&\\[-0.3cm] N & (None) & N,N & N,A & N,M\\ &&&&\\[-0.3cm] A & (Additive) & A,N & A,A & A,M\\ &&&&\\[-0.3cm] A\damped & (Additive damped) & A\damped,N & A\damped,A & A\damped,M\\ &&&&\\[-0.3cm] M & (Multiplicative) & M,N & M,A & M,M\\ &&&&\\[-0.3cm] M\damped & (Multiplicative damped) & M\damped,N & M\damped,A & M\damped,M\\ \hline \end{tabular}\vspace{0.2cm} \end{center} \caption{The fifteen exponential smoothing methods.} \end{table} Some of these methods are better known under other names. For example, cell (N,N) describes the simple exponential smoothing (or SES) method, cell (A,N) describes Holt's linear method, and cell (A\damped,N) describes the damped trend method. The additive Holt-Winters' method is given by cell (A,A) and the multiplicative Holt-Winters' method is given by cell (A,M). The other cells correspond to less commonly used but analogous methods. ## Point forecasts for all methods We denote the observed time series by $y_1,y_2,\dots,y_n$. A forecast of $y_{t+h}$ based on all of the data up to time $t$ is denoted by $\hat{y}_{t+h|t}$. To illustrate the method, we give the point forecasts and updating equations for method (A,A), the Holt-Winters' additive method: \begin{subequations}\label{eq:AMmethod} \begin{align} \mbox{Level:}\quad &\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha)(\ell_{t-1} + b_{t-1})\hspace*{1cm} \label{eq:3-44a}\\ \mbox{Growth:}\quad &b_t = \beta^*(\ell_t - \ell_{t-1}) + (1-\beta^*)b_{t-1} \label{eq:3-45a}\\ \mbox{Seasonal:}\quad &s_t = \gamma(y_t - \ell_{t-1} -b_{t-1}) + (1-\gamma)s_{t-m}\label{eq:3-46a}\\ \mbox{Forecast:}\quad &\hat{y}_{t+h|t} = \ell_t + b_th +s_{t-m+h_m^+}. \label{eq:3-47a} \end{align} \end{subequations} where $m$ is the length of seasonality (e.g., the number of months or quarters in a year), $\ell_t$ represents the level of the series, $b_t$ denotes the growth, $s_t$ is the seasonal component, $\hat{y}_{t+h|t}$ is the forecast for $h$ periods ahead, and $h_m^+ = \big[(h-1) \mbox{ mod } m\big] + 1$. 
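Looking ahead to the implementation discussed in Section \ref{sec:package}, this method can be applied directly via the \code{hw()} convenience function in the \pkg{forecast} package. The following is a sketch only; the quarterly \code{ukcars} series used in Section \ref{sec:package} is an arbitrary illustrative choice.

```{r hw-additive-sketch, eval=FALSE, echo=TRUE}
# Holt-Winters' additive method (A,A); h = 8 requests two years of
# quarterly forecasts
fc <- hw(ukcars, seasonal = "additive", h = 8)
summary(fc)
```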
To use method \eqref{eq:AMmethod}, we need values for the initial states $\ell_0$, $b_0$ and $s_{1-m},\dots,s_0$, and for the smoothing parameters $\alpha$, $\beta^*$ and $\gamma$. All of these will be estimated from the observed data.

Equation \eqref{eq:3-46a} is slightly different from the usual Holt-Winters equations such as those in @MWH3 or @BOK05. These authors replace \eqref{eq:3-46a} with
$$
  s_t = \gamma^*(y_t - \ell_{t}) + (1-\gamma^*)s_{t-m}.
$$
If $\ell_t$ is substituted using \eqref{eq:3-44a}, we obtain
$$s_t = \gamma^*(1-\alpha)(y_t - \ell_{t-1}-b_{t-1}) + \{1-\gamma^*(1-\alpha)\}s_{t-m}.
$$
Thus, we obtain identical forecasts using this approach by replacing $\gamma$ in \eqref{eq:3-46a} with $\gamma^*(1-\alpha)$. The modification given in \eqref{eq:3-46a} was proposed by @OKS97 to make the state space formulation simpler. It is equivalent to Archibald's (1990)\nocite{Archibald90} variation of the Holt-Winters' method.

\begin{sidewaystable}
\begin{small}
\begin{center}
\begin{tabular}{|c|lll|}
\hline
& \multicolumn{3}{c|}{Seasonal} \\
{Trend} & \multicolumn{1}{c}{N} & \multicolumn{1}{c}{A} & \multicolumn{1}{c|}{M}\\
\cline{2-4}
& $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) \ell_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) \ell_{t-1}$\\
{N} & & $s_t = \gamma (y_t - \ell_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / \ell_{t-1}) + (1-\gamma) s_{t-m}$ \\
& $\hat{y}_{t+h|t} = \ell_t$ & $\hat{y}_{t+h|t} = \ell_t + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_ts_{t-m+h_m^+}$ \\
\hline
& $\ell_t = \alpha y_t + (1-\alpha) (\ell_{t-1}+b_{t-1})$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) (\ell_{t-1}+b_{t-1})$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) (\ell_{t-1}+b_{t-1})$\\
{A} & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) b_{t-1}$\\
& & $s_t = \gamma (y_t - \ell_{t-1}-b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}+b_{t-1})) + (1-\gamma) s_{t-m}$\\
& $\hat{y}_{t+h|t} = \ell_t+hb_t$ & $\hat{y}_{t+h|t} = \ell_t +hb_t +s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= (\ell_t+hb_t)s_{t-m+h_m^+}$ \\
\hline
& $\ell_t = \alpha y_t + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) (\ell_{t-1}+\phi b_{t-1})$\\
{A\damped } & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$ & $b_t = \beta^* (\ell_t-\ell_{t-1}) + (1-\beta^*) \phi b_{t-1}$\\
& & $s_t = \gamma (y_t - \ell_{t-1}-\phi b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}+\phi b_{t-1})) + (1-\gamma) s_{t-m}$\\
& $\hat{y}_{t+h|t} = \ell_t+\dampfactor b_t$ & $\hat{y}_{t+h|t} = \ell_t+\dampfactor b_t+s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= (\ell_t+\dampfactor b_t)s_{t-m+h_m^+}$ \\
\hline
& $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}b_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha) \ell_{t-1}b_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha) \ell_{t-1}b_{t-1}$\\
{M} & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b_{t-1}$\\
& & $s_t = \gamma (y_t - \ell_{t-1}b_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}b_{t-1})) + (1-\gamma) s_{t-m}$\\
& $\hat{y}_{t+h|t} = \ell_tb_t^h$ & $\hat{y}_{t+h|t} =
\ell_tb_t^h + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_tb_t^hs_{t-m+h_m^+}$ \\ \hline & $\ell_t = \alpha y_t + (1-\alpha) \ell_{t-1}b^\phi_{t-1}$ & $\ell_t = \alpha (y_t - s_{t-m}) + (1-\alpha)\ell_{t-1}b^\phi_{t-1}$ & $\ell_t = \alpha (y_t / s_{t-m}) + (1-\alpha)\ell_{t-1}b^\phi_{t-1}$\\ {M\damped } & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$ & $b_t = \beta^* (\ell_t/\ell_{t-1}) + (1-\beta^*) b^\phi_{t-1}$\\ & & $s_t = \gamma (y_t - \ell_{t-1}b^\phi_{t-1}) + (1-\gamma) s_{t-m}$ & $s_t = \gamma (y_t / (\ell_{t-1}b^\phi_{t-1})) + (1-\gamma) s_{t-m}$\\ & $\hat{y}_{t+h|t} = \ell_tb_t^{\phi_h}$ & $\hat{y}_{t+h|t} = \ell_tb_t^{\phi_h} + s_{t-m+h_m^+}$ & $\hat{y}_{t+h|t}= \ell_tb_t^{\phi_h}s_{t-m+h_m^+}$ \\ \hline \end{tabular} \end{center} \end{small} \caption{Formulae for recursive calculations and point forecasts. In each case, $\ell_t$ denotes the series level at time $t$, $b_t$ denotes the slope at time $t$, $s_t$ denotes the seasonal component of the series at time $t$, and $m$ denotes the number of seasons in a year; $\alpha$, $\beta^*$, $\gamma$ and $\phi$ are constants, $\phi_h = \phi+\phi^2+\dots+\phi^{h}$ and $h_m^+ = \big[(h-1) \mbox{ mod } m\big] + 1$.}\label{table:pegels} \end{sidewaystable} Table \ref{table:pegels} gives recursive formulae for computing point forecasts $h$ periods ahead for all of the exponential smoothing methods. Some interesting special cases can be obtained by setting the smoothing parameters to extreme values. For example, if $\alpha=0$, the level is constant over time; if $\beta^*=0$, the slope is constant over time; and if $\gamma=0$, the seasonal pattern is constant over time. At the other extreme, naïve forecasts (i.e., $\hat{y}_{t+h|t}=y_t$ for all $h$) are obtained using the (N,N) method with $\alpha=1$. Finally, the additive and multiplicative trend methods are special cases of their damped counterparts obtained by letting $\phi=1$. ## Innovations state space models {#sec:statespace} For each exponential smoothing method in Table \ref{table:pegels}, @expsmooth08 describe two possible innovations state space models, one corresponding to a model with additive errors and the other to a model with multiplicative errors. If the same parameter values are used, these two models give equivalent point forecasts, although different prediction intervals. Thus there are 30 potential models described in this classification. Historically, the nature of the error component has often been ignored, because the distinction between additive and multiplicative errors makes no difference to point forecasts. We are careful to distinguish exponential smoothing \emph{methods} from the underlying state space \emph{models}. An exponential smoothing method is an algorithm for producing point forecasts only. The underlying stochastic state space model gives the same point forecasts, but also provides a framework for computing prediction intervals and other properties. To distinguish the models with additive and multiplicative errors, we add an extra letter to the front of the method notation. The triplet (E,T,S) refers to the three components: error, trend and seasonality. So the model ETS(A,A,N) has additive errors, additive trend and no seasonality---in other words, this is Holt's linear method with additive errors. Similarly, ETS(M,M\damped,M) refers to a model with multiplicative errors, a damped multiplicative trend and multiplicative seasonality. 
The notation ETS($\cdot$,$\cdot$,$\cdot$) helps in remembering the order in which the components are specified. Once a model is specified, we can study the probability distribution of future values of the series and find, for example, the conditional mean of a future observation given knowledge of the past. We denote this as $\mu_{t+h|t} = \E(y_{t+h} \mid \bm{x}_t)$, where $\bm{x}_t$ contains the unobserved components such as $\ell_t$, $b_t$ and $s_t$. For $h=1$ we use $\mu_t\equiv\mu_{t+1|t}$ as a shorthand notation. For many models, these conditional means will be identical to the point forecasts given in Table \ref{table:pegels}, so that $\mu_{t+h|t}=\hat{y}_{t+h|t}$. However, for other models (those with multiplicative trend or multiplicative seasonality), the conditional mean and the point forecast will differ slightly for $h\ge 2$.

We illustrate these ideas using the damped trend method of @GM85.

\subsubsection{Additive error model: ETS(A,A$_d$,N)}

Let $\mu_t = \hat{y}_t = \ell_{t-1}+\phi b_{t-1}$ denote the one-step forecast of $y_{t}$ assuming that we know the values of all parameters. Also, let $\varepsilon_t = y_t - \mu_t$ denote the one-step forecast error at time $t$. From the equations in Table \ref{table:pegels}, we find that
\begin{align}
\label{ss1}
y_t &= \ell_{t-1} + \phi b_{t-1} + \varepsilon_t\\
\ell_t &= \ell_{t-1} + \phi b_{t-1} + \alpha \varepsilon_t \label{ss2}\\
b_t &= \phi b_{t-1} + \beta^*(\ell_t - \ell_{t-1}- \phi b_{t-1}) = \phi b_{t-1} + \alpha\beta^*\varepsilon_t. \label{ss3}
\end{align}
We simplify the last expression by setting $\beta=\alpha\beta^*$. The three equations above constitute a state space model underlying the damped Holt's method. Note that it is an \emph{innovations} state space model [@AM79;@Aoki87] because the same error term appears in each equation. We can write it in standard state space notation by defining the state vector as $\bm{x}_t = (\ell_t,b_t)'$ and expressing \eqref{ss1}--\eqref{ss3} as
\begin{subequations}
\begin{align}
y_t &= \left[ 1 \; \phi \right] \bm{x}_{t-1} + \varepsilon_t\label{obseq}\\
\bm{x}_t &= \left[\begin{array}{ll} 1 & \phi\\ 0 & \phi \end{array}\right]\bm{x}_{t-1} + \left[\begin{array}{l} \alpha\\ \beta \end{array}\right]\varepsilon_t.\label{stateeq}
\end{align}
\end{subequations}
The model is fully specified once we state the distribution of the error term $\varepsilon_t$. Usually we assume that these are independent and identically distributed, following a normal distribution with mean 0 and variance $\sigma^2$, which we write as $\varepsilon_t \sim\mbox{NID}(0, \sigma^2)$.

\subsubsection{Multiplicative error model: ETS(M,A$_d$,N)}

A model with multiplicative error can be derived similarly, by first setting $\varepsilon_t = (y_t-\mu_t)/\mu_t$, so that $\varepsilon_t$ is the relative error. Then, following a similar approach to that for additive errors, we find
\begin{align*}
y_t &= (\ell_{t-1} + \phi b_{t-1})(1 + \varepsilon_t)\\
\ell_t &= (\ell_{t-1} + \phi b_{t-1})(1 + \alpha \varepsilon_t)\\
b_t &= \phi b_{t-1} + \beta(\ell_{t-1}+\phi b_{t-1})\varepsilon_t,
\end{align*}
or
\begin{align*}
y_t &= \left[ 1 \; \phi \right] \bm{x}_{t-1}(1 + \varepsilon_t)\\
\bm{x}_t &= \left[\begin{array}{ll} 1 & \phi \\ 0 & \phi \end{array}\right]\bm{x}_{t-1} + \left[ 1 \; \phi \right] \bm{x}_{t-1} \left[\begin{array}{l} \alpha\\ \beta \end{array}\right]\varepsilon_t.
\end{align*}
Again we assume that $\varepsilon_t \sim \mbox{NID}(0,\sigma^2)$.
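As a hedged sketch of how these two models can be requested in practice (using the \code{ets()} function that is described fully in Section \ref{sec:package}), the \code{model} argument takes the three-letter (E,T,S) triplet and \code{damped = TRUE} selects the damped trend; the choice of the \code{WWWusage} series here is illustrative only.

```{r ets-damped-sketch, eval=FALSE, echo=TRUE}
# ETS(A,Ad,N): additive errors, damped additive trend, no seasonal component
fit.add <- ets(WWWusage, model = "AAN", damped = TRUE)
# ETS(M,Ad,N): multiplicative errors with the same trend structure
fit.mult <- ets(WWWusage, model = "MAN", damped = TRUE)
```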
Of course, the multiplicative error model is a nonlinear state space model, and nonlinear models are usually considered difficult to handle in estimation and forecasting. However, that is one of the many advantages of the innovations form of state space models --- we can still compute forecasts, the likelihood and prediction intervals for this nonlinear model with no more effort than is required for the additive error model.

## State space models for all exponential smoothing methods {#sec:ssmodels}

There are similar state space models for all 30 exponential smoothing variations. The general model involves a state vector $\bm{x}_t = (\ell_t, b_t$, $s_t, s_{t-1}, \dots, s_{t-m+1})'$ and state space equations of the form
\begin{subequations}\label{eq:ss}
\begin{align}
y_t &= w(\bm{x}_{t-1}) + r(\bm{x}_{t-1})\varepsilon_t \label{eq:ss1}\\
\bm{x}_t &= f(\bm{x}_{t-1}) + g(\bm{x}_{t-1})\varepsilon_t \label{eq:ss2}
\end{align}
\end{subequations}
where $\{\varepsilon_t\}$ is a Gaussian white noise process with mean zero and variance $\sigma^2$, and $\mu_t = w(\bm{x}_{t-1})$. The model with additive errors has $r(\bm{x}_{t-1})=1$, so that $y_t = \mu_{t} + \varepsilon_t$. The model with multiplicative errors has $r(\bm{x}_{t-1})=\mu_t$, so that $y_t = \mu_{t}(1 + \varepsilon_t)$. Thus, $\varepsilon_t = (y_t - \mu_t)/\mu_t$ is the relative error for the multiplicative model. The models are not unique. Clearly, any value of $r(\bm{x}_{t-1})$ will lead to identical point forecasts for $y_t$.

All of the methods in Table \ref{table:pegels} can be written in the form \eqref{eq:ss1} and \eqref{eq:ss2}. The specific form for each model is given in @expsmooth08.

Some of the combinations of trend, seasonality and error can occasionally lead to numerical difficulties; specifically, any model equation that requires division by a state component could involve division by zero. This is a problem for models with additive errors and either multiplicative trend or multiplicative seasonality, as well as for the model with multiplicative errors, multiplicative trend and additive seasonality. These models should therefore be used with caution.

The multiplicative error models are useful when the data are strictly positive, but are not numerically stable when the data contain zeros or negative values. So when the time series is not strictly positive, only the six fully additive models may be applied.

The point forecasts given in Table \ref{table:pegels} are easily obtained from these models by iterating equations \eqref{eq:ss1} and \eqref{eq:ss2} for $t=n+1, n+2,\dots,n+h$, setting $\varepsilon_{n+j}=0$ for $j=1,\dots,h$. In most cases (notable exceptions being models with multiplicative seasonality or multiplicative trend for $h\ge2$), the point forecasts can be shown to be equal to $\mu_{t+h|t} = \E(y_{t+h} \mid \bm{x}_t)$, the conditional expectation of the corresponding state space model.

The models also provide a means of obtaining prediction intervals. In the case of the linear models, where the forecast distributions are normal, we can derive the conditional variance $v_{t+h|t} = \var(y_{t+h} \mid \bm{x}_t)$ and obtain prediction intervals accordingly. This approach also works for many of the nonlinear models. Detailed derivations of the results for many models are given in @HKOS05.

A more direct approach that works for all of the models is to simply simulate many future sample paths conditional on the last estimate of the state vector, $\bm{x}_t$. Then prediction intervals can be obtained from the percentiles of the simulated sample paths.
Point forecasts can also be obtained in this way by taking the average of the simulated values at each future time period. An advantage of this approach is that we generate an estimate of the complete predictive distribution, which is especially useful in applications such as inventory planning, where expected costs depend on the whole distribution. ## Estimation {#sec:estimation} In order to use these models for forecasting, we need to know the values of $\bm{x}_0$ and the parameters $\alpha$, $\beta$, $\gamma$ and $\phi$. It is easy to compute the likelihood of the innovations state space model \eqref{eq:ss}, and so obtain maximum likelihood estimates. @OKS97 show that \begin{equation}\label{likelihood} L^*(\bm\theta,\bm{x}_0) = n\log\Big(\sum_{t=1}^n \varepsilon^2_t\Big) + 2\sum_{t=1}^n \log|r(\bm{x}_{t-1})| \end{equation} is equal to twice the negative logarithm of the likelihood function (with constant terms eliminated), conditional on the parameters $\bm\theta = (\alpha,\beta,\gamma,\phi)'$ and the initial states $\bm{x}_0 = (\ell_0,b_0,s_0,s_{-1},\dots,s_{-m+1})'$, where $n$ is the number of observations. This is easily computed by simply using the recursive equations in Table \ref{table:pegels}. Unlike state space models with multiple sources of error, we do not need to use the Kalman filter to compute the likelihood. The parameters $\bm\theta$ and the initial states $\bm{x}_0$ can be estimated by minimizing $L^*$. Most implementations of exponential smoothing use an ad hoc heuristic scheme to estimate $\bm{x}_0$. However, with modern computers, there is no reason why we cannot estimate $\bm{x}_0$ along with $\bm\theta$, and the resulting forecasts are often substantially better when we do. We constrain the initial states $\bm{x}_0$ so that the seasonal indices add to zero for additive seasonality, and add to $m$ for multiplicative seasonality. There have been several suggestions for restricting the parameter space for $\alpha$, $\beta$ and $\gamma$. The traditional approach is to ensure that the various equations can be interpreted as weighted averages, thus requiring $\alpha$, $\beta^*=\beta/\alpha$, $\gamma^*=\gamma/(1-\alpha)$ and $\phi$ to all lie within $(0,1)$. This suggests $$0<\alpha<1,\qquad 0<\beta<\alpha,\qquad 0<\gamma < 1-\alpha,\qquad\mbox{and}\qquad 0<\phi<1. $$ However, @HAA08 show that these restrictions are usually stricter than necessary (although in a few cases they are not restrictive enough). ## Model selection Forecast accuracy measures such as mean squared error (MSE) can be used for selecting a model for a given set of data, provided the errors are computed from data in a hold-out set and not from the same data as were used for model estimation. However, there are often too few out-of-sample errors to draw reliable conclusions. Consequently, a penalized method based on the in-sample fit is usually better. One such approach uses a penalized likelihood such as Akaike's Information Criterion: $$\mbox{AIC} = L^*(\hat{\bm\theta},\hat{\bm{x}}_0) + 2q, $$ where $q$ is the number of parameters in $\bm\theta$ plus the number of free states in $\bm{x}_0$, and $\hat{\bm\theta}$ and $\hat{\bm{x}}_0$ denote the estimates of $\bm\theta$ and $\bm{x}_0$. We select the model that minimizes the AIC amongst all of the models that are appropriate for the data. The AIC also provides a method for selecting between the additive and multiplicative error models. 
The point forecasts from the two models are identical so that standard forecast accuracy measures such as the MSE or mean absolute percentage error (MAPE) are unable to select between the error types. The AIC is able to select between the error types because it is based on likelihood rather than one-step forecasts. Obviously, other model selection criteria (such as the BIC) could also be used in a similar manner. ## Automatic forecasting {#sec:algorithm} We combine the preceding ideas to obtain a robust and widely applicable automatic forecasting algorithm. The steps involved are summarized below. \begin{compactenum} \item For each series, apply all models that are appropriate, optimizing the parameters (both smoothing parameters and the initial state variable) of the model in each case. \item Select the best of the models according to the AIC. \item Produce point forecasts using the best model (with optimized parameters) for as many steps ahead as required. \item Obtain prediction intervals for the best model either using the analytical results of Hyndman, Koehler, et al. (2005), or by simulating future sample paths for $\{y_{n+1},\dots,y_{n+h}\}$ and finding the $\alpha/2$ and $1-\alpha/2$ percentiles of the simulated data at each forecasting horizon. If simulation is used, the sample paths may be generated using the normal distribution for errors (parametric bootstrap) or using the resampled errors (ordinary bootstrap). \end{compactenum} @HKSG02 applied this automatic forecasting strategy to the M-competition data [@Mcomp82] and the IJF-M3 competition data [@M3comp00] using a restricted set of exponential smoothing models, and demonstrated that the methodology is particularly good at short term forecasts (up to about 6 periods ahead), and especially for seasonal short-term series (beating all other methods in the competitions for these series). # ARIMA models {#sec:arima} A common obstacle for many people in using Autoregressive Integrated Moving Average (ARIMA) models for forecasting is that the order selection process is usually considered subjective and difficult to apply. But it does not have to be. There have been several attempts to automate ARIMA modelling in the last 25 years. @HR82 proposed a method to identify the order of an ARMA model for a stationary series. In their method the innovations can be obtained by fitting a long autoregressive model to the data, and then the likelihood of potential models is computed via a series of standard regressions. They established the asymptotic properties of the procedure under very general conditions. @Gomez98 extended the Hannan-Rissanen identification method to include multiplicative seasonal ARIMA model identification. @TRAMOSEATS98 implemented this automatic identification procedure in the software \pkg{TRAMO} and \pkg{SEATS}. For a given series, the algorithm attempts to find the model with the minimum BIC. @Liu89 proposed a method for identification of seasonal ARIMA models using a filtering method and certain heuristic rules; this algorithm is used in the \pkg{SCA-Expert} software. Another approach is described by @MP00a whose algorithm for univariate ARIMA models also allows intervention analysis. It is implemented in the software package ``Time Series Expert'' (\pkg{TSE-AX}). Other algorithms are in use in commercial software, although they are not documented in the public domain literature. 
In particular, \pkg{Forecast Pro} [@ForecastPro00] is well-known for its excellent automatic ARIMA algorithm which was used in the M3-forecasting competition [@M3comp00]. Another proprietary algorithm is implemented in \pkg{Autobox} [@Reilly00]. @OL96 provide an early review of some of the commercial software that implement automatic ARIMA forecasting.

## Choosing the model order using unit root tests and the AIC

A non-seasonal ARIMA($p,d,q$) process is given by
$$
  \phi(B)(1-B)^dy_{t} = c + \theta(B)\varepsilon_t
$$
where $\{\varepsilon_t\}$ is a white noise process with mean zero and variance $\sigma^2$, $B$ is the backshift operator, and $\phi(z)$ and $\theta(z)$ are polynomials of order $p$ and $q$ respectively. To ensure causality and invertibility, it is assumed that $\phi(z)$ and $\theta(z)$ have no roots for $|z|<1$ [@BDbook91]. If $c\ne0$, there is an implied polynomial of order $d$ in the forecast function.

The seasonal ARIMA$(p,d,q)(P,D,Q)_m$ process is given by
$$
  \Phi(B^m)\phi(B)(1-B^{m})^D(1-B)^dy_{t} = c + \Theta(B^m)\theta(B)\varepsilon_t
$$
where $\Phi(z)$ and $\Theta(z)$ are polynomials of orders $P$ and $Q$ respectively, each containing no roots inside the unit circle. If $c\ne0$, there is an implied polynomial of order $d+D$ in the forecast function.

The main task in automatic ARIMA forecasting is selecting an appropriate model order, that is the values $p$, $q$, $P$, $Q$, $D$, $d$. If $d$ and $D$ are known, we can select the orders $p$, $q$, $P$ and $Q$ via an information criterion such as the AIC:
$$\mbox{AIC} = -2\log(L) + 2(p+q+P+Q+k)$$
where $k=1$ if $c\ne0$ and 0 otherwise, and $L$ is the maximized likelihood of the model fitted to the \emph{differenced} data $(1-B^m)^D(1-B)^dy_t$. The likelihood of the full model for $y_t$ is not actually defined and so the AIC values for different levels of differencing are not comparable.

One solution to this difficulty is the ``diffuse prior'' approach which is outlined in @DKbook01 and implemented in the \code{arima()} function [@Ripley:2002] in \R. In this approach, the initial values of the time series (before the observed values) are assumed to have mean zero and a large variance. However, choosing $d$ and $D$ by minimizing the AIC using this approach tends to lead to over-differencing. For forecasting purposes, we believe it is better to make as few differences as possible because over-differencing harms forecasts [@SY94] and widens prediction intervals. [Although, see @Hendry97 for a contrary view.]

Consequently, we need some other approach to choose $d$ and $D$. We prefer unit-root tests. However, most unit-root tests are based on a null hypothesis that a unit root exists, which biases results towards more differences rather than fewer. For example, variations on the Dickey-Fuller test [@DF81] all assume there is a unit root at lag 1, and the HEGY test of @HEGY90 is based on a null hypothesis that there is a seasonal unit root. Instead, we prefer unit-root tests based on a null hypothesis of no unit root.

For non-seasonal data, we consider ARIMA($p,d,q$) models where $d$ is selected based on successive KPSS unit-root tests [@KPSS92]. That is, we test the data for a unit root; if the test result is significant, we test the differenced data for a unit root; and so on. We stop this procedure when we obtain our first insignificant result.

For seasonal data, we consider ARIMA$(p,d,q)(P,D,Q)_m$ models where $m$ is the seasonal frequency and $D=0$ or $D=1$ depending on an extended Canova-Hansen test [@CH95].
Canova and Hansen only provide critical values for $2 < m < 13$. Let $C_m$ be the critical value for seasonal period $m$. We plotted $C_m$ against $m$ for values of $m$ up to 365 and noted that they fit the line $C_m = 0.269 m^{0.928}$ almost exactly. So for $m>12$, we use this simple expression to obtain the critical value.

We note in passing that the null hypothesis for the Canova-Hansen test is not an ARIMA model as it includes seasonal dummy terms. It is a test for whether the seasonal pattern changes sufficiently over time to warrant a seasonal unit root, or whether a stable seasonal pattern modelled using fixed dummy variables is more appropriate. Nevertheless, we have found that the test is still useful for choosing $D$ in a strictly ARIMA framework (i.e., without seasonal dummy variables). If a stable seasonal pattern is selected (i.e., the null hypothesis is not rejected), the seasonality is effectively handled by stationary seasonal AR and MA terms.

After $D$ is selected, we choose $d$ by applying successive KPSS unit-root tests to the seasonally differenced data (if $D=1$) or the original data (if $D=0$). Once $d$ (and possibly $D$) are selected, we proceed to select the values of $p$, $q$, $P$ and $Q$ by minimizing the AIC. We allow $c\ne0$ for models where $d+D < 2$.

## A step-wise procedure for traversing the model space

Suppose we have seasonal data and we consider ARIMA$(p,d,q)(P,D,Q)_m$ models where $p$ and $q$ can take values from 0 to 3, and $P$ and $Q$ can take values from 0 to 1. When $c=0$ there is a total of 288 possible models, and when $c\ne 0$ there is a total of 192 possible models, giving 480 models altogether. If the values of $p$, $d$, $q$, $P$, $D$ and $Q$ are allowed to range more widely, the number of possible models increases rapidly. Consequently, it is often not feasible to simply fit every potential model and choose the one with the lowest AIC. Instead, we need a way of traversing the space of models efficiently in order to arrive at the model with the lowest AIC value.

We propose a step-wise algorithm as follows.

\begin{description}
\item[Step 1:] We try four possible models to start with.
\begin{itemize}
\item ARIMA($2,d,2$) if $m=1$ and ARIMA($2,d,2)(1,D,1)$ if $m>1$.
\item ARIMA($0,d,0$) if $m=1$ and ARIMA($0,d,0)(0,D,0)$ if $m>1$.
\item ARIMA($1,d,0$) if $m=1$ and ARIMA($1,d,0)(1,D,0)$ if $m>1$.
\item ARIMA($0,d,1$) if $m=1$ and ARIMA($0,d,1)(0,D,1)$ if $m>1$.
\end{itemize}
If $d+D \le 1$, these models are fitted with $c\ne0$. Otherwise, we set $c=0$. Of these four models, we select the one with the smallest AIC value. This is called the ``current'' model and is denoted by ARIMA($p,d,q$) if $m=1$ or ARIMA($p,d,q)(P,D,Q)_m$ if $m>1$.
\item[Step 2:] We consider up to seventeen variations on the current model:
\begin{itemize}
\item where one of $p$, $q$, $P$ and $Q$ is allowed to vary by $\pm1$ from the current model;
\item where $p$ and $q$ both vary by $\pm1$ from the current model;
\item where $P$ and $Q$ both vary by $\pm1$ from the current model;
\item where the constant $c$ is included if the current model has $c=0$ or excluded if the current model has $c\ne0$.
\end{itemize}
Whenever a model with lower AIC is found, it becomes the new ``current'' model and the procedure is repeated. This process finishes when we cannot find a model close to the current model with lower AIC.
\end{description}

There are several constraints on the fitted models to avoid problems with convergence or near unit-roots. The constraints are outlined below.
\begin{compactitem}\itemsep=8pt
\item The values of $p$ and $q$ are not allowed to exceed specified upper bounds (with default values of 5 in each case).
\item The values of $P$ and $Q$ are not allowed to exceed specified upper bounds (with default values of 2 in each case).
\item We reject any model which is ``close'' to non-invertible or non-causal. Specifically, we compute the roots of $\phi(B)\Phi(B)$ and $\theta(B)\Theta(B)$. If either has a root that is smaller than 1.001 in absolute value, the model is rejected.
\item If there are any errors arising in the non-linear optimization routine used for estimation, the model is rejected. The rationale here is that any model that is difficult to fit is probably not a good model for the data.
\end{compactitem}

The algorithm is guaranteed to return a valid model because the model space is finite and at least one of the starting models will be accepted (the model with no AR or MA parameters). The selected model is used to produce forecasts.

## Comparisons with exponential smoothing

There is a widespread myth that ARIMA models are more general than exponential smoothing. This is not true. The two classes of models overlap. The linear exponential smoothing models are all special cases of ARIMA models---the equivalences are discussed in @HAA08. However, the non-linear exponential smoothing models have no equivalent ARIMA counterpart. On the other hand, there are many ARIMA models which have no exponential smoothing counterpart. Thus, the two model classes overlap and are complementary; each has its strengths and weaknesses.

The exponential smoothing state space models are all non-stationary. Models with seasonality or non-damped trend (or both) have two unit roots; all other models---that is, non-seasonal models with either no trend or damped trend---have one unit root. It is possible to define a stationary model with similar characteristics to exponential smoothing, but this is not normally done. The philosophy of exponential smoothing is that the world is non-stationary. So if a stationary model is required, ARIMA models are better.

One advantage of the exponential smoothing models is that they can be non-linear. So time series that exhibit non-linear characteristics, including heteroscedasticity, may be better modelled using exponential smoothing state space models.

For seasonal data, there are many more ARIMA models than the 30 possible models in the exponential smoothing class of Section \ref{sec:expsmooth}. It may be thought that the larger model class is advantageous. However, the results in @HKSG02 show that the exponential smoothing models performed better than the ARIMA models for the seasonal M3 competition data. (For the annual M3 data, the ARIMA models performed better.) In a discussion of these results, @Hyndman01 speculates that the larger model space of ARIMA models actually harms forecasting performance because it introduces additional uncertainty. The smaller exponential smoothing class is sufficiently rich to capture the dynamics of almost all real business and economic time series.

# The forecast package {#sec:package}

The algorithms and modelling frameworks for automatic univariate time series forecasting are implemented in the \pkg{forecast} package in \R. We illustrate the methods using the following four real time series shown in Figure \ref{fig:etsexamples}.
\begin{compactitem}
\item Figure \ref{fig:etsexamples}(a) shows 125 monthly US government bond yields (percent per annum) from January 1994 to May 2004.
\item Figure \ref{fig:etsexamples}(b) displays 55 observations of annual US net electricity generation (billion kwh) for 1949 through 2003. \item Figure \ref{fig:etsexamples}(c) presents 113 quarterly observations of passenger motor vehicle production in the U.K. (thousands of cars) for the first quarter of 1977 through the first quarter of 2005. \item Figure \ref{fig:etsexamples}(d) shows 240 monthly observations of the number of short term overseas visitors to Australia from May 1985 to April 2005. \end{compactitem} ```{r etsexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using exponential smoothing state space models."} par(mfrow = c(2,2)) mod1 <- ets(bonds) mod2 <- ets(usnetelec) mod3 <- ets(ukcars) mod4 <- ets(visitors) plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum") plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh") plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars") plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people") ``` ```{r etsnames, echo=FALSE} etsnames <- c(mod1$method, mod2$method, mod3$method, mod4$method) etsnames <- gsub("Ad","A\\\\damped",etsnames) ``` ## Implementation of the automatic exponential smoothing algorithm The innovations state space modelling framework described in Section \ref{sec:expsmooth} is implemented via the \code{ets()} function in the \pkg{forecast} package. (The default settings of \code{ets()} do not allow models with multiplicative trend, but they can be included using \code{allow.multiplicative.trend=TRUE}.) 
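For example (a sketch only, not run here), the following call would allow the multiplicative trend types when searching for a model:

```{r ets-multtrend-sketch, eval=FALSE, echo=TRUE}
fit <- ets(usnetelec, allow.multiplicative.trend = TRUE)
fit$method  # the selected model, printed as an (E,T,S) triplet
```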
The models chosen via the algorithm for the four data sets were: \begin{compactitem} \item `r etsnames[1]` for monthly US 10-year bonds yield\\ ($\alpha=`r format(coef(mod1)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod1)['beta'], digits=4, nsmall=4)`$, $\phi=`r format(coef(mod1)['phi'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod1)['l'], digits=4, nsmall=4)`$, $b_0=`r format(coef(mod1)['b'], digits=4, nsmall=4)`$); \item `r etsnames[2]` for annual US net electricity generation\\ ($\alpha=`r format(coef(mod2)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod2)['beta'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod2)['l'], digits=4, nsmall=4)`$, $b_0=`r format(coef(mod2)['b'], digits=4, nsmall=4)`$); \item `r etsnames[3]` for quarterly UK motor vehicle production\\ ($\alpha=`r format(coef(mod3)['alpha'], digits=4, nsmall=4)`$, $\gamma=`r format(coef(mod3)['gamma'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod3)['l'], digits=4, nsmall=4)`$, $s_{-3}=`r format(-sum(coef(mod3)[c('s0','s1','s2')]), digits=4, nsmall=4)`$, $s_{-2}=`r format(coef(mod3)['s2'], digits=4, nsmall=4)`$, $s_{-1}=`r format(coef(mod3)['s1'], digits=4, nsmall=4)`$, $s_0=`r format(coef(mod3)['s0'], digits=4, nsmall=4)`$); \item `r etsnames[4]` for monthly Australian overseas visitors\\ ($\alpha=`r format(coef(mod4)['alpha'], digits=4, nsmall=4)`$, $\beta=`r format(coef(mod4)['beta'], digits=2, nsmall=4)`$, $\gamma=`r format(coef(mod4)['gamma'], digits=4, nsmall=4)`$, $\ell_0 = `r format(coef(mod4)['l'], digits=4, nsmall=4)`$, $b_0 = `r format(coef(mod4)['b'], digits=4, nsmall=4)`$, $s_{-11}=`r format(12-sum(tail(coef(mod4),11)), digits=4, nsmall=4)`$, $s_{-10}=`r format(coef(mod4)['s10'], digits=4, nsmall=4)`$, $s_{-9}=`r format(coef(mod4)['s9'], digits=4, nsmall=4)`$, $s_{-8}=`r format(coef(mod4)['s8'], digits=4, nsmall=4)`$, $s_{-7}=`r format(coef(mod4)['s7'], digits=4, nsmall=4)`$, $s_{-6}=`r format(coef(mod4)['s6'], digits=4, nsmall=4)`$, $s_{-5}=`r format(coef(mod4)['s5'], digits=4, nsmall=4)`$, $s_{-4}=`r format(coef(mod4)['s4'], digits=4, nsmall=4)`$, $s_{-3}=`r format(coef(mod4)['s3'], digits=4, nsmall=4)`$, $s_{-2}=`r format(coef(mod4)['s2'], digits=4, nsmall=4)`$, $s_{-1}=`r format(coef(mod4)['s1'], digits=4, nsmall=4)`$, $s_0=`r format(coef(mod4)['s0'], digits=4, nsmall=4)`$). \end{compactitem} Although there is a lot of computation involved, it can be handled remarkably quickly on modern computers. Each of the forecasts shown in Figure \ref{fig:etsexamples} took no more than a few seconds on a standard PC. The US electricity generation series took the longest as there are no analytical prediction intervals available for the ETS(M,M\damped,N) model. Consequently, the prediction intervals for this series were computed using simulation of 5000 future sample paths. To apply the algorithm to the US net electricity generation time series \code{usnetelec}, we use the following command. ```{r ets-usnetelec, echo=TRUE} etsfit <- ets(usnetelec) ``` The object \code{etsfit} is of class ``\code{ets}'' and contains all of the necessary information about the fitted model including model parameters, the value of the state vector $\bm{x}_t$ for all $t$, residuals and so on. Printing the \code{etsfit} object shows the main items of interest. ```{r ets-usnetelec-print,echo=TRUE} etsfit ``` Some goodness-of-fit measures [defined in @HK06] are obtained using \code{accuracy()}. 
```{r ets-usnetelec-accuracy,eval=TRUE,echo=TRUE} accuracy(etsfit) ``` There are also \code{coef()}, \code{plot()}, \code{summary()}, \code{residuals()}, \code{fitted()} and \code{simulate()} methods for objects of class ``\code{ets}''. The \code{plot()} function shows time plots of the original time series along with the extracted components (level, growth and seasonal). The \code{forecast()} function computes the required forecasts which are then plotted as in Figure \ref{fig:etsexamples}(b). ```{r ets-usnetelec-fcast, fig.height=5, fig.width=8, message=FALSE, warning=FALSE, include=FALSE, output=FALSE} fcast <- forecast(etsfit) plot(fcast) ``` Printing the \code{fcast} object gives a table showing the prediction intervals. ```{r ets-usnetelec-fcast-print,eval=TRUE,echo=TRUE} fcast ``` The \code{ets()} function also provides the useful feature of applying a fitted model to a new data set. For example, we could withhold 10 observations from the \code{usnetelec} data set when fitting, then compute the one-step forecast errors for the out-of-sample data. ```{r ets-usnetelec-newdata,eval=FALSE,echo=TRUE} fit <- ets(usnetelec[1:45]) test <- ets(usnetelec[46:55], model = fit) accuracy(test) ``` We can also look at the measures of forecast accuracy where the forecasts are based on only the fitting data. ```{r ets-usnetelec-fcast-accuracy,eval=FALSE,echo=TRUE} accuracy(forecast(fit,10), usnetelec[46:55]) ``` ## The HoltWinters() function There is another implementation of exponential smoothing in \R\ via the \code{HoltWinters()} function [@Meyer:2002] in the \pkg{stats} package. It implements only the (N,N), (A,N), (A,A) and (A,M) methods. The initial states $\bm{x}_0$ are fixed using a heuristic algorithm. Because of the way the initial states are estimated, a full three years of seasonal data are required to implement the seasonal forecasts using \code{HoltWinters()}. (See @shortseasonal for the minimal sample size required.) The smoothing parameters are optimized by minimizing the average squared prediction errors, which is equivalent to minimizing \eqref{likelihood} in the case of additive errors. There is a \code{predict()} method for the resulting object which can produce point forecasts and prediction intervals. Although it is nowhere documented, it appears that the prediction intervals produced by \code{predict()} for an object of class \code{HoltWinters} are based on an equivalent ARIMA model in the case of the (N,N), (A,N) and (A,A) methods, assuming additive errors. These prediction intervals are equivalent to the prediction intervals that arise from the (A,N,N), (A,A,N) and (A,A,A) state space models. For the (A,M) method, the prediction interval provided by \code{predict()} appears to be based on @CY91 which is an approximation to the true prediction interval arising from the (A,A,M) model. Prediction intervals with multiplicative errors are not possible using the \code{HoltWinters()} function. 
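As a hedged illustration of this equivalence (a sketch, not one of the original examples), the two implementations can be compared directly. The point forecasts should agree closely, although not exactly, because the initialization and optimization schemes differ.

```{r hw-ets-compare-sketch, eval=FALSE, echo=TRUE}
# Additive Holt-Winters via stats::HoltWinters()
hwfit <- HoltWinters(ukcars)
hwfc <- predict(hwfit, n.ahead = 8, prediction.interval = TRUE)
# The corresponding ETS(A,A,A) state space model
etsfc <- forecast(ets(ukcars, model = "AAA", damped = FALSE), h = 8)
cbind(HoltWinters = hwfc[, "fit"], ets = etsfc$mean)
```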
## Implementation of the automatic ARIMA algorithm

```{r arimaexamples, fig.height=7, fig.width=9, echo=FALSE, fig.cap="Four time series showing point forecasts and 80\\% \\& 95\\% prediction intervals obtained using ARIMA models."}
mod1 <- auto.arima(bonds, seasonal=FALSE, approximation=FALSE)
mod2 <- auto.arima(usnetelec)
mod3 <- auto.arima(ukcars)
mod4 <- auto.arima(visitors)
par(mfrow = c(2,2))
plot(forecast(mod1), main="(a) US 10-year bonds yield", xlab="Year", ylab="Percentage per annum")
plot(forecast(mod2), main="(b) US net electricity generation", xlab="Year", ylab="Billion kwh")
plot(forecast(mod3), main="(c) UK passenger motor vehicle production", xlab="Year", ylab="Thousands of cars")
plot(forecast(mod4), main="(d) Overseas visitors to Australia", xlab="Year", ylab="Thousands of people")
```

The algorithm of Section \ref{sec:arima} is applied to the same four time series. Unlike the exponential smoothing algorithm, the ARIMA class of models assumes homoscedasticity, which is not always appropriate. Consequently, transformations are sometimes necessary. For these four time series, we model the raw data for series (a)--(c), but the logged data for series (d). The prediction intervals are back-transformed with the point forecasts to preserve the probability coverage.

To apply this algorithm to the US net electricity generation time series \code{usnetelec}, we use the following commands.

```{r arima-auto-fcast,eval=TRUE,echo=TRUE,fig.show="hide"}
arimafit <- auto.arima(usnetelec)
fcast <- forecast(arimafit)
plot(fcast)
```

```{r arimanames, echo=FALSE}
# Convert character strings to latex
arimanames <- c(as.character(mod1), as.character(mod2), as.character(mod3), as.character(mod4))
arimanames <- gsub("\\[([0-9]*)\\]", "$_{\\1}$", arimanames)
```

The function \code{auto.arima()} implements the algorithm of Section \ref{sec:arima} and returns an object of class \code{Arima}. The resulting forecasts are shown in Figure \ref{fig:arimaexamples}. The fitted models are as follows:
\begin{compactitem}
\item `r arimanames[1]` for monthly US 10-year bonds yield\\ ($\theta_1= `r format(coef(mod1)['ma1'], digits=4, nsmall=4)`$);
\item `r arimanames[2]` for annual US net electricity generation\\ ($\phi_1= `r format(coef(mod2)['ar1'], digits=4, nsmall=4)`$; $\phi_2= `r format(coef(mod2)['ar2'], digits=4, nsmall=4)`$; $\theta_1= `r format(coef(mod2)['ma1'], digits=4, nsmall=4)`$; $\theta_2= `r format(coef(mod2)['ma2'], digits=4, nsmall=4)`$; $c= `r format(coef(mod2)['drift'], digits=4, nsmall=4)`$);
\item `r arimanames[3]` for quarterly UK motor vehicle production\\ ($\phi_1= `r format(coef(mod3)['ar1'], digits=4, nsmall=4)`$; $\phi_2= `r format(coef(mod3)['ar2'], digits=4, nsmall=4)`$; $\Phi_1= `r format(coef(mod3)['sar1'], digits=4, nsmall=4)`$; $\Phi_2= `r format(coef(mod3)['sar2'], digits=4, nsmall=4)`$);
\item `r arimanames[4]` for monthly Australian overseas visitors\\ ($\phi_1= `r format(coef(mod4)['ar1'], digits=4, nsmall=4)`$; $\theta_1= `r format(coef(mod4)['ma1'], digits=4, nsmall=4)`$; $\Theta_1= `r format(coef(mod4)['sma1'], digits=4, nsmall=4)`$; $\Theta_2= `r format(coef(mod4)['sma2'], digits=4, nsmall=4)`$; $c= `r format(coef(mod4)['drift'], digits=4, nsmall=4)`$).
\end{compactitem}
Note that the \R\ parameterization has $\theta(B) = (1 + \theta_1B + \dots + \theta_qB^q)$ and $\phi(B) = (1 - \phi_1B - \dots - \phi_pB^p)$, and similarly for the seasonal terms.

A summary of the forecasts is available, part of which is shown below.
``` Forecast method: ARIMA(2,1,2) with drift Series: usnetelec Coefficients: ar1 ar2 ma1 ma2 drift -1.3032 -0.4332 1.5284 0.8340 66.1585 s.e. 0.2122 0.2084 0.1417 0.1185 7.5595 sigma^2 estimated as 2262: log likelihood=-283.34 AIC=578.67 AICc=580.46 BIC=590.61 Error measures: ME RMSE MAE MPE MAPE MASE ACF1 Training set 0.046402 44.894 32.333 -0.61771 2.1012 0.45813 0.022492 Forecasts: Point Forecast Lo 80 Hi 80 Lo 95 Hi 95 2004 3968.957 3908.002 4029.912 3875.734 4062.180 2005 3970.350 3873.950 4066.751 3822.919 4117.782 2006 4097.171 3971.114 4223.228 3904.383 4289.959 2007 4112.332 3969.691 4254.973 3894.182 4330.482 2008 4218.671 4053.751 4383.591 3966.448 4470.894 2009 4254.559 4076.108 4433.010 3981.641 4527.476 2010 4342.760 4147.088 4538.431 4043.505 4642.014 2011 4393.306 4185.211 4601.401 4075.052 4711.560 2012 4470.261 4248.068 4692.455 4130.446 4810.077 2013 4529.113 4295.305 4762.920 4171.535 4886.690 ``` The training set error measures for the two models are very similar. Note that the information criteria are not comparable. The \pkg{forecast} package also contains the function \code{Arima()} which is largely a wrapper to the \code{arima()} function in the \pkg{stats} package. The \code{Arima()} function in the \pkg{forecast} package makes it easier to include a drift term when $d+D=1$. (Setting \code{include.mean=TRUE} in the \code{arima()} function from the \pkg{stats} package will only work when $d+D=0$.) It also provides the facility for fitting an existing ARIMA model to a new data set (as was demonstrated for the \code{ets()} function earlier). One-step forecasts for ARIMA models are now available via a \code{fitted()} function. We also provide a new function \code{arima.errors()} which returns the original time series after adjusting for regression variables. If there are no regression variables in the ARIMA model, then the errors will be identical to the original series. If there are regression variables in the ARIMA model, then the errors will be equal to the original series minus the effect of the regression variables, but leaving in the serial correlation that is modelled with the AR and MA terms. In contrast, \code{residuals()} provides true residuals, removing the AR and MA terms as well. The generic functions \code{summary()}, \code{print()}, \code{fitted()} and \code{forecast()} apply to models obtained from either the \code{Arima()} or \code{arima()} functions. ## The forecast() function The \code{forecast()} function is generic and has S3 methods for a wide range of time series models. It computes point forecasts and prediction intervals from the time series model. Methods exist for models fitted using \code{ets()}, \code{auto.arima()}, \code{Arima()}, \code{arima()}, \code{ar()}, \code{HoltWinters()} and \texttt{StructTS()}. There is also a method for a \code{ts} object. If a time series object is passed as the first argument to \code{forecast()}, the function will produce forecasts based on the exponential smoothing algorithm of Section \ref{sec:expsmooth}. In most cases, there is an existing \code{predict()} function which is intended to do much the same thing. Unfortunately, the resulting objects from the \code{predict()} function contain different information in each case and so it is not possible to build generic functions (such as \code{plot()} and \code{summary()}) for the results. So, instead, \code{forecast()} acts as a wrapper to \code{predict()}, and packages the information obtained in a common format (the \code{forecast} class). 
We also define a default \code{predict()} method which is used when no existing \code{predict()} function exists, and calls the relevant \code{forecast()} function. Thus, \code{predict()} methods parallel \code{forecast()} methods, but the latter provide consistent output that is more usable.

\subsection[The forecast class]{The \code{forecast} class}

The output from the \code{forecast()} function is an object of class ``\code{forecast}'' and includes at least the following information:
\begin{compactitem}
\item the original series;
\item point forecasts;
\item prediction intervals of specified coverage;
\item the forecasting method used and information about the fitted model;
\item residuals from the fitted model;
\item one-step forecasts from the fitted model for the period of the observed data.
\end{compactitem}

There are \code{print()}, \code{plot()} and \code{summary()} methods for the ``\code{forecast}'' class. Figures \ref{fig:etsexamples} and \ref{fig:arimaexamples} were produced using the \code{plot()} method.

The prediction intervals are, by default, computed for 80\% and 95\% coverage, although other values are possible if requested. Fan charts [@Wallis99] are possible using the combination \verb|plot(forecast(model.object, fan = TRUE))|.

## Other functions {#sec:other}

We now briefly describe some of the other features of the \pkg{forecast} package. Each of the following functions produces an object of class ``\code{forecast}''; a brief usage sketch is given after the list.

\code{croston()}
: implements the method of @Croston72 for intermittent demand forecasting. In this method, the time series is decomposed into two separate sequences: the non-zero values and the time intervals between non-zero values. These are then independently forecast using simple exponential smoothing and the forecasts of the original series are obtained as ratios of the two sets of forecasts. No prediction intervals are provided because there is no underlying stochastic model [@SH05].

\code{thetaf()}
: provides forecasts from the Theta method [@AN00]. @HB03 showed that these were equivalent to a special case of simple exponential smoothing with drift.

\code{splinef()}
: gives cubic-spline forecasts, based on fitting a cubic spline to the historical data and extrapolating it linearly. The details of this method, and the associated prediction intervals, are discussed in @HKPB05.

\code{meanf()}
: returns forecasts based on the historical mean.

\code{rwf()}
: gives ``naïve'' forecasts equal to the most recent observation assuming a random walk model. This function also allows forecasting using a random walk with drift.

In addition, there are some new plotting functions for time series.

\code{tsdisplay()}
: provides a time plot along with an ACF and PACF.

\code{seasonplot()}
: produces a seasonal plot as described in @MWH3.
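To illustrate (a hedged sketch; the series choices here are arbitrary, and this code is not part of the original examples), each of these functions takes a time series as its first argument:

```{r other-functions-sketch, eval=FALSE, echo=TRUE}
plot(rwf(Nile, drift = TRUE, h = 20))  # random walk with drift forecasts
plot(thetaf(ukcars))                   # Theta method forecasts
plot(splinef(usnetelec, h = 10))       # cubic spline forecasts
plot(meanf(WWWusage, h = 20))          # historical mean forecasts
tsdisplay(diff(WWWusage))              # time plot with ACF and PACF
seasonplot(visitors)                   # seasonal plot
```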
\newpage

# Bibliography

forecast/README.md0000644000176200001440000000473414456202551013353 0ustar liggesusersforecast
======================

[![R-CMD-check](https://github.com/robjhyndman/forecast/actions/workflows/R-CMD-check.yaml/badge.svg)](https://github.com/robjhyndman/forecast/actions/workflows/R-CMD-check.yaml)
[![CRAN_Status_Badge](https://www.r-pkg.org/badges/version/forecast)](https://cran.r-project.org/package=forecast)
[![Downloads](https://cranlogs.r-pkg.org/badges/forecast)](https://cran.r-project.org/package=forecast)
[![Licence](https://img.shields.io/badge/licence-GPL--3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0.en.html)

The R package *forecast* provides methods and tools for displaying and analysing univariate time series forecasts including exponential smoothing via state space models and automatic ARIMA modelling.

A complementary forecasting package is the [fable](http://fable.tidyverts.org/) package, which implements many of the same models but in a tidyverse framework.

## Installation

You can install the **stable** version from [CRAN](https://cran.r-project.org/package=forecast).

```s
install.packages('forecast', dependencies = TRUE)
```

You can install the **development** version from [Github](https://github.com/robjhyndman/forecast)

```s
# install.packages("remotes")
remotes::install_github("robjhyndman/forecast")
```

## Usage

```s
library(forecast)
library(ggplot2)

# ETS forecasts
USAccDeaths %>% ets() %>% forecast() %>% autoplot()

# Automatic ARIMA forecasts
WWWusage %>% auto.arima() %>% forecast(h=20) %>% autoplot()

# ARFIMA forecasts
library(fracdiff)
x <- fracdiff.sim(100, ma=-.4, d=.3)$series
arfima(x) %>% forecast(h=30) %>% autoplot()

# Forecasting with STL
USAccDeaths %>% stlm(modelfunction=ar) %>% forecast(h=36) %>% autoplot()
AirPassengers %>% stlf(lambda=0) %>% autoplot()
USAccDeaths %>% stl(s.window='periodic') %>% forecast() %>% autoplot()

# TBATS forecasts
USAccDeaths %>% tbats() %>% forecast() %>% autoplot()
taylor %>% tbats() %>% forecast() %>% autoplot()
```

## For more information

* Get started in forecasting with the online textbook at http://OTexts.org/fpp2/
* Read the Hyndsight blog at https://robjhyndman.com/hyndsight/
* Ask forecasting questions on http://stats.stackexchange.com/tags/forecasting
* Ask R questions on http://stackoverflow.com/tags/forecasting+r
* Join the International Institute of Forecasters: http://forecasters.org/

## License

This package is free and open source software, licensed under GPL-3.

forecast/build/0000755000176200001440000000000014634702027013164 5ustar liggesusersforecast/build/vignette.rds0000644000176200001440000000041614634702027015524 0ustar liggesusers[binary RDS payload omitted]

forecast/man/mforecast.Rd
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/mforecast.R
\name{mforecast}
\alias{mforecast}
\alias{forecast.mts}
\title{Forecasting time series}
\usage{
\method{forecast}{mts}(
  object,
  h = ifelse(frequency(object) > 1, 2 * frequency(object), 10),
  level = c(80, 95),
  fan = FALSE,
  robust = FALSE,
  lambda = NULL,
  biasadj = FALSE,
  find.frequency = FALSE,
  allow.multiplicative.trend = FALSE,
  ...
)
}
\arguments{
\item{object}{a multivariate time series or multivariate time series model for which forecasts are required}

\item{h}{Number of periods for forecasting}

\item{level}{Confidence level for prediction intervals.}

\item{fan}{If TRUE, \code{level} is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.}

\item{robust}{If TRUE, the function is robust to missing values and outliers in \code{object}. This argument is only valid when \code{object} is of class \code{mts}.}

\item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}.
The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated.}

\item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.}

\item{find.frequency}{If TRUE, the function determines the appropriate period if the data are of unknown period.}

\item{allow.multiplicative.trend}{If TRUE, then ETS models with multiplicative trends are allowed. Otherwise, only additive or no trend ETS models are permitted.}

\item{...}{Additional arguments affecting the forecasts produced.}
}
\value{
An object of class "\code{mforecast}".

The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the multivariate forecasts and prediction intervals.

The generic accessor functions \code{fitted.values} and \code{residuals} extract various useful features of the value returned by \code{forecast$model}.

An object of class \code{"mforecast"} is a list usually containing at least the following elements:
\item{model}{A list containing information about the fitted model}
\item{method}{The name of the forecasting method as a character string}
\item{mean}{Point forecasts as a time series}
\item{lower}{Lower limits for prediction intervals}
\item{upper}{Upper limits for prediction intervals}
\item{level}{The confidence values associated with the prediction intervals}
\item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).}
\item{residuals}{Residuals from the fitted model. For models with additive errors, the residuals will be x minus the fitted values.}
\item{fitted}{Fitted values (one-step forecasts)}
}
\description{
\code{mforecast} is a class of objects for forecasting from multivariate time series or multivariate time series models. The function invokes particular \emph{methods} which depend on the class of the first argument.
}
\details{
For example, the function \code{\link{forecast.mlm}} makes multivariate forecasts based on the results produced by \code{\link{tslm}}.
}
\seealso{
Other functions which return objects of class \code{"mforecast"} are \code{\link{forecast.mlm}}, \code{forecast.varest}.
}
\author{
Rob J Hyndman & Mitchell O'Hara-Wild
}
forecast/man/tsoutliers.Rd0000644000176200001440000000215514207263356015352 0ustar liggesusers% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/clean.R
\name{tsoutliers}
\alias{tsoutliers}
\title{Identify and replace outliers in a time series}
\usage{
tsoutliers(x, iterate = 2, lambda = NULL)
}
\arguments{
\item{x}{time series}

\item{iterate}{the number of iterations required}

\item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, the data are transformed before the model is estimated.}
}
\value{
\item{index}{Indicating the index of outlier(s)}
\item{replacement}{Suggested numeric values to replace identified outliers}
}
\description{
Uses supsmu for non-seasonal series and a periodic stl decomposition for seasonal series to identify outliers and estimate their replacements.
} \examples{ data(gold) tsoutliers(gold) } \references{ Hyndman (2021) "Detecting time series outliers" \url{https://robjhyndman.com/hyndsight/tsoutliers/}. } \seealso{ \code{\link[forecast]{na.interp}}, \code{\link[forecast]{tsclean}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/sindexf.Rd0000644000176200001440000000164614150370574014577 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/season.R \name{sindexf} \alias{sindexf} \title{Forecast seasonal index} \usage{ sindexf(object, h) } \arguments{ \item{object}{Output from \code{\link[stats]{decompose}} or \link[stats]{stl}.} \item{h}{Number of periods ahead to forecast} } \value{ Time series } \description{ Returns vector containing the seasonal index for \code{h} future periods. If the seasonal index is non-periodic, it uses the last values of the index. } \examples{ uk.stl <- stl(UKDriverDeaths,"periodic") uk.sa <- seasadj(uk.stl) uk.fcast <- holt(uk.sa,36) seasf <- sindexf(uk.stl,36) uk.fcast$mean <- uk.fcast$mean + seasf uk.fcast$lower <- uk.fcast$lower + cbind(seasf,seasf) uk.fcast$upper <- uk.fcast$upper + cbind(seasf,seasf) uk.fcast$x <- UKDriverDeaths plot(uk.fcast,main="Forecasts from Holt's method with seasonal adjustment") } \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.bats.Rd0000644000176200001440000000527514150370574015677 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecastBATS.R, R/forecastTBATS.R \name{forecast.bats} \alias{forecast.bats} \alias{forecast.tbats} \title{Forecasting using BATS and TBATS models} \usage{ \method{forecast}{bats}(object, h, level = c(80, 95), fan = FALSE, biasadj = NULL, ...) \method{forecast}{tbats}(object, h, level = c(80, 95), fan = FALSE, biasadj = NULL, ...) } \arguments{ \item{object}{An object of class "\code{bats}". Usually the result of a call to \code{\link{bats}}.} \item{h}{Number of periods for forecasting. Default value is twice the largest seasonal period (for seasonal data) or ten (for non-seasonal data).} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If TRUE, point forecasts and fitted values are mean forecast. Otherwise, these points can be considered the median of the forecast densities.} \item{...}{Other arguments, currently ignored.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.bats}. 
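For instance (a minimal sketch of the default horizon described above, mirroring the \code{\dontrun} examples below; it assumes the fitted model retains the monthly seasonal component):

  fit <- bats(USAccDeaths)   # monthly series, seasonal period 12
  fc <- forecast(fit)        # h defaults to twice the largest seasonal period
  length(fc$mean)            # 24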
An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A copy of the \code{bats} object} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Forecasts \code{h} steps ahead with a BATS model. Prediction intervals are also produced. } \examples{ \dontrun{ fit <- bats(USAccDeaths) plot(forecast(fit)) taylor.fit <- bats(taylor) plot(forecast(taylor.fit)) } } \references{ De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), Forecasting time series with complex seasonal patterns using exponential smoothing, \emph{Journal of the American Statistical Association}, \bold{106}(496), 1513-1527. } \seealso{ \code{\link{bats}}, \code{\link{tbats}},\code{\link{forecast.ets}}. } \author{ Slava Razbash and Rob J Hyndman } \keyword{ts} forecast/man/seasadj.Rd0000644000176200001440000000177514150370574014554 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/seasadj.R \name{seasadj} \alias{seasadj} \alias{seasadj.stl} \alias{seasadj.mstl} \alias{seasadj.decomposed.ts} \alias{seasadj.tbats} \alias{seasadj.seas} \title{Seasonal adjustment} \usage{ seasadj(object, ...) \method{seasadj}{stl}(object, ...) \method{seasadj}{mstl}(object, ...) \method{seasadj}{decomposed.ts}(object, ...) \method{seasadj}{tbats}(object, ...) \method{seasadj}{seas}(object, ...) } \arguments{ \item{object}{Object created by \code{\link[stats]{decompose}}, \code{\link[stats]{stl}} or \code{\link{tbats}}.} \item{...}{Other arguments not currently used.} } \value{ Univariate time series. } \description{ Returns seasonally adjusted data constructed by removing the seasonal component. } \examples{ plot(AirPassengers) lines(seasadj(decompose(AirPassengers,"multiplicative")),col=4) } \seealso{ \code{\link[stats]{stl}}, \code{\link[stats]{decompose}}, \code{\link{tbats}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/CV.Rd0000644000176200001440000000135314150370574013442 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/lm.R \name{CV} \alias{CV} \title{Cross-validation statistic} \usage{ CV(obj) } \arguments{ \item{obj}{output from \code{\link[stats]{lm}} or \code{\link{tslm}}} } \value{ Numerical vector containing CV, AIC, AICc, BIC and AdjR2 values. } \description{ Computes the leave-one-out cross-validation statistic (the mean of PRESS -- prediction residual sum of squares), AIC, corrected AIC, BIC and adjusted R^2 values for a linear model. 
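For example, candidate models can be compared on their CV values (a minimal sketch; \code{fit1} and \code{fit2} are the \code{tslm} fits from the examples below, and the returned vector is assumed to be named as listed above, with smaller CV indicating better estimated out-of-sample performance):

  cv1 <- CV(fit1)["CV"]
  cv2 <- CV(fit2)["CV"]
  best <- if (cv1 < cv2) fit1 else fit2   # prefer the model with the smaller CV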
}
\examples{
y <- ts(rnorm(120,0,3) + 20*sin(2*pi*(1:120)/12), frequency=12)
fit1 <- tslm(y ~ trend + season)
fit2 <- tslm(y ~ season)
CV(fit1)
CV(fit2)
}
\seealso{
\code{\link[stats]{AIC}}
}
\author{
Rob J Hyndman
}
\keyword{models}
forecast/man/geom_forecast.Rd0000644000176200001440000001062114634700407015744 0ustar liggesusers% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/ggplot.R
\docType{data}
\name{StatForecast}
\alias{StatForecast}
\alias{GeomForecast}
\alias{geom_forecast}
\title{Forecast plot}
\format{
An object of class \code{StatForecast} (inherits from \code{Stat}, \code{ggproto}, \code{gg}) of length 3.

An object of class \code{GeomForecast} (inherits from \code{Geom}, \code{ggproto}, \code{gg}) of length 7.
}
\usage{
StatForecast

GeomForecast

geom_forecast(
  mapping = NULL,
  data = NULL,
  stat = "forecast",
  position = "identity",
  na.rm = FALSE,
  show.legend = NA,
  inherit.aes = TRUE,
  PI = TRUE,
  showgap = TRUE,
  series = NULL,
  ...
)
}
\arguments{
\item{mapping}{Set of aesthetic mappings created by \code{\link[ggplot2:aes]{aes()}}. If specified and \code{inherit.aes = TRUE} (the default), it is combined with the default mapping at the top level of the plot. You must supply \code{mapping} if there is no plot mapping.}

\item{data}{The data to be displayed in this layer. There are three options:

If \code{NULL}, the default, the data is inherited from the plot data as specified in the call to \code{\link[ggplot2]{ggplot}}.

A \code{data.frame}, or other object, will override the plot data. All objects will be fortified to produce a data frame. See \code{\link[ggplot2]{fortify}} for which variables will be created.

A \code{function} will be called with a single argument, the plot data. The return value must be a \code{data.frame}, and will be used as the layer data.}

\item{stat}{The stat object used to calculate the data.}

\item{position}{Position adjustment, either as a string, or the result of a call to a position adjustment function.}

\item{na.rm}{If \code{FALSE} (the default), missing values are removed with a warning. If \code{TRUE}, missing values are silently removed.}

\item{show.legend}{logical. Should this layer be included in the legends? \code{NA}, the default, includes if any aesthetics are mapped. \code{FALSE} never includes, and \code{TRUE} always includes.}

\item{inherit.aes}{If \code{FALSE}, overrides the default aesthetics, rather than combining with them. This is most useful for helper functions that define both data and aesthetics and shouldn't inherit behaviour from the default plot specification, e.g. \code{\link[ggplot2]{borders}}.}

\item{PI}{If \code{FALSE}, confidence intervals will not be plotted, giving only the forecast line.}

\item{showgap}{If \code{showgap=FALSE}, the gap between the historical observations and the forecasts is removed.}

\item{series}{Matches an unidentified forecast layer with a coloured object on the plot.}

\item{...}{Additional arguments for \code{\link{forecast.ts}}; other arguments are passed on to \code{\link[ggplot2]{layer}}. These are often aesthetics, used to set an aesthetic to a fixed value, like \code{color = "red"} or \code{alpha = .5}. They may also be parameters to the paired geom/stat.}
}
\value{
A layer for a ggplot graph.
}
\description{
Generates forecasts from \code{forecast.ts} and adds them to the plot. Forecasts can be modified by passing forecast-specific arguments, as described above.
}
\details{
Multivariate forecasting is supported by having each time series on a different group.
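A minimal sketch of this (assuming the \code{lungDeaths} matrix from the examples below): \code{autoplot} places each column of the matrix in its own group, so one forecast layer is added per series:

  library(ggplot2)
  lungDeaths <- cbind(mdeaths, fdeaths)
  autoplot(lungDeaths) + geom_forecast()   # one forecast per grouped series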
You can also pass \code{geom_forecast} a \code{forecast} object to add it to the plot.

The aesthetics required for the forecasting to work include the forecast observations on the y axis and the \code{time} of the observations on the x axis. Refer to the examples below. To automatically set up aesthetics, use \code{autoplot}.
}
\examples{
\dontrun{
library(ggplot2)
autoplot(USAccDeaths) + geom_forecast()

lungDeaths <- cbind(mdeaths, fdeaths)
autoplot(lungDeaths) + geom_forecast()

# Using fortify.ts
p <- ggplot(aes(x=x, y=y), data=USAccDeaths)
p <- p + geom_line()
p + geom_forecast()

# Without fortify.ts
data <- data.frame(USAccDeaths=as.numeric(USAccDeaths), time=as.numeric(time(USAccDeaths)))
p <- ggplot(aes(x=time, y=USAccDeaths), data=data)
p <- p + geom_line()
p + geom_forecast()
p + geom_forecast(h=60)

p <- ggplot(aes(x=time, y=USAccDeaths), data=data)
p + geom_forecast(level=c(70,98))
p + geom_forecast(level=c(70,98), colour="lightblue")

# Add forecasts to multivariate series with colour groups
lungDeaths <- cbind(mdeaths, fdeaths)
autoplot(lungDeaths) + geom_forecast(forecast(mdeaths), series="mdeaths")
}
}
\seealso{
\code{\link[generics]{forecast}}, \code{\link[ggplot2]{ggproto}}
}
\author{
Mitchell O'Hara-Wild
}
\keyword{datasets}
forecast/man/autoplot.seas.Rd0000644000176200001440000000350714150370574015736 0ustar liggesusers% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/ggplot.R, R/mstl.R
\name{autoplot.decomposed.ts}
\alias{autoplot.decomposed.ts}
\alias{autoplot.stl}
\alias{autoplot.StructTS}
\alias{autoplot.seas}
\alias{autoplot.mstl}
\title{Plot time series decomposition components using ggplot}
\usage{
\method{autoplot}{decomposed.ts}(object, labels = NULL, range.bars = NULL, ...)

\method{autoplot}{stl}(object, labels = NULL, range.bars = TRUE, ...)

\method{autoplot}{StructTS}(object, labels = NULL, range.bars = TRUE, ...)

\method{autoplot}{seas}(object, labels = NULL, range.bars = NULL, ...)

\method{autoplot}{mstl}(object, ...)
}
\arguments{
\item{object}{Object of class \dQuote{\code{seas}}, \dQuote{\code{stl}}, or \dQuote{\code{decomposed.ts}}.}

\item{labels}{Labels to replace \dQuote{seasonal}, \dQuote{trend}, and \dQuote{remainder}.}

\item{range.bars}{Logical indicating if each plot should have a bar at its right side representing relative size. If \code{NULL}, automatic selection takes place.}

\item{...}{Other plotting parameters to affect the plot.}
}
\value{
Returns an object of class \code{ggplot}.
}
\description{
Produces a ggplot object of seasonally decomposed time series for objects of class \dQuote{\code{stl}} (created with \code{\link[stats]{stl}}), class \dQuote{\code{seas}} (created with \code{\link[seasonal]{seas}}), or class \dQuote{\code{decomposed.ts}} (created with \code{\link[stats]{decompose}}).
}
\examples{
library(ggplot2)
co2 \%>\% decompose() \%>\% autoplot()
nottem \%>\% stl(s.window = "periodic") \%>\% autoplot()

\dontrun{
library(seasonal)
seas(USAccDeaths) \%>\% autoplot()
}
}
\seealso{
\code{\link[seasonal]{seas}}, \code{\link[stats]{stl}}, \code{\link[stats]{decompose}}, \code{\link[stats]{StructTS}}, \code{\link[stats]{plot.stl}}.
} \author{ Mitchell O'Hara-Wild } forecast/man/plot.mforecast.Rd0000644000176200001440000000423414150370574016073 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R, R/mforecast.R \name{autoplot.mforecast} \alias{autoplot.mforecast} \alias{autolayer.mforecast} \alias{plot.mforecast} \title{Multivariate forecast plot} \usage{ \method{autoplot}{mforecast}(object, PI = TRUE, facets = TRUE, colour = FALSE, ...) \method{autolayer}{mforecast}(object, series = NULL, PI = TRUE, ...) \method{plot}{mforecast}(x, main = paste("Forecasts from", unique(x$method)), xlab = "time", ...) } \arguments{ \item{object}{Multivariate forecast object of class \code{mforecast}. Used for ggplot graphics (S3 method consistency).} \item{PI}{If \code{FALSE}, confidence intervals will not be plotted, giving only the forecast line.} \item{facets}{If TRUE, multiple time series will be faceted. If FALSE, each series will be assigned a colour.} \item{colour}{If TRUE, the time series will be assigned a colour aesthetic} \item{\dots}{additional arguments to each individual \code{plot}.} \item{series}{Matches an unidentified forecast layer with a coloured object on the plot.} \item{x}{Multivariate forecast object of class \code{mforecast}.} \item{main}{Main title. Default is the forecast method. For autoplot, specify a vector of titles for each plot.} \item{xlab}{X-axis label. For autoplot, specify a vector of labels for each plot.} } \description{ Plots historical data with multivariate forecasts and prediction intervals. } \details{ \code{autoplot} will produce an equivalent plot as a ggplot object. } \examples{ library(ggplot2) lungDeaths <- cbind(mdeaths, fdeaths) fit <- tslm(lungDeaths ~ trend + season) fcast <- forecast(fit, h=10) plot(fcast) autoplot(fcast) carPower <- as.matrix(mtcars[,c("qsec","hp")]) carmpg <- mtcars[,"mpg"] fit <- lm(carPower ~ carmpg) fcast <- forecast(fit, newdata=data.frame(carmpg=30)) plot(fcast, xlab="Year") autoplot(fcast, xlab=rep("Year",2)) } \references{ Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://otexts.com/fpp2/} } \seealso{ \code{\link[forecast]{plot.forecast}}, \code{\link[stats]{plot.ts}} } \author{ Mitchell O'Hara-Wild } \keyword{ts} forecast/man/tsdisplay.Rd0000644000176200001440000000465114150370574015152 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R, R/graph.R \name{ggtsdisplay} \alias{ggtsdisplay} \alias{tsdisplay} \title{Time series display} \usage{ ggtsdisplay( x, plot.type = c("partial", "histogram", "scatter", "spectrum"), points = TRUE, smooth = FALSE, lag.max, na.action = na.contiguous, theme = NULL, ... ) tsdisplay( x, plot.type = c("partial", "histogram", "scatter", "spectrum"), points = TRUE, ci.type = c("white", "ma"), lag.max, na.action = na.contiguous, main = NULL, xlab = "", ylab = "", pch = 1, cex = 0.5, ... ) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}.} \item{plot.type}{type of plot to include in lower right corner.} \item{points}{logical flag indicating whether to show the individual points or not in the time plot.} \item{smooth}{logical flag indicating whether to show a smooth loess curve superimposed on the time plot.} \item{lag.max}{the maximum lag to plot for the acf and pacf. 
A suitable value is selected by default if the argument is missing.} \item{na.action}{function to handle missing values in acf, pacf and spectrum calculations. The default is \code{\link[stats]{na.contiguous}}. Useful alternatives are \code{\link[stats]{na.pass}} and \code{\link{na.interp}}.} \item{theme}{Adds a ggplot element to each plot, typically a theme.} \item{\dots}{additional arguments to \code{\link[stats]{acf}}.} \item{ci.type}{type of confidence limits for ACF that is passed to \code{\link[stats]{acf}}. Should the confidence limits assume a white noise input or for lag \eqn{k} an MA(\eqn{k-1}) input?} \item{main}{Main title.} \item{xlab}{X-axis label.} \item{ylab}{Y-axis label.} \item{pch}{Plotting character.} \item{cex}{Character size.} } \value{ None. } \description{ Plots a time series along with its acf and either its pacf, lagged scatterplot or spectrum. } \details{ \code{ggtsdisplay} will produce the equivalent plot using ggplot graphics. } \examples{ library(ggplot2) ggtsdisplay(USAccDeaths, plot.type="scatter", theme=theme_bw()) tsdisplay(diff(WWWusage)) ggtsdisplay(USAccDeaths, plot.type="scatter") } \references{ Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://otexts.com/fpp2/} } \seealso{ \code{\link[stats]{plot.ts}}, \code{\link{Acf}}, \code{\link[stats]{spec.ar}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.nnetar.Rd0000644000176200001440000001023414150370574016224 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nnetar.R \name{forecast.nnetar} \alias{forecast.nnetar} \title{Forecasting using neural network models} \usage{ \method{forecast}{nnetar}( object, h = ifelse(object$m > 1, 2 * object$m, 10), PI = FALSE, level = c(80, 95), fan = FALSE, xreg = NULL, lambda = object$lambda, bootstrap = FALSE, npaths = 1000, innov = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{nnetar}" resulting from a call to \code{\link{nnetar}}.} \item{h}{Number of periods for forecasting. If \code{xreg} is used, \code{h} is ignored and the number of forecast periods is set to the number of rows of \code{xreg}.} \item{PI}{If TRUE, prediction intervals are produced, otherwise only point forecasts are calculated. If \code{PI} is FALSE, then \code{level}, \code{fan}, \code{bootstrap} and \code{npaths} are all ignored.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{xreg}{Future values of external regressor variables.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{bootstrap}{If \code{TRUE}, then prediction intervals computed using simulations with resampled residuals rather than normally distributed errors. Ignored if \code{innov} is not \code{NULL}.} \item{npaths}{Number of sample paths used in computing simulated prediction intervals.} \item{innov}{Values to use as innovations for prediction intervals. Must be a matrix with \code{h} rows and \code{npaths} columns (vectors are coerced into a matrix). If present, \code{bootstrap} is ignored.} \item{...}{Additional arguments passed to \code{\link{simulate.nnetar}}} } \value{ An object of class "\code{forecast}". 
The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.nnetar}. An object of class "\code{forecast}" is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} \item{...}{Other arguments} } \description{ Returns forecasts and other information for univariate neural network models. } \details{ Prediction intervals are calculated through simulations and can be slow. Note that if the network is too complex and overfits the data, the residuals can be arbitrarily small; if used for prediction interval calculations, they could lead to misleadingly small values. It is possible to use out-of-sample residuals to ameliorate this, see examples. } \examples{ ## Fit & forecast model fit <- nnetar(USAccDeaths, size=2) fcast <- forecast(fit, h=20) plot(fcast) \dontrun{ ## Include prediction intervals in forecast fcast2 <- forecast(fit, h=20, PI=TRUE, npaths=100) plot(fcast2) ## Set up out-of-sample innovations using cross-validation fit_cv <- CVar(USAccDeaths, size=2) res_sd <- sd(fit_cv$residuals, na.rm=TRUE) myinnovs <- rnorm(20*100, mean=0, sd=res_sd) ## Forecast using new innovations fcast3 <- forecast(fit, h=20, PI=TRUE, npaths=100, innov=myinnovs) plot(fcast3) } } \seealso{ \code{\link{nnetar}}. } \author{ Rob J Hyndman and Gabriel Caceres } \keyword{ts} forecast/man/modeldf.Rd0000644000176200001440000000052714633662406014552 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/checkresiduals.R \name{modeldf} \alias{modeldf} \title{Compute model degrees of freedom} \usage{ modeldf(object, ...) } \arguments{ \item{object}{A time series model} \item{...}{Other arguments currently ignored} } \description{ Compute model degrees of freedom } forecast/man/tbats.components.Rd0000644000176200001440000000236214150370574016434 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tbats.R \name{tbats.components} \alias{tbats.components} \title{Extract components of a TBATS model} \usage{ tbats.components(x) } \arguments{ \item{x}{A tbats object created by \code{\link{tbats}}.} } \value{ A multiple time series (\code{mts}) object. The first series is the observed time series. The second series is the trend component of the fitted model. Series three onwards are the seasonal components of the fitted model with one time series for each of the seasonal components. All components are transformed using estimated Box-Cox parameter. } \description{ Extract the level, slope and seasonal components of a TBATS model. 
The extracted components are Box-Cox transformed using the estimated transformation parameter. } \examples{ \dontrun{ fit <- tbats(USAccDeaths, use.parallel=FALSE) components <- tbats.components(fit) plot(components)} } \references{ De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), Forecasting time series with complex seasonal patterns using exponential smoothing, \emph{Journal of the American Statistical Association}, \bold{106}(496), 1513-1527. } \seealso{ \code{\link{tbats}}. } \author{ Slava Razbash and Rob J Hyndman } \keyword{ts} forecast/man/ses.Rd0000644000176200001440000001120014150370574013714 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/HoltWintersNew.R \name{ses} \alias{ses} \alias{holt} \alias{hw} \title{Exponential smoothing forecasts} \usage{ ses( y, h = 10, level = c(80, 95), fan = FALSE, initial = c("optimal", "simple"), alpha = NULL, lambda = NULL, biasadj = FALSE, x = y, ... ) holt( y, h = 10, damped = FALSE, level = c(80, 95), fan = FALSE, initial = c("optimal", "simple"), exponential = FALSE, alpha = NULL, beta = NULL, phi = NULL, lambda = NULL, biasadj = FALSE, x = y, ... ) hw( y, h = 2 * frequency(x), seasonal = c("additive", "multiplicative"), damped = FALSE, level = c(80, 95), fan = FALSE, initial = c("optimal", "simple"), exponential = FALSE, alpha = NULL, beta = NULL, gamma = NULL, phi = NULL, lambda = NULL, biasadj = FALSE, x = y, ... ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{initial}{Method used for selecting initial state values. If \code{optimal}, the initial values are optimized along with the smoothing parameters using \code{\link{ets}}. If \code{simple}, the initial values are set to values obtained using simple calculations on the first few observations. See Hyndman & Athanasopoulos (2014) for details.} \item{alpha}{Value of smoothing parameter for the level. If \code{NULL}, it will be estimated.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{x}{Deprecated. Included for backwards compatibility.} \item{...}{Other arguments passed to \code{forecast.ets}.} \item{damped}{If TRUE, use a damped trend.} \item{exponential}{If TRUE, an exponential trend is fitted. Otherwise, the trend is (locally) linear.} \item{beta}{Value of smoothing parameter for the trend. If \code{NULL}, it will be estimated.} \item{phi}{Value of damping parameter if \code{damped=TRUE}. If \code{NULL}, it will be estimated.} \item{seasonal}{Type of seasonality in \code{hw} model. "additive" or "multiplicative"} \item{gamma}{Value of smoothing parameter for the seasonal component. If \code{NULL}, it will be estimated.} } \value{ An object of class "\code{forecast}". 
The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals.

The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{ets} and associated functions.

An object of class \code{"forecast"} is a list containing at least the following elements:
\item{model}{A list containing information about the fitted model}
\item{method}{The name of the forecasting method as a character string}
\item{mean}{Point forecasts as a time series}
\item{lower}{Lower limits for prediction intervals}
\item{upper}{Upper limits for prediction intervals}
\item{level}{The confidence values associated with the prediction intervals}
\item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).}
\item{residuals}{Residuals from the fitted model.}
\item{fitted}{Fitted values (one-step forecasts)}
}
\description{
Returns forecasts and other information for exponential smoothing models applied to \code{y}.
}
\details{
\code{ses}, \code{holt} and \code{hw} are simply convenient wrapper functions for \code{forecast(ets(...))}.
}
\examples{
fcast <- holt(airmiles)
plot(fcast)
deaths.fcast <- hw(USAccDeaths, h=48)
plot(deaths.fcast)
}
\references{
Hyndman, R.J., Koehler, A.B., Ord, J.K., Snyder, R.D. (2008) \emph{Forecasting with exponential smoothing: the state space approach}, Springer-Verlag: New York. \url{http://www.exponentialsmoothing.net}.

Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://otexts.com/fpp2/}
}
\seealso{
\code{\link{ets}}, \code{\link[stats]{HoltWinters}}, \code{\link{rwf}}, \code{\link[stats]{arima}}.
}
\author{
Rob J Hyndman
}
\keyword{ts}
forecast/man/monthdays.Rd0000644000176200001440000000134614150370574015142 0ustar liggesusers% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/season.R
\name{monthdays}
\alias{monthdays}
\title{Number of days in each season}
\usage{
monthdays(x)
}
\arguments{
\item{x}{time series}
}
\value{
Time series
}
\description{
Returns the number of days in each month or quarter of the observed time period.
}
\details{
Useful for month-length adjustments.
}
\examples{
par(mfrow=c(2,1))
plot(ldeaths, xlab="Year", ylab="pounds",
  main="Monthly deaths from lung disease (UK)")
ldeaths.adj <- ldeaths/monthdays(ldeaths)*365.25/12
plot(ldeaths.adj, xlab="Year", ylab="pounds",
  main="Adjusted monthly deaths from lung disease (UK)")
}
\seealso{
\code{\link[forecast]{bizdays}}
}
\author{
Rob J Hyndman
}
\keyword{ts}
forecast/man/fourier.Rd0000644000176200001440000000442314150370574014606 0ustar liggesusers% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/season.R
\name{fourier}
\alias{fourier}
\alias{fourierf}
\title{Fourier terms for modelling seasonality}
\usage{
fourier(x, K, h = NULL)

fourierf(x, K, h)
}
\arguments{
\item{x}{Seasonal time series: a \code{ts} or a \code{msts} object}

\item{K}{Maximum order(s) of Fourier terms}

\item{h}{Number of periods ahead to forecast (optional)}
}
\value{
Numerical matrix.
}
\description{
\code{fourier} returns a matrix containing terms from a Fourier series, up to order \code{K}, suitable for use in \code{\link{Arima}}, \code{\link{auto.arima}}, or \code{\link{tslm}}.
}
\details{
\code{fourierf} is deprecated; use the \code{h} argument in \code{fourier} instead.
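A minimal sketch of the replacement, using a monthly series:

  fourier(USAccDeaths, K = 2)           # in-sample terms: 2*K columns, one row per observation
  fourier(USAccDeaths, K = 2, h = 12)   # future terms for forecasting: 12 rows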
The period of the Fourier terms is determined from the time series characteristics of \code{x}. When \code{h} is missing, the length of \code{x} also determines the number of rows for the matrix returned by \code{fourier}. Otherwise, the value of \code{h} determines the number of rows for the matrix returned by \code{fourier}, typically used for forecasting. The values within \code{x} are not used. Typical use would omit \code{h} when generating Fourier terms for training a model and include \code{h} when generating Fourier terms for forecasting. When \code{x} is a \code{ts} object, the value of \code{K} should be an integer and specifies the number of sine and cosine terms to return. Thus, the matrix returned has \code{2*K} columns. When \code{x} is a \code{msts} object, then \code{K} should be a vector of integers specifying the number of sine and cosine terms for each of the seasonal periods. Then the matrix returned will have \code{2*sum(K)} columns. } \examples{ library(ggplot2) # Using Fourier series for a "ts" object # K is chosen to minimize the AICc deaths.model <- auto.arima(USAccDeaths, xreg=fourier(USAccDeaths,K=5), seasonal=FALSE) deaths.fcast <- forecast(deaths.model, xreg=fourier(USAccDeaths, K=5, h=36)) autoplot(deaths.fcast) + xlab("Year") # Using Fourier series for a "msts" object taylor.lm <- tslm(taylor ~ fourier(taylor, K = c(3, 3))) taylor.fcast <- forecast(taylor.lm, data.frame(fourier(taylor, K = c(3, 3), h = 270))) autoplot(taylor.fcast) } \seealso{ \code{\link{seasonaldummy}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/findfrequency.Rd0000644000176200001440000000225214207263356015775 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/findfrequency.R \name{findfrequency} \alias{findfrequency} \title{Find dominant frequency of a time series} \usage{ findfrequency(x) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}} } \value{ an integer value } \description{ \code{findfrequency} returns the period of the dominant frequency of a time series. For seasonal data, it will return the seasonal period. For cyclic data, it will return the average cycle length. } \details{ The dominant frequency is determined from a spectral analysis of the time series. First, a linear trend is removed, then the spectral density function is estimated from the best fitting autoregressive model (based on the AIC). If there is a large (possibly local) maximum in the spectral density function at frequency \eqn{f}, then the function will return the period \eqn{1/f} (rounded to the nearest integer). If no such dominant frequency can be found, the function will return 1. } \examples{ findfrequency(USAccDeaths) # Monthly data findfrequency(taylor) # Half-hourly data findfrequency(lynx) # Annual data } \author{ Rob J Hyndman } \keyword{ts} forecast/man/seasonplot.Rd0000644000176200001440000000411514150370574015320 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R, R/graph.R \name{ggseasonplot} \alias{ggseasonplot} \alias{seasonplot} \title{Seasonal plot} \usage{ ggseasonplot( x, season.labels = NULL, year.labels = FALSE, year.labels.left = FALSE, type = NULL, col = NULL, continuous = FALSE, polar = FALSE, labelgap = 0.04, ... ) seasonplot( x, s, season.labels = NULL, year.labels = FALSE, year.labels.left = FALSE, type = "o", main, xlab = NULL, ylab = "", col = 1, labelgap = 0.1, ... 
) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}.} \item{season.labels}{Labels for each season in the "year"} \item{year.labels}{Logical flag indicating whether labels for each year of data should be plotted on the right.} \item{year.labels.left}{Logical flag indicating whether labels for each year of data should be plotted on the left.} \item{type}{plot type (as for \code{\link[graphics]{plot}}). Not yet supported for ggseasonplot.} \item{col}{Colour} \item{continuous}{Should the colour scheme for years be continuous or discrete?} \item{polar}{Plot the graph on seasonal coordinates} \item{labelgap}{Distance between year labels and plotted lines} \item{\dots}{additional arguments to \code{\link[graphics]{plot}}.} \item{s}{seasonal frequency of x} \item{main}{Main title.} \item{xlab}{X-axis label.} \item{ylab}{Y-axis label.} } \value{ None. } \description{ Plots a seasonal plot as described in Hyndman and Athanasopoulos (2014, chapter 2). This is like a time plot except that the data are plotted against the seasons in separate years. } \examples{ ggseasonplot(AirPassengers, col=rainbow(12), year.labels=TRUE) ggseasonplot(AirPassengers, year.labels=TRUE, continuous=TRUE) seasonplot(AirPassengers, col=rainbow(12), year.labels=TRUE) } \references{ Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://otexts.com/fpp2/} } \seealso{ \code{\link[stats]{monthplot}} } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/forecast.modelAR.Rd0000644000176200001440000000705214150370574016264 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/modelAR.R \name{forecast.modelAR} \alias{forecast.modelAR} \title{Forecasting using user-defined model} \usage{ \method{forecast}{modelAR}( object, h = ifelse(object$m > 1, 2 * object$m, 10), PI = FALSE, level = c(80, 95), fan = FALSE, xreg = NULL, lambda = object$lambda, bootstrap = FALSE, npaths = 1000, innov = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{modelAR}" resulting from a call to \code{\link{modelAR}}.} \item{h}{Number of periods for forecasting. If \code{xreg} is used, \code{h} is ignored and the number of forecast periods is set to the number of rows of \code{xreg}.} \item{PI}{If TRUE, prediction intervals are produced, otherwise only point forecasts are calculated. If \code{PI} is FALSE, then \code{level}, \code{fan}, \code{bootstrap} and \code{npaths} are all ignored.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{xreg}{Future values of external regressor variables.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{bootstrap}{If \code{TRUE}, then prediction intervals computed using simulations with resampled residuals rather than normally distributed errors. Ignored if \code{innov} is not \code{NULL}.} \item{npaths}{Number of sample paths used in computing simulated prediction intervals.} \item{innov}{Values to use as innovations for prediction intervals. Must be a matrix with \code{h} rows and \code{npaths} columns (vectors are coerced into a matrix). 
If present, \code{bootstrap} is ignored.} \item{...}{Additional arguments passed to \code{\link{simulate.nnetar}}} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.nnetar}. An object of class "\code{forecast}" is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} \item{...}{Other arguments} } \description{ Returns forecasts and other information for user-defined models. } \details{ Prediction intervals are calculated through simulations and can be slow. Note that if the model is too complex and overfits the data, the residuals can be arbitrarily small; if used for prediction interval calculations, they could lead to misleadingly small values. } \seealso{ \code{\link{nnetar}}. } \author{ Rob J Hyndman and Gabriel Caceres } \keyword{ts} forecast/man/naive.Rd0000644000176200001440000000777214341272370014244 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/naive.R \name{rwf} \alias{rwf} \alias{naive} \alias{print.naive} \alias{snaive} \title{Naive and Random Walk Forecasts} \usage{ rwf( y, h = 10, drift = FALSE, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = FALSE, ..., x = y ) naive( y, h = 10, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = FALSE, ..., x = y ) snaive( y, h = 2 * frequency(x), level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = FALSE, ..., x = y ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting} \item{drift}{Logical flag. If TRUE, fits a random walk with drift model.} \item{level}{Confidence levels for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Additional arguments affecting the forecasts produced. If \code{model=NULL}, \code{forecast.ts} passes these to \code{\link{ets}} or \code{\link{stlf}} depending on the frequency of the time series. 
If \code{model} is not \code{NULL}, the arguments are passed to the relevant modelling function.} \item{x}{Deprecated. Included for backwards compatibility.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{naive} or \code{snaive}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ \code{rwf()} returns forecasts and prediction intervals for a random walk with drift model applied to \code{y}. This is equivalent to an ARIMA(0,1,0) model with an optional drift coefficient. \code{naive()} is simply a wrapper to \code{rwf()} for simplicity. \code{snaive()} returns forecasts and prediction intervals from an ARIMA(0,0,0)(0,1,0)m model where m is the seasonal period. } \details{ The random walk with drift model is \deqn{Y_t=c + Y_{t-1} + Z_t}{Y[t]=c + Y[t-1] + Z[t]} where \eqn{Z_t}{Z[t]} is a normal iid error. Forecasts are given by \deqn{Y_n(h)=ch+Y_n}{Y[n+h]=ch+Y[n]}. If there is no drift (as in \code{naive}), the drift parameter c=0. Forecast standard errors allow for uncertainty in estimating the drift parameter (unlike the corresponding forecasts obtained by fitting an ARIMA model directly). The seasonal naive model is \deqn{Y_t= Y_{t-m} + Z_t}{Y[t]=Y[t-m] + Z[t]} where \eqn{Z_t}{Z[t]} is a normal iid error. } \examples{ gold.fcast <- rwf(gold[1:60], h=50) plot(gold.fcast) plot(naive(gold,h=50),include=200) plot(snaive(wineind)) } \seealso{ \code{\link{Arima}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/meanf.Rd0000644000176200001440000000552114150370574014221 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast2.R \name{meanf} \alias{meanf} \title{Mean Forecast} \usage{ meanf( y, h = 10, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = FALSE, bootstrap = FALSE, npaths = 5000, x = y ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting} \item{level}{Confidence levels for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. 
If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{bootstrap}{If TRUE, use a bootstrap method to compute prediction intervals. Otherwise, assume a normal distribution.} \item{npaths}{Number of bootstrapped sample paths to use if \code{bootstrap==TRUE}.} \item{x}{Deprecated. Included for backwards compatibility.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{meanf}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and prediction intervals for an iid model applied to y. } \details{ The iid model is \deqn{Y_t=\mu + Z_t}{Y[t]=mu + Z[t]} where \eqn{Z_t}{Z[t]} is a normal iid error. Forecasts are given by \deqn{Y_n(h)=\mu}{Y[n+h]=mu} where \eqn{\mu}{mu} is estimated by the sample mean. } \examples{ nile.fcast <- meanf(Nile, h=10) plot(nile.fcast) } \seealso{ \code{\link{rwf}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.Arima.Rd0000644000176200001440000001064414150370574015773 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arfima.R, R/arima.R \name{forecast.fracdiff} \alias{forecast.fracdiff} \alias{forecast.Arima} \alias{forecast.forecast_ARIMA} \alias{forecast.ar} \title{Forecasting using ARIMA or ARFIMA models} \usage{ \method{forecast}{fracdiff}( object, h = 10, level = c(80, 95), fan = FALSE, lambda = object$lambda, biasadj = NULL, ... ) \method{forecast}{Arima}( object, h = ifelse(object$arma[5] > 1, 2 * object$arma[5], 10), level = c(80, 95), fan = FALSE, xreg = NULL, lambda = object$lambda, bootstrap = FALSE, npaths = 5000, biasadj = NULL, ... ) \method{forecast}{ar}( object, h = 10, level = c(80, 95), fan = FALSE, lambda = NULL, bootstrap = FALSE, npaths = 5000, biasadj = FALSE, ... ) } \arguments{ \item{object}{An object of class "\code{Arima}", "\code{ar}" or "\code{fracdiff}". Usually the result of a call to \code{\link[stats]{arima}}, \code{\link{auto.arima}}, \code{\link[stats]{ar}}, \code{\link{arfima}} or \code{\link[fracdiff]{fracdiff}}.} \item{h}{Number of periods for forecasting. If \code{xreg} is used, \code{h} is ignored and the number of forecast periods is set to the number of rows of \code{xreg}.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. 
Otherwise, the data are transformed before the model is estimated.}

\item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.}

\item{...}{Other arguments.}

\item{xreg}{Future values of any regression variables (for class \code{Arima} objects only). A numerical vector or matrix of external regressors; it should not be a data frame.}

\item{bootstrap}{If \code{TRUE}, then prediction intervals are computed using simulation with resampled errors.}

\item{npaths}{Number of sample paths used in computing simulated prediction intervals when \code{bootstrap=TRUE}.}
}
\value{
An object of class "\code{forecast}".

The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.Arima}.

An object of class "\code{forecast}" is a list containing at least the following elements:
\item{model}{A list containing information about the fitted model}
\item{method}{The name of the forecasting method as a character string}
\item{mean}{Point forecasts as a time series}
\item{lower}{Lower limits for prediction intervals}
\item{upper}{Upper limits for prediction intervals}
\item{level}{The confidence values associated with the prediction intervals}
\item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).}
\item{residuals}{Residuals from the fitted model. That is, x minus fitted values.}
\item{fitted}{Fitted values (one-step forecasts)}
}
\description{
Returns forecasts and other information for univariate ARIMA models.
}
\details{
For \code{Arima} or \code{ar} objects, the function calls \code{\link[stats]{predict.Arima}} or \code{\link[stats]{predict.ar}} and constructs an object of class "\code{forecast}" from the results. For \code{fracdiff} objects, the calculations are all done within \code{\link{forecast.fracdiff}} using the equations given by Peiris and Perera (1988).
}
\examples{
fit <- Arima(WWWusage, c(3,1,0))
plot(forecast(fit))

library(fracdiff)
x <- fracdiff.sim(100, ma=-.4, d=.3)$series
fit <- arfima(x)
plot(forecast(fit, h=30))
}
\references{
Peiris, M. & Perera, B. (1988), On prediction with fractionally differenced ARIMA models, \emph{Journal of Time Series Analysis}, \bold{9}(3), 215-220.
}
\seealso{
\code{\link[stats]{predict.Arima}}, \code{\link[stats]{predict.ar}}, \code{\link{auto.arima}}, \code{\link{Arima}}, \code{\link[stats]{arima}}, \code{\link[stats]{ar}}, \code{\link{arfima}}.
} \author{ Rob J Hyndman } \keyword{ts} forecast/man/is.forecast.Rd0000644000176200001440000000066314150370574015355 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast.R, R/mforecast.R, R/spline.R \name{is.forecast} \alias{is.forecast} \alias{is.mforecast} \alias{is.splineforecast} \title{Is an object a particular forecast type?} \usage{ is.forecast(x) is.mforecast(x) is.splineforecast(x) } \arguments{ \item{x}{object to be tested} } \description{ Returns true if the forecast object is of a particular type } forecast/man/msts.Rd0000644000176200001440000000276414150370574014127 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/msts.R \name{msts} \alias{msts} \alias{print.msts} \alias{window.msts} \alias{`[.msts`} \title{Multi-Seasonal Time Series} \usage{ msts(data, seasonal.periods, ts.frequency = floor(max(seasonal.periods)), ...) } \arguments{ \item{data}{A numeric vector, ts object, matrix or data frame. It is intended that the time series data is univariate, otherwise treated the same as ts().} \item{seasonal.periods}{A vector of the seasonal periods of the msts.} \item{ts.frequency}{The seasonal period that should be used as frequency of the underlying ts object. The default value is \code{max(seasonal.periods)}.} \item{...}{Arguments to be passed to the underlying call to \code{ts()}. For example \code{start=c(1987,5)}.} } \value{ An object of class \code{c("msts", "ts")}. If there is only one seasonal period (i.e., \code{length(seasonal.periods)==1}), then the object is of class \code{"ts"}. } \description{ msts is an S3 class for multi seasonal time series objects, intended to be used for models that support multiple seasonal periods. The msts class inherits from the ts class and has an additional "msts" attribute which contains the vector of seasonal periods. All methods that work on a ts class, should also work on a msts class. } \examples{ x <- msts(taylor, seasonal.periods=c(2*24,2*24*7,2*24*365), start=2000+22/52) y <- msts(USAccDeaths, seasonal.periods=12, start=1949) } \author{ Slava Razbash and Rob J Hyndman } \keyword{ts} forecast/man/residuals.forecast.Rd0000644000176200001440000000610714150370574016734 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/residuals.R \name{residuals.forecast} \alias{residuals.forecast} \alias{residuals.ar} \alias{residuals.Arima} \alias{residuals.forecast_ARIMA} \alias{residuals.bats} \alias{residuals.tbats} \alias{residuals.ets} \alias{residuals.ARFIMA} \alias{residuals.nnetar} \alias{residuals.stlm} \alias{residuals.tslm} \title{Residuals for various time series models} \usage{ \method{residuals}{forecast}(object, type = c("innovation", "response"), ...) \method{residuals}{ar}(object, type = c("innovation", "response"), ...) \method{residuals}{Arima}(object, type = c("innovation", "response", "regression"), h = 1, ...) \method{residuals}{bats}(object, type = c("innovation", "response"), h = 1, ...) \method{residuals}{tbats}(object, type = c("innovation", "response"), h = 1, ...) \method{residuals}{ets}(object, type = c("innovation", "response"), h = 1, ...) \method{residuals}{ARFIMA}(object, type = c("innovation", "response"), ...) \method{residuals}{nnetar}(object, type = c("innovation", "response"), h = 1, ...) \method{residuals}{stlm}(object, type = c("innovation", "response"), ...) \method{residuals}{tslm}(object, type = c("innovation", "response", "deviance"), ...) 
} \arguments{ \item{object}{An object containing a time series model of class \code{ar}, \code{Arima}, \code{bats}, \code{ets}, \code{arfima}, \code{nnetar} or \code{stlm}. If \code{object} is of class \code{forecast}, then the function will return \code{object$residuals} if it exists, otherwise it returns the differences between the observations and their fitted values.} \item{type}{Type of residual.} \item{...}{Other arguments not used.} \item{h}{If \code{type='response'}, then the fitted values are computed for \code{h}-step forecasts.} } \value{ A \code{ts} object. } \description{ Returns time series of residuals from a fitted model. } \details{ Innovation residuals correspond to the white noise process that drives the evolution of the time series model. Response residuals are the difference between the observations and the fitted values (equivalent to \code{h}-step forecasts). For functions with no \code{h} argument, \code{h=1}. For homoscedastic models, the innovation residuals and the response residuals for \code{h=1} are identical. Regression residuals are available for regression models with ARIMA errors, and are equal to the original data minus the effect of the regression variables. If there are no regression variables, the errors will be identical to the original series (possibly adjusted to have zero mean). \code{arima.errors} is a deprecated function which is identical to \code{residuals.Arima(object, type="regression")}. For \code{nnetar} objects, when \code{type="innovations"} and \code{lambda} is used, a matrix of time-series consisting of the residuals from each of the fitted neural networks is returned. } \examples{ fit <- Arima(lynx,order=c(4,0,0), lambda=0.5) plot(residuals(fit)) plot(residuals(fit, type='response')) } \seealso{ \code{\link{fitted.Arima}}, \code{\link{checkresiduals}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/plot.ets.Rd0000644000176200001440000000207514150370574014704 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ets.R, R/ggplot.R \name{plot.ets} \alias{plot.ets} \alias{autoplot.ets} \title{Plot components from ETS model} \usage{ \method{plot}{ets}(x, ...) \method{autoplot}{ets}(object, range.bars = NULL, ...) } \arguments{ \item{x}{Object of class \dQuote{ets}.} \item{...}{Other plotting parameters to affect the plot.} \item{object}{Object of class \dQuote{ets}. Used for ggplot graphics (S3 method consistency).} \item{range.bars}{Logical indicating if each plot should have a bar at its right side representing relative size. If NULL, automatic selection takes place.} } \value{ None. Function produces a plot } \description{ Produces a plot of the level, slope and seasonal components from an ETS model. } \details{ \code{autoplot} will produce an equivalent plot as a ggplot object. } \examples{ fit <- ets(USAccDeaths) plot(fit) plot(fit,plot.type="single",ylab="",col=1:3) library(ggplot2) autoplot(fit) } \seealso{ \code{\link{ets}} } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{hplot} forecast/man/tslm.Rd0000644000176200001440000000433114150370574014110 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/lm.R \name{tslm} \alias{tslm} \title{Fit a linear model with time series components} \usage{ tslm(formula, data, subset, lambda = NULL, biasadj = FALSE, ...) 
} \arguments{ \item{formula}{an object of class "formula" (or one that can be coerced to that class): a symbolic description of the model to be fitted.} \item{data}{an optional data frame, list or environment (or object coercible by as.data.frame to a data frame) containing the variables in the model. If not found in data, the variables are taken from environment(formula), typically the environment from which lm is called.} \item{subset}{an optional subset containing rows of data to keep. For best results, pass a logical vector of rows to keep. Also supports \code{\link[base]{subset}()} functions.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Other arguments passed to \code{\link[stats]{lm}()}} } \value{ Returns an object of class "lm". } \description{ \code{tslm} is used to fit linear models to time series including trend and seasonality components. } \details{ \code{tslm} is largely a wrapper for \code{\link[stats]{lm}()} except that it allows variables "trend" and "season" which are created on the fly from the time series characteristics of the data. The variable "trend" is a simple time trend and "season" is a factor indicating the season (e.g., the month or the quarter depending on the frequency of the data). } \examples{ y <- ts(rnorm(120,0,3) + 1:120 + 20*sin(2*pi*(1:120)/12), frequency=12) fit <- tslm(y ~ trend + season) plot(forecast(fit, h=20)) } \seealso{ \code{\link{forecast.lm}}, \code{\link[stats]{lm}}. } \author{ Mitchell O'Hara-Wild and Rob J Hyndman } \keyword{stats} forecast/man/taylor.Rd0000644000176200001440000000125114150370574014441 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{taylor} \alias{taylor} \title{Half-hourly electricity demand} \format{ Time series data } \source{ James W Taylor } \usage{ taylor } \description{ Half-hourly electricity demand in England and Wales from Monday 5 June 2000 to Sunday 27 August 2000. Discussed in Taylor (2003), and kindly provided by James W Taylor. Units: Megawatts } \examples{ plot(taylor) } \references{ Taylor, J.W. (2003) Short-term electricity demand forecasting using double seasonal exponential smoothing. \emph{Journal of the Operational Research Society}, \bold{54}, 799-805. } \keyword{datasets} forecast/man/na.interp.Rd0000644000176200001440000000217514150370574015033 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/clean.R \name{na.interp} \alias{na.interp} \title{Interpolate missing values in a time series} \usage{ na.interp( x, lambda = NULL, linear = (frequency(x) <= 1 | sum(!is.na(x)) <= 2 * frequency(x)) ) } \arguments{ \item{x}{time series} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. 
Otherwise, data transformed before model is estimated.} \item{linear}{Should linear interpolation be used?} } \value{ Time series } \description{ By default, uses linear interpolation for non-seasonal series. For seasonal series, a robust STL decomposition is first computed. Then a linear interpolation is applied to the seasonally adjusted data, and the seasonal component is added back. } \details{ A more general and flexible approach is available using \code{na.approx} in the \code{zoo} package. } \examples{ data(gold) plot(na.interp(gold)) } \seealso{ \code{\link[forecast]{tsoutliers}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.ts.Rd0000644000176200001440000001144714207263356015374 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast.R \name{forecast.ts} \alias{forecast.ts} \alias{print.forecast} \alias{summary.forecast} \alias{as.data.frame.forecast} \alias{as.ts.forecast} \alias{forecast.default} \title{Forecasting time series} \usage{ \method{forecast}{ts}( object, h = ifelse(frequency(object) > 1, 2 * frequency(object), 10), level = c(80, 95), fan = FALSE, robust = FALSE, lambda = NULL, biasadj = FALSE, find.frequency = FALSE, allow.multiplicative.trend = FALSE, model = NULL, ... ) \method{forecast}{default}(object, ...) \method{print}{forecast}(x, ...) } \arguments{ \item{object}{a time series or time series model for which forecasts are required} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, \code{level} is set to \code{seq(51,99,by=3)}. This is suitable for fan plots.} \item{robust}{If TRUE, the function is robust to missing values and outliers in \code{object}. This argument is only valid when \code{object} is of class \code{ts}.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{find.frequency}{If TRUE, the function determines the appropriate period, if the data is of unknown period.} \item{allow.multiplicative.trend}{If TRUE, then ETS models with multiplicative trends are allowed. Otherwise, only additive or no trend ETS models are permitted.} \item{model}{An object describing a time series model; e.g., one of class \code{ets}, \code{Arima}, \code{bats}, \code{tbats}, or \code{nnetar}.} \item{...}{Additional arguments affecting the forecasts produced. If \code{model=NULL}, \code{forecast.ts} passes these to \code{\link{ets}} or \code{\link{stlf}} depending on the frequency of the time series. If \code{model} is not \code{NULL}, the arguments are passed to the relevant modelling function.} \item{x}{a numeric vector or time series of class \code{ts}.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract various useful features of the value returned by \code{forecast$model}.
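As an illustrative sketch of these accessors (not part of the documented interface; the non-seasonal \code{WWWusage} series and default settings are assumed):

library(forecast)
fc <- forecast(WWWusage, h = 10) # an ETS model is selected for this non-seasonal series
head(fitted(fc))    # one-step in-sample forecasts from the fitted model
head(residuals(fc)) # the corresponding residuals
fc$mean             # the point forecasts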
An object of class \code{"forecast"} is a list usually containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. For models with additive errors, the residuals will be x minus the fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ \code{forecast} is a generic function for forecasting from time series or time series models. The function invokes particular \emph{methods} which depend on the class of the first argument. } \details{ For example, the function \code{\link{forecast.Arima}} makes forecasts based on the results produced by \code{\link[stats]{arima}}. If \code{model=NULL},the function \code{\link{forecast.ts}} makes forecasts using \code{\link{ets}} models (if the data are non-seasonal or the seasonal period is 12 or less) or \code{\link{stlf}} (if the seasonal period is 13 or more). If \code{model} is not \code{NULL}, \code{forecast.ts} will apply the \code{model} to the \code{object} time series, and then generate forecasts accordingly. } \examples{ WWWusage \%>\% forecast \%>\% plot fit <- ets(window(WWWusage, end=60)) fc <- forecast(WWWusage, model=fit) } \seealso{ Other functions which return objects of class \code{"forecast"} are \code{\link{forecast.ets}}, \code{\link{forecast.Arima}}, \code{\link{forecast.HoltWinters}}, \code{\link{forecast.StructTS}}, \code{\link{meanf}}, \code{\link{rwf}}, \code{\link{splinef}}, \code{\link{thetaf}}, \code{\link{croston}}, \code{\link{ses}}, \code{\link{holt}}, \code{\link{hw}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/arimaorder.Rd0000644000176200001440000000211114150370574015250 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arima.R \name{arimaorder} \alias{arimaorder} \title{Return the order of an ARIMA or ARFIMA model} \usage{ arimaorder(object) } \arguments{ \item{object}{An object of class \dQuote{\code{Arima}}, dQuote\code{ar} or \dQuote{\code{fracdiff}}. Usually the result of a call to \code{\link[stats]{arima}}, \code{\link{Arima}}, \code{\link{auto.arima}}, \code{\link[stats]{ar}}, \code{\link{arfima}} or \code{\link[fracdiff]{fracdiff}}.} } \value{ A numerical vector giving the values \eqn{p}, \eqn{d} and \eqn{q} of the ARIMA or ARFIMA model. For a seasonal ARIMA model, the returned vector contains the values \eqn{p}, \eqn{d}, \eqn{q}, \eqn{P}, \eqn{D}, \eqn{Q} and \eqn{m}, where \eqn{m} is the period of seasonality. } \description{ Returns the order of a univariate ARIMA or ARFIMA model. } \examples{ WWWusage \%>\% auto.arima \%>\% arimaorder } \seealso{ \code{\link[stats]{ar}}, \code{\link{auto.arima}}, \code{\link{Arima}}, \code{\link[stats]{arima}}, \code{\link{arfima}}. 
} \author{ Rob J Hyndman } \keyword{ts} forecast/man/autoplot.acf.Rd0000644000176200001440000000553614150370574015536 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{autoplot.acf} \alias{autoplot.acf} \alias{ggAcf} \alias{ggPacf} \alias{ggCcf} \alias{autoplot.mpacf} \alias{ggtaperedacf} \alias{ggtaperedpacf} \title{ggplot (Partial) Autocorrelation and Cross-Correlation Function Estimation and Plotting} \usage{ \method{autoplot}{acf}(object, ci = 0.95, ...) ggAcf( x, lag.max = NULL, type = c("correlation", "covariance", "partial"), plot = TRUE, na.action = na.contiguous, demean = TRUE, ... ) ggPacf( x, lag.max = NULL, plot = TRUE, na.action = na.contiguous, demean = TRUE, ... ) ggCcf( x, y, lag.max = NULL, type = c("correlation", "covariance"), plot = TRUE, na.action = na.contiguous, ... ) \method{autoplot}{mpacf}(object, ...) ggtaperedacf( x, lag.max = NULL, type = c("correlation", "partial"), plot = TRUE, calc.ci = TRUE, level = 95, nsim = 100, ... ) ggtaperedpacf(x, ...) } \arguments{ \item{object}{Object of class \dQuote{\code{acf}}.} \item{ci}{coverage probability for confidence interval. Plotting of the confidence interval is suppressed if ci is zero or negative.} \item{...}{Other plotting parameters to affect the plot.} \item{x}{a univariate or multivariate (not Ccf) numeric time series object or a numeric vector or matrix.} \item{lag.max}{maximum lag at which to calculate the acf.} \item{type}{character string giving the type of acf to be computed. Allowed values are "\code{correlation}" (the default), \dQuote{\code{covariance}} or \dQuote{\code{partial}}.} \item{plot}{logical. If \code{TRUE} (the default) the resulting ACF, PACF or CCF is plotted.} \item{na.action}{function to handle missing values. Default is \code{\link[stats]{na.contiguous}}. Useful alternatives are \code{\link[stats]{na.pass}} and \code{\link{na.interp}}.} \item{demean}{Should covariances be about the sample means?} \item{y}{a univariate numeric time series object or a numeric vector.} \item{calc.ci}{If \code{TRUE}, confidence intervals for the ACF/PACF estimates are calculated.} \item{level}{Percentage level used for the confidence intervals.} \item{nsim}{The number of bootstrap samples used in estimating the confidence intervals.} } \value{ A ggplot object. } \description{ Produces a ggplot version of the plots produced by the equivalent Acf, Pacf, Ccf, taperedacf and taperedpacf functions. } \details{ If \code{autoplot} is given an \code{acf} or \code{mpacf} object, then an appropriate ggplot object will be created. } \examples{ library(ggplot2) ggAcf(wineind) wineind \%>\% Acf(plot=FALSE) \%>\% autoplot \dontrun{ wineind \%>\% taperedacf(plot=FALSE) \%>\% autoplot ggtaperedacf(wineind) ggtaperedpacf(wineind)} ggCcf(mdeaths, fdeaths) } \seealso{ \code{\link[stats]{plot.acf}}, \code{\link{Acf}}, \code{\link[stats]{acf}}, \code{\link{taperedacf}} } \author{ Mitchell O'Hara-Wild } forecast/man/forecast.baggedModel.Rd0000644000176200001440000000456614150370574017140 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/baggedModel.R \name{forecast.baggedModel} \alias{forecast.baggedModel} \title{Forecasting using a bagged model} \usage{ \method{forecast}{baggedModel}( object, h = ifelse(frequency(object$y) > 1, 2 * frequency(object$y), 10), ...
) } \arguments{ \item{object}{An object of class "\code{baggedModel}" resulting from a call to \code{\link{baggedModel}}.} \item{h}{Number of periods for forecasting.} \item{...}{Other arguments, passed on to the \code{\link{forecast}} function of the original method} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. An object of class "\code{forecast}" is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for bagged models. } \details{ Intervals are calculated as min and max values over the point forecasts from the models in the ensemble. I.e., the intervals are not prediction intervals, but give an indication of how different the forecasts within the ensemble are. } \examples{ fit <- baggedModel(WWWusage) fcast <- forecast(fit) plot(fcast) \dontrun{ fit2 <- baggedModel(WWWusage, fn="auto.arima") fcast2 <- forecast(fit2) plot(fcast2) accuracy(fcast2)} } \references{ Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging Exponential Smoothing Methods using STL Decomposition and Box-Cox Transformation. International Journal of Forecasting 32, 303-312. } \seealso{ \code{\link{baggedModel}}. } \author{ Christoph Bergmeir, Fotios Petropoulos } \keyword{ts} forecast/man/nsdiffs.Rd0000644000176200001440000000527514150370574014575 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/unitRoot.R \name{nsdiffs} \alias{nsdiffs} \title{Number of differences required for a seasonally stationary series} \usage{ nsdiffs( x, alpha = 0.05, m = frequency(x), test = c("seas", "ocsb", "hegy", "ch"), max.D = 1, ... ) } \arguments{ \item{x}{A univariate time series} \item{alpha}{Level of the test, possible values range from 0.01 to 0.1.} \item{m}{Deprecated. Length of seasonal period} \item{test}{Type of unit root test to use} \item{max.D}{Maximum number of seasonal differences allowed} \item{...}{Additional arguments to be passed on to the unit root test} } \value{ An integer indicating the number of differences required for stationarity. } \description{ Functions to estimate the number of differences required to make a given time series stationary. \code{nsdiffs} estimates the number of seasonal differences necessary. } \details{ \code{nsdiffs} uses seasonal unit root tests to determine the number of seasonal differences required for time series \code{x} to be made stationary (possibly with some lag-one differencing as well). 
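As a brief sketch of typical usage, the \code{test} argument selects among the tests listed below (the monthly \code{AirPassengers} series is assumed purely for illustration, and the tests will not always agree):

library(forecast)
nsdiffs(AirPassengers)                 # default: seasonal strength measure ("seas")
nsdiffs(AirPassengers, test = "ch")    # Canova-Hansen test
nsdiffs(AirPassengers, test = "ocsb")  # OCSB test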
Several different tests are available: \itemize{ \item If \code{test="seas"} (default), a measure of seasonal strength is used, where differencing is selected if the seasonal strength (Wang, Smith & Hyndman, 2006) exceeds 0.64 (based on minimizing MASE when forecasting using auto.arima on M3 and M4 data). \item If \code{test="ch"}, the Canova-Hansen (1995) test is used (with null hypothesis of deterministic seasonality) \item If \code{test="hegy"}, the Hylleberg, Engle, Granger & Yoo (1990) test is used. \item If \code{test="ocsb"}, the Osborn-Chui-Smith-Birchenhall (1988) test is used (with null hypothesis that a seasonal unit root exists). } } \examples{ nsdiffs(AirPassengers) } \references{ Wang, X, Smith, KA, Hyndman, RJ (2006) "Characteristic-based clustering for time series data", \emph{Data Mining and Knowledge Discovery}, \bold{13}(3), 335-364. Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the order of integration for consumption", \emph{Oxford Bulletin of Economics and Statistics} \bold{50}(4):361-377. Canova F and Hansen BE (1995) "Are Seasonal Patterns Constant over Time? A Test for Seasonal Stability", \emph{Journal of Business and Economic Statistics} \bold{13}(3):237-252. Hylleberg S, Engle R, Granger C and Yoo B (1990) "Seasonal integration and cointegration.", \emph{Journal of Econometrics} \bold{44}(1), pp. 215-238. } \seealso{ \code{\link{auto.arima}}, \code{\link{ndiffs}}, \code{\link{ocsb.test}}, \code{\link[uroot]{hegy.test}}, and \code{\link[uroot]{ch.test}} } \author{ Rob J Hyndman, Slava Razbash and Mitchell O'Hara-Wild } forecast/man/Arima.Rd0000644000176200001440000001067214150370574014167 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arima.R \name{Arima} \alias{Arima} \alias{print.ARIMA} \alias{summary.Arima} \alias{as.character.Arima} \title{Fit ARIMA model to univariate time series} \usage{ Arima( y, order = c(0, 0, 0), seasonal = c(0, 0, 0), xreg = NULL, include.mean = TRUE, include.drift = FALSE, include.constant, lambda = model$lambda, biasadj = FALSE, method = c("CSS-ML", "ML", "CSS"), model = NULL, x = y, ... ) } \arguments{ \item{y}{a univariate time series of class \code{ts}.} \item{order}{A specification of the non-seasonal part of the ARIMA model: the three components (p, d, q) are the AR order, the degree of differencing, and the MA order.} \item{seasonal}{A specification of the seasonal part of the ARIMA model, plus the period (which defaults to frequency(y)). This should be a list with components order and period, but a specification of just a numeric vector of length 3 will be turned into a suitable list with the specification as the order.} \item{xreg}{Optionally, a numerical vector or matrix of external regressors, which must have the same number of rows as y. It should not be a data frame.} \item{include.mean}{Should the ARIMA model include a mean term? The default is \code{TRUE} for undifferenced series, \code{FALSE} for differenced ones (where a mean would not affect the fit nor predictions).} \item{include.drift}{Should the ARIMA model include a linear drift term? (i.e., a linear regression with ARIMA errors is fitted.) The default is \code{FALSE}.} \item{include.constant}{If \code{TRUE}, then \code{include.mean} is set to be \code{TRUE} for undifferenced series and \code{include.drift} is set to be \code{TRUE} for differenced series. Note that if there is more than one difference taken, no constant is included regardless of the value of this argument. 
This is deliberate as otherwise quadratic and higher order polynomial trends would be induced.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{method}{Fitting method: maximum likelihood or minimize conditional sum-of-squares. The default (unless there are missing values) is to use conditional-sum-of-squares to find starting values, then maximum likelihood.} \item{model}{Output from a previous call to \code{Arima}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.} \item{x}{Deprecated. Included for backwards compatibility.} \item{...}{Additional arguments to be passed to \code{\link[stats]{arima}}.} } \value{ See the \code{\link[stats]{arima}} function in the stats package. The additional objects returned are \item{x}{The time series data} \item{xreg}{The regressors used in fitting (when relevant).} \item{sigma2}{The bias adjusted MLE of the innovations variance.} } \description{ Largely a wrapper for the \code{\link[stats]{arima}} function in the stats package. The main difference is that this function allows a drift term. It is also possible to take an ARIMA model from a previous call to \code{Arima} and re-apply it to the data \code{y}. } \details{ See the \code{\link[stats]{arima}} function in the stats package. } \examples{ library(ggplot2) WWWusage \%>\% Arima(order=c(3,1,0)) \%>\% forecast(h=20) \%>\% autoplot # Fit model to first few years of AirPassengers data air.model <- Arima(window(AirPassengers,end=1956+11/12),order=c(0,1,1), seasonal=list(order=c(0,1,1),period=12),lambda=0) plot(forecast(air.model,h=48)) lines(AirPassengers) # Apply fitted model to later data air.model2 <- Arima(window(AirPassengers,start=1957),model=air.model) # Forecast accuracy measures on the log scale. # in-sample one-step forecasts. accuracy(air.model) # out-of-sample one-step forecasts. accuracy(air.model2) # out-of-sample multi-step forecasts accuracy(forecast(air.model,h=48,lambda=NULL), log(window(AirPassengers,start=1957))) } \seealso{ \code{\link{auto.arima}}, \code{\link{forecast.Arima}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/arima.errors.Rd0000644000176200001440000000153114150370574015534 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arima.R \name{arima.errors} \alias{arima.errors} \title{Errors from a regression model with ARIMA errors} \usage{ arima.errors(object) } \arguments{ \item{object}{An object containing a time series model of class \code{Arima}.} } \value{ A \code{ts} object } \description{ Returns time series of the regression residuals from a fitted ARIMA model. } \details{ This is a deprecated function which is identical to \code{\link{residuals.Arima}(object, type="regression")} Regression residuals are equal to the original data minus the effect of any regression variables. If there are no regression variables, the errors will be identical to the original series (possibly adjusted to have zero mean). } \seealso{ \code{\link{residuals.Arima}}. 
} \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.StructTS.Rd0000644000176200001440000000531514150370574016474 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast2.R \name{forecast.StructTS} \alias{forecast.StructTS} \title{Forecasting using Structural Time Series models} \usage{ \method{forecast}{StructTS}( object, h = ifelse(object$coef["epsilon"] > 1e-10, 2 * object$xtsp[3], 10), level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{StructTS}". Usually the result of a call to \code{\link[stats]{StructTS}}.} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Other arguments.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.StructTS}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for univariate structural time series models. } \details{ This function calls \code{predict.StructTS} and constructs an object of class "\code{forecast}" from the results. } \examples{ fit <- StructTS(WWWusage,"level") plot(forecast(fit)) } \seealso{ \code{\link[stats]{StructTS}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/ma.Rd0000644000176200001440000000262014150370574013525 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/season.R \name{ma} \alias{ma} \title{Moving-average smoothing} \usage{ ma(x, order, centre = TRUE) } \arguments{ \item{x}{Univariate time series} \item{order}{Order of moving average smoother} \item{centre}{If TRUE, then the moving average is centred for even orders.} } \value{ Numerical time series object containing the simple moving average smoothed values. } \description{ \code{ma} computes a simple moving average smoother of a given time series. 
} \details{ The moving average smoother averages the nearest \code{order} periods of each observation. As neighbouring observations of a time series are likely to be similar in value, averaging eliminates some of the randomness in the data, leaving a smooth trend-cycle component. \deqn{\hat{T}_{t} = \frac{1}{m} \sum_{j=-k}^k y_{t+j}}{T[t]=1/m(y[t-k]+y[t-k+1]+\ldots+y[t]+\ldots+y[t+k-1]+y[t+k])} where \eqn{k=\frac{m-1}{2}}{k=(m-1)/2} and \eqn{m} is the \code{order}. When an even \code{order} is specified, the observations averaged will include one more observation from the future than the past (k is rounded up). If \code{centre} is TRUE, the values from two moving averages (where k is rounded up and down respectively) are averaged, centring the moving average. } \examples{ plot(wineind) sm <- ma(wineind,order=12) lines(sm,col="red") } \seealso{ \code{\link[stats]{decompose}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/gold.Rd0000644000176200001440000000052514150370574014057 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{gold} \alias{gold} \title{Daily morning gold prices} \format{ Time series data } \usage{ gold } \description{ Daily morning gold prices in US dollars. 1 January 1985 -- 31 March 1989. } \examples{ tsdisplay(gold) } \keyword{datasets} forecast/man/dm.test.Rd0000644000176200001440000000625514456202551014514 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/DM2.R \name{dm.test} \alias{dm.test} \title{Diebold-Mariano test for predictive accuracy} \usage{ dm.test( e1, e2, alternative = c("two.sided", "less", "greater"), h = 1, power = 2, varestimator = c("acf", "bartlett") ) } \arguments{ \item{e1}{Forecast errors from method 1.} \item{e2}{Forecast errors from method 2.} \item{alternative}{a character string specifying the alternative hypothesis, must be one of \code{"two.sided"} (default), \code{"greater"} or \code{"less"}. You can specify just the initial letter.} \item{h}{The forecast horizon used in calculating \code{e1} and \code{e2}.} \item{power}{The power used in the loss function. Usually 1 or 2.} \item{varestimator}{a character string specifying the long-run variance estimator. Options are \code{"acf"} (default) or \code{"bartlett"}.} } \value{ A list with class \code{"htest"} containing the following components: \item{statistic}{the value of the DM-statistic.} \item{parameter}{the forecast horizon and loss function power used in the test.} \item{alternative}{a character string describing the alternative hypothesis.} \item{varestimator}{a character string describing the long-run variance estimator.} \item{p.value}{the p-value for the test.} \item{method}{a character string with the value "Diebold-Mariano Test".} \item{data.name}{a character vector giving the names of the two error series.} } \description{ The Diebold-Mariano test compares the forecast accuracy of two forecast methods. } \details{ This function implements the modified test proposed by Harvey, Leybourne and Newbold (1997). The null hypothesis is that the two methods have the same forecast accuracy. For \code{alternative="less"}, the alternative hypothesis is that method 2 is less accurate than method 1. For \code{alternative="greater"}, the alternative hypothesis is that method 2 is more accurate than method 1. For \code{alternative="two.sided"}, the alternative hypothesis is that method 1 and method 2 have different levels of accuracy.
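A minimal sketch of a one-sided comparison (mirroring the in-sample example below; the \code{WWWusage} series is assumed):

library(forecast)
e1 <- residuals(ets(WWWusage))
e2 <- residuals(auto.arima(WWWusage))
# alternative = "greater": is method 2 (ARIMA) more accurate than method 1 (ETS)?
dm.test(e1, e2, alternative = "greater", h = 1, power = 2)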
The long-run variance estimator can be either the auto-correlation estimator \code{varestimator = "acf"}, or the estimator based on Bartlett weights \code{varestimator = "bartlett"}, which ensures a positive estimate. Both long-run variance estimators are proposed in Diebold and Mariano (1995). } \examples{ # Test on in-sample one-step forecasts f1 <- ets(WWWusage) f2 <- auto.arima(WWWusage) accuracy(f1) accuracy(f2) dm.test(residuals(f1), residuals(f2), h = 1) # Test on out-of-sample one-step forecasts f1 <- ets(WWWusage[1:80]) f2 <- auto.arima(WWWusage[1:80]) f1.out <- ets(WWWusage[81:100], model = f1) f2.out <- Arima(WWWusage[81:100], model = f2) accuracy(f1.out) accuracy(f2.out) dm.test(residuals(f1.out), residuals(f2.out), h = 1) } \references{ Diebold, F.X. and Mariano, R.S. (1995) Comparing predictive accuracy. \emph{Journal of Business and Economic Statistics}, \bold{13}, 253-263. Harvey, D., Leybourne, S., & Newbold, P. (1997). Testing the equality of prediction mean squared errors. \emph{International Journal of Forecasting}, \bold{13}(2), 281-291. } \author{ George Athanasopoulos and Kirill Kuroptev } \keyword{htest} \keyword{ts} forecast/man/plot.forecast.Rd0000644000176200001440000000704014150370574015714 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast.R, R/ggplot.R, R/spline.R \name{plot.forecast} \alias{plot.forecast} \alias{autoplot.forecast} \alias{autoplot.splineforecast} \alias{autolayer.forecast} \alias{plot.splineforecast} \title{Forecast plot} \usage{ \method{plot}{forecast}( x, include, PI = TRUE, showgap = TRUE, shaded = TRUE, shadebars = (length(x$mean) < 5), shadecols = NULL, col = 1, fcol = 4, pi.col = 1, pi.lty = 2, ylim = NULL, main = NULL, xlab = "", ylab = "", type = "l", flty = 1, flwd = 2, ... ) \method{autoplot}{forecast}( object, include, PI = TRUE, shadecols = c("#596DD5", "#D5DBFF"), fcol = "#0000AA", flwd = 0.5, ... ) \method{autoplot}{splineforecast}(object, PI = TRUE, ...) \method{autolayer}{forecast}(object, series = NULL, PI = TRUE, showgap = TRUE, ...) \method{plot}{splineforecast}(x, fitcol = 2, type = "o", pch = 19, ...) } \arguments{ \item{x}{Forecast object produced by \code{\link{forecast}}.} \item{include}{number of values from time series to include in plot. Default is all values.} \item{PI}{Logical flag indicating whether to plot prediction intervals.} \item{showgap}{If \code{showgap=FALSE}, the gap between the historical observations and the forecasts is removed.} \item{shaded}{Logical flag indicating whether prediction intervals should be shaded (\code{TRUE}) or plotted as lines (\code{FALSE}).} \item{shadebars}{Logical flag indicating if prediction intervals should be plotted as shaded bars (if \code{TRUE}) or a shaded polygon (if \code{FALSE}). Ignored if \code{shaded=FALSE}. Bars are plotted by default if there are fewer than five forecast horizons.} \item{shadecols}{Colors for shaded prediction intervals. To get default colors used prior to v3.26, set \code{shadecols="oldstyle"}.} \item{col}{Colour for the data line.} \item{fcol}{Colour for the forecast line.} \item{pi.col}{If \code{shaded=FALSE} and \code{PI=TRUE}, the prediction intervals are plotted in this colour.} \item{pi.lty}{If \code{shaded=FALSE} and \code{PI=TRUE}, the prediction intervals are plotted using this line type.} \item{ylim}{Limits on y-axis.} \item{main}{Main title.} \item{xlab}{X-axis label.} \item{ylab}{Y-axis label.} \item{type}{1-character string giving the type of plot desired.
As for \code{\link[graphics]{plot.default}}.} \item{flty}{Line type for the forecast line.} \item{flwd}{Line width for the forecast line.} \item{...}{Other plotting parameters to affect the plot.} \item{object}{Forecast object produced by \code{\link{forecast}}. Used for ggplot graphics (S3 method consistency).} \item{series}{Matches an unidentified forecast layer with a coloured object on the plot.} \item{fitcol}{Line colour for fitted values.} \item{pch}{Plotting character (if \code{type=="p"} or \code{type=="o"}).} } \value{ None. } \description{ Plots historical data with forecasts and prediction intervals. } \details{ \code{autoplot} will produce a ggplot object. } \examples{ library(ggplot2) wine.fit <- hw(wineind,h=48) plot(wine.fit) autoplot(wine.fit) fit <- tslm(wineind ~ fourier(wineind,4)) fcast <- forecast(fit, newdata=data.frame(fourier(wineind,4,20))) autoplot(fcast) fcast <- splinef(airmiles,h=5) plot(fcast) autoplot(fcast) } \references{ Hyndman and Athanasopoulos (2018) \emph{Forecasting: principles and practice}, 2nd edition, OTexts: Melbourne, Australia. \url{https://otexts.com/fpp2/} } \seealso{ \code{\link[stats]{plot.ts}} } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/woolyrnq.Rd0000644000176200001440000000072514150370574015026 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{woolyrnq} \alias{woolyrnq} \title{Quarterly production of woollen yarn in Australia} \format{ Time series data } \source{ Time Series Data Library. \url{https://pkg.yangzhuoranyang.com/tsdl/} } \usage{ woolyrnq } \description{ Quarterly production of woollen yarn in Australia: tonnes. Mar 1965 -- Sep 1994. } \examples{ tsdisplay(woolyrnq) } \keyword{datasets} forecast/man/tbats.Rd0000644000176200001440000000740514150370574014253 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tbats.R \name{tbats} \alias{tbats} \alias{as.character.tbats} \alias{print.tbats} \title{TBATS model (Exponential smoothing state space model with Box-Cox transformation, ARMA errors, Trend and Seasonal components)} \usage{ tbats( y, use.box.cox = NULL, use.trend = NULL, use.damped.trend = NULL, seasonal.periods = NULL, use.arma.errors = TRUE, use.parallel = length(y) > 1000, num.cores = 2, bc.lower = 0, bc.upper = 1, biasadj = FALSE, model = NULL, ... ) } \arguments{ \item{y}{The time series to be forecast. Can be \code{numeric}, \code{msts} or \code{ts}. Only univariate time series are supported.} \item{use.box.cox}{\code{TRUE/FALSE} indicates whether to use the Box-Cox transformation or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{use.trend}{\code{TRUE/FALSE} indicates whether to include a trend or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{use.damped.trend}{\code{TRUE/FALSE} indicates whether to include a damping parameter in the trend or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{seasonal.periods}{If \code{y} is \code{numeric} then seasonal periods can be specified with this parameter.} \item{use.arma.errors}{\code{TRUE/FALSE} indicates whether to include ARMA errors or not. If \code{TRUE} the best fit is selected by AIC.
If \code{FALSE} then the selection algorithm does not consider ARMA errors.} \item{use.parallel}{\code{TRUE/FALSE} indicates whether or not to use parallel processing.} \item{num.cores}{The number of parallel processes to be used if using parallel processing. If \code{NULL} then the number of logical cores is detected and all available cores are used.} \item{bc.lower}{The lower limit (inclusive) for the Box-Cox transformation.} \item{bc.upper}{The upper limit (inclusive) for the Box-Cox transformation.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If TRUE, point forecasts and fitted values are mean forecasts. Otherwise, these points can be considered the median of the forecast densities.} \item{model}{Output from a previous call to \code{tbats}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.} \item{...}{Additional arguments to be passed to \code{auto.arima} when choosing an ARMA(p, q) model for the errors. (Note that xreg will be ignored, as will any arguments concerning seasonality and differencing, but arguments controlling the values of p and q will be used.)} } \value{ An object with class \code{c("tbats", "bats")}. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{bats} and associated functions. The fitted model is designated TBATS(omega, p,q, phi, <m1,k1>,...,<mJ,kJ>) where omega is the Box-Cox parameter and phi is the damping parameter; the error is modelled as an ARMA(p,q) process and m1,...,mJ list the seasonal periods used in the model and k1,...,kJ are the corresponding number of Fourier terms used for each seasonality. } \description{ Fits a TBATS model applied to \code{y}, as described in De Livera, Hyndman & Snyder (2011). Parallel processing is used by default to speed up the computations. } \examples{ \dontrun{ fit <- tbats(USAccDeaths) plot(forecast(fit)) taylor.fit <- tbats(taylor) plot(forecast(taylor.fit))} } \references{ De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), Forecasting time series with complex seasonal patterns using exponential smoothing, \emph{Journal of the American Statistical Association}, \bold{106}(496), 1513-1527. } \seealso{ \code{\link{tbats.components}}. } \author{ Slava Razbash and Rob J Hyndman } \keyword{ts} forecast/man/dshw.Rd0000644000176200001440000001004214150370574014072 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/dshw.r \name{dshw} \alias{dshw} \title{Double-Seasonal Holt-Winters Forecasting} \usage{ dshw( y, period1 = NULL, period2 = NULL, h = 2 * max(period1, period2), alpha = NULL, beta = NULL, gamma = NULL, omega = NULL, phi = NULL, lambda = NULL, biasadj = FALSE, armethod = TRUE, model = NULL ) } \arguments{ \item{y}{Either an \code{\link{msts}} object with two seasonal periods or a numeric vector.} \item{period1}{Period of the shorter seasonal period. Only used if \code{y} is not an \code{\link{msts}} object.} \item{period2}{Period of the longer seasonal period. Only used if \code{y} is not an \code{\link{msts}} object.} \item{h}{Number of periods for forecasting.} \item{alpha}{Smoothing parameter for the level. If \code{NULL}, the parameter is estimated using least squares.} \item{beta}{Smoothing parameter for the slope. If \code{NULL}, the parameter is estimated using least squares.} \item{gamma}{Smoothing parameter for the first seasonal period.
If \code{NULL}, the parameter is estimated using least squares.} \item{omega}{Smoothing parameter for the second seasonal period. If \code{NULL}, the parameter is estimated using least squares.} \item{phi}{Autoregressive parameter. If \code{NULL}, the parameter is estimated using least squares.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{armethod}{If TRUE, the forecasts are adjusted using an AR(1) model for the errors.} \item{model}{If it's specified, an existing model is applied to a new data set.} } \value{ An object of class "\code{forecast}" which is a list that includes the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{x}{The original time series.} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{dshw}. } \description{ Returns forecasts using Taylor's (2003) Double-Seasonal Holt-Winters method. } \details{ Taylor's (2003) double-seasonal Holt-Winters method uses additive trend and multiplicative seasonality, where there are two seasonal components which are multiplied together. For example, with a series of half-hourly data, one would set \code{period1=48} for the daily period and \code{period2=336} for the weekly period. The smoothing parameter notation used here is different from that in Taylor (2003); instead it matches that used in Hyndman et al (2008) and that used for the \code{\link{ets}} function. } \examples{ \dontrun{ fcast <- dshw(taylor) plot(fcast) t <- seq(0,5,by=1/20) x <- exp(sin(2*pi*t) + cos(2*pi*t*4) + rnorm(length(t),0,.1)) fit <- dshw(x,20,5) plot(fit) } } \references{ Taylor, J.W. (2003) Short-term electricity demand forecasting using double seasonal exponential smoothing. \emph{Journal of the Operational Research Society}, \bold{54}, 799-805. Hyndman, R.J., Koehler, A.B., Ord, J.K., and Snyder, R.D. (2008) \emph{Forecasting with exponential smoothing: the state space approach}, Springer-Verlag. \url{http://www.exponentialsmoothing.net}. } \seealso{ \code{\link[stats]{HoltWinters}}, \code{\link{ets}}. 
} \author{ Rob J Hyndman } \keyword{ts} forecast/man/forecast.stl.Rd0000644000176200001440000001636514150370574015552 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mstl.R \name{forecast.stl} \alias{forecast.stl} \alias{stlm} \alias{forecast.stlm} \alias{stlf} \title{Forecasting using stl objects} \usage{ \method{forecast}{stl}( object, method = c("ets", "arima", "naive", "rwdrift"), etsmodel = "ZZN", forecastfunction = NULL, h = frequency(object$time.series) * 2, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = NULL, xreg = NULL, newxreg = NULL, allow.multiplicative.trend = FALSE, ... ) stlm( y, s.window = 7 + 4 * seq(6), robust = FALSE, method = c("ets", "arima"), modelfunction = NULL, model = NULL, etsmodel = "ZZN", lambda = NULL, biasadj = FALSE, xreg = NULL, allow.multiplicative.trend = FALSE, x = y, ... ) \method{forecast}{stlm}( object, h = 2 * object$m, level = c(80, 95), fan = FALSE, lambda = object$lambda, biasadj = NULL, newxreg = NULL, allow.multiplicative.trend = FALSE, ... ) stlf( y, h = frequency(x) * 2, s.window = 7 + 4 * seq(6), t.window = NULL, robust = FALSE, lambda = NULL, biasadj = FALSE, x = y, ... ) } \arguments{ \item{object}{An object of class \code{stl} or \code{stlm}. Usually the result of a call to \code{\link[stats]{stl}} or \code{stlm}.} \item{method}{Method to use for forecasting the seasonally adjusted series.} \item{etsmodel}{The ets model specification passed to \code{\link[forecast]{ets}}. By default it allows any non-seasonal model. If \code{method!="ets"}, this argument is ignored.} \item{forecastfunction}{An alternative way of specifying the function for forecasting the seasonally adjusted series. If \code{forecastfunction} is not \code{NULL}, then \code{method} is ignored. Otherwise \code{method} is used to specify the forecasting method to be used.} \item{h}{Number of periods for forecasting.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{xreg}{Historical regressors to be used in \code{\link[forecast]{auto.arima}()} when \code{method=="arima"}.} \item{newxreg}{Future regressors to be used in \code{\link[forecast]{forecast.Arima}()}.} \item{allow.multiplicative.trend}{If TRUE, then ETS models with multiplicative trends are allowed. Otherwise, only additive or no trend ETS models are permitted.} \item{...}{Other arguments passed to \code{forecast.stl}, \code{modelfunction} or \code{forecastfunction}.} \item{y}{A univariate numeric time series of class \code{ts}} \item{s.window}{Either the character string ``periodic'' or the span (in lags) of the loess window for seasonal extraction.} \item{robust}{If \code{TRUE}, robust fitting will used in the loess procedure within \code{\link[stats]{stl}}.} \item{modelfunction}{An alternative way of specifying the function for modelling the seasonally adjusted series. 
If \code{modelfunction} is not \code{NULL}, then \code{method} is ignored. Otherwise \code{method} is used to specify the time series model to be used.} \item{model}{Output from a previous call to \code{stlm}. If a \code{stlm} model is passed, this same model is fitted to y without re-estimating any parameters.} \item{x}{Deprecated. Included for backwards compatibility.} \item{t.window}{A number to control the smoothness of the trend. See \code{\link[stats]{stl}} for details.} } \value{ \code{stlm} returns an object of class \code{stlm}. The other functions return objects of class \code{forecast}. There are many methods for working with \code{\link{forecast}} objects, including \code{summary} to obtain and print a summary of the results, and \code{plot} to produce a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features. } \description{ Forecasts of STL objects are obtained by applying a non-seasonal forecasting method to the seasonally adjusted data and re-seasonalizing using the last year of the seasonal component. } \details{ \code{stlm} takes a time series \code{y}, applies an STL decomposition, and models the seasonally adjusted data using the model passed as \code{modelfunction} or specified using \code{method}. It returns an object that includes the original STL decomposition and a time series model fitted to the seasonally adjusted data. This object can then be passed to \code{forecast.stlm} for forecasting. \code{forecast.stlm} forecasts the seasonally adjusted data, then re-seasonalizes the results by adding back the last year of the estimated seasonal component. \code{stlf} combines \code{stlm} and \code{forecast.stlm}. It takes a \code{ts} argument, applies an STL decomposition, models the seasonally adjusted data, reseasonalizes, and returns the forecasts. However, it allows more general forecasting methods to be specified via \code{forecastfunction}. \code{forecast.stl} is similar to \code{stlf} except that it takes the STL decomposition as the first argument, instead of the time series. Note that the prediction intervals ignore the uncertainty associated with the seasonal component. They are computed using the prediction intervals from the seasonally adjusted series, which are then reseasonalized using the last year of the seasonal component. The time series model for the seasonally adjusted data can be specified in \code{stlm} using either \code{method} or \code{modelfunction}. The \code{method} argument provides a shorthand way of specifying \code{modelfunction} for a few special cases. More generally, \code{modelfunction} can be any function with first argument a \code{ts} object, that returns an object that can be passed to \code{\link{forecast}}. For example, \code{modelfunction=ar} uses the \code{\link{ar}} function for modelling the seasonally adjusted series. The forecasting method for the seasonally adjusted data can be specified in \code{stlf} and \code{forecast.stl} using either \code{method} or \code{forecastfunction}. The \code{method} argument provides a shorthand way of specifying \code{forecastfunction} for a few special cases. More generally, \code{forecastfunction} can be any function with first argument a \code{ts} object, and other arguments \code{h} and \code{level}, which returns an object of class \code{\link{forecast}}.
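A minimal sketch of such a function (the name \code{sesfun} is hypothetical, and \code{ses} is used only as a simple non-seasonal forecaster):

library(forecast)
# sesfun: first argument a ts, plus h and level, returning a "forecast" object
sesfun <- function(x, h, level) ses(x, h = h, level = level)
fc <- stlf(USAccDeaths, forecastfunction = sesfun)
plot(fc)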
For example, \code{forecastfunction=thetaf} uses the \code{\link{thetaf}} function for forecasting the seasonally adjusted series. } \examples{ tsmod <- stlm(USAccDeaths, modelfunction = ar) plot(forecast(tsmod, h = 36)) decomp <- stl(USAccDeaths, s.window = "periodic") plot(forecast(decomp)) plot(stlf(AirPassengers, lambda = 0)) } \seealso{ \code{\link[stats]{stl}}, \code{\link{forecast.ets}}, \code{\link{forecast.Arima}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/checkresiduals.Rd0000644000176200001440000000316214456202551016121 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/checkresiduals.R \name{checkresiduals} \alias{checkresiduals} \title{Check that residuals from a time series model look like white noise} \usage{ checkresiduals(object, lag, test, plot = TRUE, ...) } \arguments{ \item{object}{Either a time series model, a forecast object, or a time series (assumed to be residuals).} \item{lag}{Number of lags to use in the Ljung-Box or Breusch-Godfrey test. If missing, it is set to \code{min(10,n/5)} for non-seasonal data, and \code{min(2m, n/5)} for seasonal data, where \code{n} is the length of the series, and \code{m} is the seasonal period of the data. It is further constrained to be at least \code{df+3} where \code{df} is the degrees of freedom of the model. This ensures there are at least 3 degrees of freedom used in the chi-squared test.} \item{test}{Test to use for serial correlation. By default, if \code{object} is of class \code{lm}, then \code{test="BG"}. Otherwise, \code{test="LB"}. Setting \code{test=FALSE} will prevent the test results being printed.} \item{plot}{Logical. If \code{TRUE}, will produce the plot.} \item{...}{Other arguments are passed to \code{\link{ggtsdisplay}}.} } \value{ None } \description{ If \code{plot=TRUE}, produces a time plot of the residuals, the corresponding ACF, and a histogram. If \code{test} is not \code{FALSE}, the output from either a Ljung-Box test or Breusch-Godfrey test is printed. } \examples{ fit <- ets(WWWusage) checkresiduals(fit) } \seealso{ \code{\link{ggtsdisplay}}, \code{\link[stats]{Box.test}}, \code{\link[lmtest]{bgtest}} } \author{ Rob J Hyndman } forecast/man/gghistogram.Rd0000644000176200001440000000171514150370574015447 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{gghistogram} \alias{gghistogram} \title{Histogram with optional normal and kernel density functions} \usage{ gghistogram( x, add.normal = FALSE, add.kde = FALSE, add.rug = TRUE, bins, boundary = 0 ) } \arguments{ \item{x}{a numerical vector.} \item{add.normal}{Add a normal density function for comparison} \item{add.kde}{Add a kernel density estimate for comparison} \item{add.rug}{Add a rug plot on the horizontal axis} \item{bins}{The number of bins to use for the histogram. Selected by default using the Friedman-Diaconis rule given by \code{\link[grDevices]{nclass.FD}}} \item{boundary}{A boundary between two bins.} } \value{ None. } \description{ Plots a histogram and density estimates using ggplot. 
} \examples{ gghistogram(lynx, add.kde=TRUE) } \seealso{ \code{\link[graphics]{hist}}, \code{\link[ggplot2]{geom_histogram}} } \author{ Rob J Hyndman } forecast/man/subset.ts.Rd0000644000176200001440000000404514150370574015065 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/subset.R \name{subset.ts} \alias{subset.ts} \alias{subset.msts} \title{Subsetting a time series} \usage{ \method{subset}{ts}( x, subset = NULL, month = NULL, quarter = NULL, season = NULL, start = NULL, end = NULL, ... ) \method{subset}{msts}(x, subset = NULL, start = NULL, end = NULL, ...) } \arguments{ \item{x}{a univariate time series to be subsetted} \item{subset}{optional logical expression indicating elements to keep; missing values are taken as false. \code{subset} must be the same length as \code{x}.} \item{month}{Numeric or character vector of months to retain. Partial matching on month names used.} \item{quarter}{Numeric or character vector of quarters to retain.} \item{season}{Numeric vector of seasons to retain.} \item{start}{Index of start of contiguous subset.} \item{end}{Index of end of contiguous subset.} \item{...}{Other arguments, unused.} } \value{ If \code{subset} is used, a numeric vector is returned with no ts attributes. If \code{start} and/or \code{end} are used, a ts object is returned consisting of x[start:end], with the appropriate time series attributes retained. Otherwise, a ts object is returned with frequency equal to the length of \code{month}, \code{quarter} or \code{season}. } \description{ Various types of subsetting of a time series. Allows subsetting by index values (unlike \code{\link[stats]{window}}). Also allows extraction of the values of a specific season or subset of seasons in each year. For example, to extract all values for the month of May from a time series. } \details{ If character values for months are used, either upper or lower case may be used, and partial unambiguous names are acceptable. Possible character values for quarters are \code{"Q1"}, \code{"Q2"}, \code{"Q3"}, and \code{"Q4"}. } \examples{ plot(subset(gas,month="November")) subset(woolyrnq,quarter=3) subset(USAccDeaths, start=49) } \seealso{ \code{\link[base]{subset}}, \code{\link[stats]{window}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/BoxCox.lambda.Rd0000644000176200001440000000305614150370574015555 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/guerrero.R \name{BoxCox.lambda} \alias{BoxCox.lambda} \title{Automatic selection of Box Cox transformation parameter} \usage{ BoxCox.lambda(x, method = c("guerrero", "loglik"), lower = -1, upper = 2) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}} \item{method}{Choose method to be used in calculating lambda.} \item{lower}{Lower limit for possible lambda values.} \item{upper}{Upper limit for possible lambda values.} } \value{ a number indicating the Box-Cox transformation parameter. } \description{ If \code{method=="guerrero"}, Guerrero's (1993) method is used, where lambda minimizes the coefficient of variation for subseries of \code{x}. } \details{ If \code{method=="loglik"}, the value of lambda is chosen to maximize the profile log likelihood of a linear model fitted to \code{x}. For non-seasonal data, a linear time trend is fitted while for seasonal data, a linear time trend with seasonal dummy variables is used. 
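As a sketch, the two methods can be compared directly (they will not necessarily agree on the value of lambda):

library(forecast)
BoxCox.lambda(AirPassengers, method = "guerrero") # default: minimises the coefficient of variation
BoxCox.lambda(AirPassengers, method = "loglik")   # maximises the profile log likelihood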
} \examples{ lambda <- BoxCox.lambda(AirPassengers,lower=0) air.fit <- Arima(AirPassengers, order=c(0,1,1), seasonal=list(order=c(0,1,1),period=12), lambda=lambda) plot(forecast(air.fit)) } \references{ Box, G. E. P. and Cox, D. R. (1964) An analysis of transformations. \emph{JRSS B} \bold{26} 211--246. Guerrero, V.M. (1993) Time-series analysis supported by power transformations. \emph{Journal of Forecasting}, \bold{12}, 37--48. } \seealso{ \code{\link{BoxCox}} } \author{ Leanne Chhay and Rob J Hyndman } \keyword{ts} forecast/man/simulate.ets.Rd0000644000176200001440000001023514207263356015550 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/simulate.R, R/simulate_tbats.R \name{simulate.ets} \alias{simulate.ets} \alias{simulate.Arima} \alias{simulate.ar} \alias{simulate.lagwalk} \alias{simulate.fracdiff} \alias{simulate.nnetar} \alias{simulate.modelAR} \alias{simulate.tbats} \title{Simulation from a time series model} \usage{ \method{simulate}{ets}( object, nsim = length(object$x), seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, ... ) \method{simulate}{Arima}( object, nsim = length(object$x), seed = NULL, xreg = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ... ) \method{simulate}{ar}( object, nsim = object$n.used, seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, ... ) \method{simulate}{lagwalk}( object, nsim = length(object$x), seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ... ) \method{simulate}{fracdiff}( object, nsim = object$n, seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, ... ) \method{simulate}{nnetar}( object, nsim = length(object$x), seed = NULL, xreg = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ... ) \method{simulate}{modelAR}( object, nsim = length(object$x), seed = NULL, xreg = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, lambda = object$lambda, ... ) \method{simulate}{tbats}( object, nsim = length(object$y), seed = NULL, future = TRUE, bootstrap = FALSE, innov = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{ets}", "\code{Arima}", "\code{ar}" or "\code{nnetar}".} \item{nsim}{Number of periods for the simulated series. Ignored if either \code{xreg} or \code{innov} are not \code{NULL}. Otherwise the default is the length of series used to train model (or 100 if no data found).} \item{seed}{Either \code{NULL} or an integer that will be used in a call to \code{\link[base]{set.seed}} before simulating the time series. The default, \code{NULL}, will not change the random generator state.} \item{future}{Produce sample paths that are future to and conditional on the data in \code{object}. Otherwise simulate unconditionally.} \item{bootstrap}{Do simulation using resampled errors rather than normally distributed errors or errors provided as \code{innov}.} \item{innov}{A vector of innovations to use as the error series. Ignored if \code{bootstrap==TRUE}. If not \code{NULL}, the value of \code{nsim} is set to length of \code{innov}.} \item{...}{Other arguments, not currently used.} \item{xreg}{New values of \code{xreg} to be used for forecasting. The value of \code{nsim} is set to the number of rows of \code{xreg} if it is not \code{NULL}.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. 
Otherwise, data transformed before model is estimated.} } \value{ An object of class "\code{ts}". } \description{ Returns a time series based on the model object \code{object}. } \details{ With \code{simulate.Arima()}, the \code{object} should be produced by \code{\link{Arima}} or \code{\link{auto.arima}}, rather than \code{\link[stats]{arima}}. By default, the error series is assumed normally distributed and generated using \code{\link[stats]{rnorm}}. If \code{innov} is present, it is used instead. If \code{bootstrap=TRUE} and \code{innov=NULL}, the residuals are resampled instead. When \code{future=TRUE}, the sample paths are conditional on the data. When \code{future=FALSE} and the model is stationary, the sample paths do not depend on the data at all. When \code{future=FALSE} and the model is non-stationary, the location of the sample paths is arbitrary, so they all start at the value of the first observation. } \examples{ fit <- ets(USAccDeaths) plot(USAccDeaths, xlim = c(1973, 1982)) lines(simulate(fit, 36), col = "red") } \seealso{ \code{\link{ets}}, \code{\link{Arima}}, \code{\link{auto.arima}}, \code{\link{ar}}, \code{\link{arfima}}, \code{\link{nnetar}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/is.ets.Rd0000644000176200001440000000120614150370574014334 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/acf.R, R/arima.R, R/baggedModel.R, R/bats.R, % R/ets.R, R/modelAR.R, R/mstl.R, R/nnetar.R \name{is.acf} \alias{is.acf} \alias{is.Arima} \alias{is.baggedModel} \alias{is.bats} \alias{is.ets} \alias{is.modelAR} \alias{is.stlm} \alias{is.nnetar} \alias{is.nnetarmodels} \title{Is an object a particular model type?} \usage{ is.acf(x) is.Arima(x) is.baggedModel(x) is.bats(x) is.ets(x) is.modelAR(x) is.stlm(x) is.nnetar(x) is.nnetarmodels(x) } \arguments{ \item{x}{object to be tested} } \description{ Returns true if the model object is of a particular type } forecast/man/tsclean.Rd0000644000176200001440000000235214207263356014565 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/clean.R \name{tsclean} \alias{tsclean} \title{Identify and replace outliers and missing values in a time series} \usage{ tsclean(x, replace.missing = TRUE, iterate = 2, lambda = NULL) } \arguments{ \item{x}{time series} \item{replace.missing}{If TRUE, it not only replaces outliers, but also interpolates missing values} \item{iterate}{the number of iterations required} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} } \value{ Time series } \description{ Uses supsmu for non-seasonal series and a robust STL decomposition for seasonal series. To estimate missing values and outlier replacements, linear interpolation is used on the (possibly seasonally adjusted) series } \examples{ cleangold <- tsclean(gold) } \references{ Hyndman (2021) "Detecting time series outliers" \url{https://robjhyndman.com/hyndsight/tsoutliers/}. 
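In practice, the related \code{tsoutliers} function shows which observations \code{tsclean} would alter, reporting outlier positions and suggested replacements without modifying the series. A minimal sketch, assuming the \code{forecast} package is attached:

library(forecast)
out <- tsoutliers(gold)
out$index        # positions of the identified outliers
out$replacements # values that would be substituted
cleangold <- tsclean(gold) # the fully cleaned series, for comparison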
} \seealso{ \code{\link[forecast]{na.interp}}, \code{\link[forecast]{tsoutliers}}, \code{\link[stats]{supsmu}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/mstl.Rd0000644000176200001440000000316714150370574014116 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mstl.R \name{mstl} \alias{mstl} \title{Multiple seasonal decomposition} \usage{ mstl(x, lambda = NULL, iterate = 2, s.window = 7 + 4 * seq(6), ...) } \arguments{ \item{x}{Univariate time series of class \code{msts} or \code{ts}.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{iterate}{Number of iterations to use to refine the seasonal component.} \item{s.window}{Seasonal windows to be used in the decompositions. If scalar, the same value is used for all seasonal components. Otherwise, it should be a vector of the same length as the number of seasonal components (or longer).} \item{...}{Other arguments are passed to \code{\link[stats]{stl}}.} } \description{ Decompose a time series into seasonal, trend and remainder components. Seasonal components are estimated iteratively using STL. Multiple seasonal periods are allowed. The trend component is computed for the last iteration of STL. Non-seasonal time series are decomposed into trend and remainder only. In this case, \code{\link[stats]{supsmu}} is used to estimate the trend. Optionally, the time series may be Box-Cox transformed before decomposition. Unlike \code{\link[stats]{stl}}, \code{mstl} is completely automated. } \examples{ library(ggplot2) mstl(taylor) \%>\% autoplot() mstl(AirPassengers, lambda = "auto") \%>\% autoplot() } \seealso{ \code{\link[stats]{stl}}, \code{\link[stats]{supsmu}} } forecast/man/reexports.Rd0000644000176200001440000000114314207263356015164 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast-package.R, R/ggplot.R \docType{import} \name{reexports} \alias{reexports} \alias{\%>\%} \alias{forecast} \alias{accuracy} \alias{autoplot} \title{Objects exported from other packages} \keyword{internal} \description{ These objects are imported from other packages. Follow the links below to see their documentation. \describe{ \item{generics}{\code{\link[generics]{accuracy}}, \code{\link[generics]{forecast}}} \item{ggplot2}{\code{\link[ggplot2]{autoplot}}} \item{magrittr}{\code{\link[magrittr:pipe]{\%>\%}}} }} forecast/man/plot.Arima.Rd0000644000176200001440000000317514150370574015144 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/armaroots.R, R/ggplot.R \name{plot.Arima} \alias{plot.Arima} \alias{plot.ar} \alias{autoplot.Arima} \alias{autoplot.ar} \title{Plot characteristic roots from ARIMA model} \usage{ \method{plot}{Arima}( x, type = c("both", "ar", "ma"), main, xlab = "Real", ylab = "Imaginary", ... ) \method{plot}{ar}(x, main, xlab = "Real", ylab = "Imaginary", ...) \method{autoplot}{Arima}(object, type = c("both", "ar", "ma"), ...) \method{autoplot}{ar}(object, ...) } \arguments{ \item{x}{Object of class \dQuote{Arima} or \dQuote{ar}.} \item{type}{Determines if both AR and MA roots are plotted, or if just one set is plotted.} \item{main}{Main title.
Default is "Inverse AR roots" or "Inverse MA roots".} \item{xlab}{X-axis label.} \item{ylab}{Y-axis label.} \item{...}{Other plotting parameters passed to \code{\link[graphics]{par}}.} \item{object}{Object of class \dQuote{Arima} or \dQuote{ar}. Used for ggplot graphics (S3 method consistency).} } \value{ None. Function produces a plot } \description{ Produces a plot of the inverse AR and MA roots of an ARIMA model. Inverse roots outside the unit circle are shown in red. } \details{ \code{autoplot} will produce an equivalent plot as a ggplot object. } \examples{ library(ggplot2) fit <- Arima(WWWusage, order = c(3, 1, 0)) plot(fit) autoplot(fit) fit <- Arima(woolyrnq, order = c(2, 0, 0), seasonal = c(2, 1, 1)) plot(fit) autoplot(fit) plot(ar.ols(gold[1:61])) autoplot(ar.ols(gold[1:61])) } \seealso{ \code{\link{Arima}}, \code{\link[stats]{ar}} } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{hplot} forecast/man/plot.bats.Rd0000644000176200001440000000244714150370574015045 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/bats.R, R/ggplot.R, R/tbats.R \name{plot.bats} \alias{plot.bats} \alias{autoplot.tbats} \alias{autoplot.bats} \alias{plot.tbats} \title{Plot components from BATS model} \usage{ \method{plot}{bats}(x, main = "Decomposition by BATS model", ...) \method{autoplot}{tbats}(object, range.bars = FALSE, ...) \method{autoplot}{bats}(object, range.bars = FALSE, ...) \method{plot}{tbats}(x, main = "Decomposition by TBATS model", ...) } \arguments{ \item{x}{Object of class \dQuote{bats/tbats}.} \item{main}{Main title for plot.} \item{...}{Other plotting parameters passed to \code{\link[graphics]{par}}.} \item{object}{Object of class \dQuote{bats/tbats}.} \item{range.bars}{Logical indicating if each plot should have a bar at its right side representing relative size. If NULL, automatic selection takes place.} } \value{ None. Function produces a plot } \description{ Produces a plot of the level, slope and seasonal components from a BATS or TBATS model. The plotted components are Box-Cox transformed using the estimated transformation parameter. } \examples{ \dontrun{ fit <- tbats(USAccDeaths) plot(fit) autoplot(fit, range.bars = TRUE)} } \seealso{ \code{\link{bats}},\code{\link{tbats}} } \author{ Rob J Hyndman } \keyword{hplot} forecast/man/autoplot.ts.Rd0000644000176200001440000000424114634700243015422 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{autolayer.mts} \alias{autolayer.mts} \alias{autolayer.msts} \alias{autolayer.ts} \alias{autoplot.ts} \alias{autoplot.mts} \alias{autoplot.msts} \alias{fortify.ts} \title{Automatically create a ggplot for time series objects} \usage{ \method{autolayer}{mts}(object, colour = TRUE, series = NULL, ...) \method{autolayer}{msts}(object, series = NULL, ...) \method{autolayer}{ts}(object, colour = TRUE, series = NULL, ...) \method{autoplot}{ts}( object, series = NULL, xlab = "Time", ylab = deparse(substitute(object)), main = NULL, ... ) \method{autoplot}{mts}( object, colour = TRUE, facets = FALSE, xlab = "Time", ylab = deparse(substitute(object)), main = NULL, ... ) \method{autoplot}{msts}(object, ...) \method{fortify}{ts}(model, data, ...) 
} \arguments{ \item{object}{Object of class \dQuote{\code{ts}} or \dQuote{\code{mts}}.} \item{colour}{If TRUE, the time series will be assigned a colour aesthetic} \item{series}{Identifies the time series with a colour, which integrates well with the functionality of \link{geom_forecast}.} \item{...}{Other plotting parameters to affect the plot.} \item{xlab}{X-axis label.} \item{ylab}{Y-axis label.} \item{main}{Main title.} \item{facets}{If TRUE, multiple time series will be faceted (and unless specified, colour is set to FALSE). If FALSE, each series will be assigned a colour.} \item{model}{Object of class \dQuote{\code{ts}} to be converted to \dQuote{\code{data.frame}}.} \item{data}{Not used (required for \code{\link[ggplot2]{fortify}} method)} } \value{ None. Function produces a ggplot graph. } \description{ \code{autoplot} takes an object of type \code{ts} or \code{mts} and creates a ggplot object suitable for usage with \code{stat_forecast}. } \details{ \code{fortify.ts} takes a \code{ts} object and converts it into a data frame (for usage with ggplot2). } \examples{ library(ggplot2) autoplot(USAccDeaths) lungDeaths <- cbind(mdeaths, fdeaths) autoplot(lungDeaths) autoplot(lungDeaths, facets=TRUE) } \seealso{ \code{\link[stats]{plot.ts}}, \code{\link[ggplot2]{fortify}} } \author{ Mitchell O'Hara-Wild } forecast/man/ets.Rd0000644000176200001440000001346114254256650013733 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ets.R \name{ets} \alias{ets} \alias{print.ets} \alias{summary.ets} \alias{as.character.ets} \alias{coef.ets} \alias{tsdiag.ets} \title{Exponential smoothing state space model} \usage{ ets( y, model = "ZZZ", damped = NULL, alpha = NULL, beta = NULL, gamma = NULL, phi = NULL, additive.only = FALSE, lambda = NULL, biasadj = FALSE, lower = c(rep(1e-04, 3), 0.8), upper = c(rep(0.9999, 3), 0.98), opt.crit = c("lik", "amse", "mse", "sigma", "mae"), nmse = 3, bounds = c("both", "usual", "admissible"), ic = c("aicc", "aic", "bic"), restrict = TRUE, allow.multiplicative.trend = FALSE, use.initial.values = FALSE, na.action = c("na.contiguous", "na.interp", "na.fail"), ... ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{model}{Usually a three-character string identifying method using the framework terminology of Hyndman et al. (2002) and Hyndman et al. (2008). The first letter denotes the error type ("A", "M" or "Z"); the second letter denotes the trend type ("N","A","M" or "Z"); and the third letter denotes the season type ("N","A","M" or "Z"). In all cases, "N"=none, "A"=additive, "M"=multiplicative and "Z"=automatically selected. So, for example, "ANN" is simple exponential smoothing with additive errors, "MAM" is multiplicative Holt-Winters' method with multiplicative errors, and so on. It is also possible for the model to be of class \code{"ets"}, and equal to the output from a previous call to \code{ets}. In this case, the same model is fitted to \code{y} without re-estimating any smoothing parameters. See also the \code{use.initial.values} argument.} \item{damped}{If TRUE, use a damped trend (either additive or multiplicative). If NULL, both damped and non-damped trends will be tried and the best model (according to the information criterion \code{ic}) returned.} \item{alpha}{Value of alpha. If NULL, it is estimated.} \item{beta}{Value of beta. If NULL, it is estimated.} \item{gamma}{Value of gamma. If NULL, it is estimated.} \item{phi}{Value of phi. 
If NULL, it is estimated.} \item{additive.only}{If TRUE, will only consider additive models. Default is FALSE.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated. When \code{lambda} is specified, \code{additive.only} is set to \code{TRUE}.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{lower}{Lower bounds for the parameters (alpha, beta, gamma, phi). Ignored if \code{bounds=="admissible"}.} \item{upper}{Upper bounds for the parameters (alpha, beta, gamma, phi). Ignored if \code{bounds=="admissible"}.} \item{opt.crit}{Optimization criterion. One of "mse" (Mean Square Error), "amse" (Average MSE over first \code{nmse} forecast horizons), "sigma" (Standard deviation of residuals), "mae" (Mean of absolute residuals), or "lik" (Log-likelihood, the default).} \item{nmse}{Number of steps for average multistep MSE (1<=\code{nmse}<=30).} \item{bounds}{Type of parameter space to impose: \code{"usual" } indicates all parameters must lie between specified lower and upper bounds; \code{"admissible"} indicates parameters must lie in the admissible space; \code{"both"} (default) takes the intersection of these regions.} \item{ic}{Information criterion to be used in model selection.} \item{restrict}{If \code{TRUE} (default), the models with infinite variance will not be allowed.} \item{allow.multiplicative.trend}{If \code{TRUE}, models with multiplicative trend are allowed when searching for a model. Otherwise, the model space excludes them. This argument is ignored if a multiplicative trend model is explicitly requested (e.g., using \code{model="MMN"}).} \item{use.initial.values}{If \code{TRUE} and \code{model} is of class \code{"ets"}, then the initial values in the model are also not re-estimated.} \item{na.action}{A function which indicates what should happen when the data contains NA values. By default, the largest contiguous portion of the time-series will be used.} \item{...}{Other undocumented arguments.} } \value{ An object of class "\code{ets}". The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{ets} and associated functions. } \description{ Returns ets model applied to \code{y}. } \details{ Based on the classification of methods as described in Hyndman et al (2008). The methodology is fully automatic. The only required argument for ets is the time series. The model is chosen automatically if not specified. This methodology performed extremely well on the M3-competition data. (See Hyndman, et al, 2002, below.) } \examples{ fit <- ets(USAccDeaths) plot(forecast(fit)) } \references{ Hyndman, R.J., Koehler, A.B., Snyder, R.D., and Grose, S. (2002) "A state space framework for automatic forecasting using exponential smoothing methods", \emph{International J. Forecasting}, \bold{18}(3), 439--454. Hyndman, R.J., Akram, Md., and Archibald, B. (2008) "The admissible parameter space for exponential smoothing models". \emph{Annals of Statistical Mathematics}, \bold{60}(2), 407--426. Hyndman, R.J., Koehler, A.B., Ord, J.K., and Snyder, R.D. 
(2008) \emph{Forecasting with exponential smoothing: the state space approach}, Springer-Verlag. \url{http://www.exponentialsmoothing.net}. } \seealso{ \code{\link[stats]{HoltWinters}}, \code{\link{rwf}}, \code{\link{Arima}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/figures/0000755000176200001440000000000014150370574014305 5ustar liggesusersforecast/man/figures/logo.png0000644000176200001440000001365414150370574015764 0ustar liggesusers[binary PNG image data for the package logo omitted] forecast/man/CVar.Rd0000644000176200001440000000404214150370574013763 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tscv.R \name{CVar} \alias{CVar} \alias{print.CVar} \title{k-fold Cross-Validation applied to an autoregressive model} \usage{ CVar( y, k = 10, FUN = nnetar, cvtrace = FALSE, blocked = FALSE, LBlags = 24, ...
) } \arguments{ \item{y}{Univariate time series} \item{k}{Number of folds to use for cross-validation.} \item{FUN}{Function to fit an autoregressive model. Currently, it only works with the \code{\link{nnetar}} function.} \item{cvtrace}{Provide progress information.} \item{blocked}{Choose folds randomly or as blocks?} \item{LBlags}{Number of lags to use in the Ljung-Box test. Defaults to 24; for yearly series it can be set to 20.} \item{...}{Other arguments are passed to \code{FUN}.} } \value{ A list containing information about the model and accuracy for each fold, plus other summary information computed across folds. } \description{ \code{CVar} computes the errors obtained by applying an autoregressive modelling function to subsets of the time series \code{y} using k-fold cross-validation as described in Bergmeir, Hyndman and Koo (2018). It also applies a Ljung-Box test to the residuals. If this test is significant (see returned pvalue), there is serial correlation in the residuals and the model can be considered to be underfitting the data. In this case, the cross-validated errors can underestimate the generalization error and should not be used. } \examples{ modelcv <- CVar(lynx, k=5, lambda=0.15) print(modelcv) print(modelcv$fold1) library(ggplot2) autoplot(lynx, series="Data") + autolayer(modelcv$testfit, series="Fits") + autolayer(modelcv$residuals, series="Residuals") ggAcf(modelcv$residuals) } \references{ Bergmeir, C., Hyndman, R.J., Koo, B. (2018) A note on the validity of cross-validation for evaluating time series prediction. \emph{Computational Statistics & Data Analysis}, \bold{120}, 70-83. \url{https://robjhyndman.com/publications/cv-time-series/}. } \seealso{ \link{CV}, \link{tsCV}. } \author{ Gabriel Caceres and Rob J Hyndman } \keyword{ts} forecast/man/bld.mbb.bootstrap.Rd0000644000176200001440000000234514150370574016450 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/bootstrap.R \name{bld.mbb.bootstrap} \alias{bld.mbb.bootstrap} \title{Box-Cox and Loess-based decomposition bootstrap.} \usage{ bld.mbb.bootstrap(x, num, block_size = NULL) } \arguments{ \item{x}{Original time series.} \item{num}{Number of bootstrapped versions to generate.} \item{block_size}{Block size for the moving block bootstrap.} } \value{ A list with bootstrapped versions of the series. The first series in the list is the original series. } \description{ Generates bootstrapped versions of a time series using the Box-Cox and Loess-based decomposition bootstrap. } \details{ The procedure is described in Bergmeir et al. Box-Cox decomposition is applied, together with STL or Loess (for non-seasonal time series), and the remainder is bootstrapped using a moving block bootstrap. } \examples{ bootstrapped_series <- bld.mbb.bootstrap(WWWusage, 100) } \references{ Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging Exponential Smoothing Methods using STL Decomposition and Box-Cox Transformation. International Journal of Forecasting 32, 303-312. } \seealso{ \code{\link{baggedETS}}. } \author{ Christoph Bergmeir, Fotios Petropoulos } \keyword{ts} forecast/man/ndiffs.Rd0000644000176200001440000000523114150370574014402 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/unitRoot.R \name{ndiffs} \alias{ndiffs} \title{Number of differences required for a stationary series} \usage{ ndiffs( x, alpha = 0.05, test = c("kpss", "adf", "pp"), type = c("level", "trend"), max.d = 2, ...
) } \arguments{ \item{x}{A univariate time series} \item{alpha}{Level of the test, possible values range from 0.01 to 0.1.} \item{test}{Type of unit root test to use} \item{type}{Specification of the deterministic component in the regression} \item{max.d}{Maximum number of non-seasonal differences allowed} \item{...}{Additional arguments to be passed on to the unit root test} } \value{ An integer indicating the number of differences required for stationarity. } \description{ Functions to estimate the number of differences required to make a given time series stationary. \code{ndiffs} estimates the number of first differences necessary. } \details{ \code{ndiffs} uses a unit root test to determine the number of differences required for time series \code{x} to be made stationary. If \code{test="kpss"}, the KPSS test is used with the null hypothesis that \code{x} has a stationary root against a unit-root alternative. Then the test returns the least number of differences required to pass the test at the level \code{alpha}. If \code{test="adf"}, the Augmented Dickey-Fuller test is used and if \code{test="pp"} the Phillips-Perron test is used. In both of these cases, the null hypothesis is that \code{x} has a unit root against a stationary root alternative. Then the test returns the least number of differences required to fail the test at the level \code{alpha}. } \examples{ ndiffs(WWWusage) ndiffs(diff(log(AirPassengers), 12)) } \references{ Dickey DA and Fuller WA (1979), "Distribution of the Estimators for Autoregressive Time Series with a Unit Root", \emph{Journal of the American Statistical Association} \bold{74}:427-431. Kwiatkowski D, Phillips PCB, Schmidt P and Shin Y (1992) "Testing the Null Hypothesis of Stationarity against the Alternative of a Unit Root", \emph{Journal of Econometrics} \bold{54}:159-178. Osborn, D.R. (1990) "A survey of seasonality in UK macroeconomic variables", \emph{International Journal of Forecasting}, \bold{6}:327-336. Phillips, P.C.B. and Perron, P. (1988) "Testing for a unit root in time series regression", \emph{Biometrika}, \bold{72}(2), 335-346. Said E and Dickey DA (1984), "Testing for Unit Roots in Autoregressive Moving Average Models of Unknown Order", \emph{Biometrika} \bold{71}:599-607. } \seealso{ \code{\link{auto.arima}} and \code{\link{nsdiffs}} } \author{ Rob J Hyndman, Slava Razbash & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/forecast.lm.Rd0000644000176200001440000000662314150370574015354 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/lm.R \name{forecast.lm} \alias{forecast.lm} \title{Forecast a linear model with possible time series components} \usage{ \method{forecast}{lm}( object, newdata, h = 10, level = c(80, 95), fan = FALSE, lambda = object$lambda, biasadj = NULL, ts = TRUE, ... ) } \arguments{ \item{object}{Object of class "lm", usually the result of a call to \code{\link[stats]{lm}} or \code{\link{tslm}}.} \item{newdata}{An optional data frame in which to look for variables with which to predict. If omitted, it is assumed that the only variables are trend and season, and \code{h} forecasts are produced.} \item{h}{Number of periods for forecasting. Ignored if \code{newdata} present.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}.
The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{ts}{If \code{TRUE}, the forecasts will be treated as time series provided the original data is a time series; the \code{newdata} will be interpreted as related to the subsequent time periods. If \code{FALSE}, any time series attributes of the original data will be ignored.} \item{...}{Other arguments passed to \code{\link[stats]{predict.lm}()}.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.lm}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The historical data for the response variable.} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values} } \description{ \code{forecast.lm} is used to predict linear models, especially those involving trend and seasonality components. } \details{ \code{forecast.lm} is largely a wrapper for \code{\link[stats]{predict.lm}()} except that it allows variables "trend" and "season" which are created on the fly from the time series characteristics of the data. Also, the output is reformatted into a \code{forecast} object. } \examples{ y <- ts(rnorm(120,0,3) + 1:120 + 20*sin(2*pi*(1:120)/12), frequency=12) fit <- tslm(y ~ trend + season) plot(forecast(fit, h=20)) } \seealso{ \code{\link{tslm}}, \code{\link[stats]{lm}}. } \author{ Rob J Hyndman } \keyword{stats} forecast/man/forecast.HoltWinters.Rd0000644000176200001440000000557414150370574017232 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast2.R \name{forecast.HoltWinters} \alias{forecast.HoltWinters} \title{Forecasting using Holt-Winters objects} \usage{ \method{forecast}{HoltWinters}( object, h = ifelse(frequency(object$x) > 1, 2 * frequency(object$x), 10), level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{HoltWinters}". Usually the result of a call to \code{\link[stats]{HoltWinters}}.} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. 
If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Other arguments.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.HoltWinters}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for univariate Holt-Winters time series models. } \details{ This function calls \code{\link[stats]{predict.HoltWinters}} and constructs an object of class "\code{forecast}" from the results. It is included for completeness, but the \code{\link{ets}} function is recommended for use instead of \code{\link[stats]{HoltWinters}}. } \examples{ fit <- HoltWinters(WWWusage,gamma=FALSE) plot(forecast(fit)) } \seealso{ \code{\link[stats]{predict.HoltWinters}}, \code{\link[stats]{HoltWinters}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/nnetar.Rd0000644000176200001440000001116114456202551014415 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/nnetar.R \name{nnetar} \alias{nnetar} \alias{print.nnetar} \alias{print.nnetarmodels} \title{Neural Network Time Series Forecasts} \usage{ nnetar( y, p, P = 1, size, repeats = 20, xreg = NULL, lambda = NULL, model = NULL, subset = NULL, scale.inputs = TRUE, x = y, ... ) } \arguments{ \item{y}{A numeric vector or time series of class \code{ts}.} \item{p}{Embedding dimension for non-seasonal time series. Number of non-seasonal lags used as inputs. For non-seasonal time series, the default is the optimal number of lags (according to the AIC) for a linear AR(p) model. For seasonal time series, the same method is used but applied to seasonally adjusted data (from an stl decomposition). If set to zero to indicate that no non-seasonal lags should be included, then P must be at least 1 and a model with only seasonal lags will be fit.} \item{P}{Number of seasonal lags used as inputs.} \item{size}{Number of nodes in the hidden layer. Default is half of the number of input nodes (including external regressors, if given) plus 1.} \item{repeats}{Number of networks to fit with different random starting weights. These are then averaged when producing forecasts.} \item{xreg}{Optionally, a vector or matrix of external regressors, which must have the same number of rows as \code{y}. Must be numeric.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL.
Otherwise, data transformed before model is estimated.} \item{model}{Output from a previous call to \code{nnetar}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.} \item{subset}{Optional vector specifying a subset of observations to be used in the fit. Can be an integer index vector or a logical vector the same length as \code{y}. All observations are used by default.} \item{scale.inputs}{If TRUE, inputs are scaled by subtracting the column means and dividing by their respective standard deviations. If \code{lambda} is not \code{NULL}, scaling is applied after Box-Cox transformation.} \item{x}{Deprecated. Included for backwards compatibility.} \item{\dots}{Other arguments passed to \code{\link[nnet]{nnet}} for \code{nnetar}.} } \value{ Returns an object of class "\code{nnetar}". The function \code{summary} is used to obtain and print a summary of the results. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{nnetar}. \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{x}{The original time series.} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} \item{...}{Other arguments} } \description{ Feed-forward neural networks with a single hidden layer and lagged inputs for forecasting univariate time series. } \details{ A feed-forward neural network is fitted with lagged values of \code{y} as inputs and a single hidden layer with \code{size} nodes. The inputs are for lags 1 to \code{p}, and lags \code{m} to \code{mP} where \code{m=frequency(y)}. If \code{xreg} is provided, its columns are also used as inputs. If there are missing values in \code{y} or \code{xreg}, the corresponding rows (and any others which depend on them as lags) are omitted from the fit. A total of \code{repeats} networks are fitted, each with random starting weights. These are then averaged when computing forecasts. The network is trained for one-step forecasting. Multi-step forecasts are computed recursively. For non-seasonal data, the fitted model is denoted as an NNAR(p,k) model, where k is the number of hidden nodes. This is analogous to an AR(p) model but with nonlinear functions. For seasonal data, the fitted model is called an NNAR(p,P,k)[m] model, which is analogous to an ARIMA(p,0,0)(P,0,0)[m] model but with nonlinear functions. 
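Because the model is nonlinear, prediction intervals are not available in closed form and are instead obtained by simulating future sample paths. A minimal sketch, assuming the \code{forecast} package is attached (\code{PI} and \code{npaths} are arguments of the corresponding \code{forecast} method):

library(forecast)
fit <- nnetar(lynx)
fit # prints the selected NNAR(p,k) specification
# Simulate 100 future sample paths to construct prediction intervals
fc <- forecast(fit, h = 10, PI = TRUE, npaths = 100)
plot(fc)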
} \examples{ fit <- nnetar(lynx) fcast <- forecast(fit) plot(fcast) ## Arguments can be passed to nnet() fit <- nnetar(lynx, decay=0.5, maxit=150) plot(forecast(fit)) lines(lynx) ## Fit model to first 100 years of lynx data fit <- nnetar(window(lynx,end=1920), decay=0.5, maxit=150) plot(forecast(fit,h=14)) lines(lynx) ## Apply fitted model to later data, including all optional arguments fit2 <- nnetar(window(lynx,start=1921), model=fit) } \author{ Rob J Hyndman and Gabriel Caceres } \keyword{ts} forecast/man/croston.Rd0000644000176200001440000000511114150370574014615 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast2.R \name{croston} \alias{croston} \title{Forecasts for intermittent demand using Croston's method} \usage{ croston(y, h = 10, alpha = 0.1, x = y) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting.} \item{alpha}{Value of alpha. Default value is 0.1.} \item{x}{Deprecated. Included for backwards compatibility.} } \value{ An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model. The first element gives the model used for non-zero demands. The second element gives the model used for times between non-zero demands. Both elements are of class \code{forecast}.} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is y minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{croston} and associated functions. } \description{ Returns forecasts and other information for Croston's forecasts applied to y. } \details{ Based on Croston's (1972) method for intermittent demand forecasting, also described in Shenstone and Hyndman (2005). Croston's method involves using simple exponential smoothing (SES) on the non-zero elements of the time series and a separate application of SES to the times between non-zero elements of the time series. The smoothing parameters of the two applications of SES are assumed to be equal and are denoted by \code{alpha}. Note that prediction intervals are not computed as Croston's method has no underlying stochastic model. } \examples{ y <- rpois(20,lambda=.3) fcast <- croston(y) plot(fcast) } \references{ Croston, J. (1972) "Forecasting and stock control for intermittent demands", \emph{Operational Research Quarterly}, \bold{23}(3), 289-303. Shenstone, L., and Hyndman, R.J. (2005) "Stochastic models underlying Croston's method for intermittent demand forecasting". \emph{Journal of Forecasting}, \bold{24}, 389-402. } \seealso{ \code{\link{ses}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/wineind.Rd0000644000176200001440000000070214150370574014564 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{wineind} \alias{wineind} \title{Australian total wine sales} \format{ Time series data } \source{ Time Series Data Library. 
\url{https://pkg.yangzhuoranyang.com/tsdl/} } \usage{ wineind } \description{ Australian total wine sales by wine makers in bottles <= 1 litre. Jan 1980 -- Aug 1994. } \examples{ tsdisplay(wineind) } \keyword{datasets} forecast/man/seasonal.Rd0000644000176200001440000000205414150370574014736 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/components.R \name{seasonal} \alias{seasonal} \alias{trendcycle} \alias{remainder} \title{Extract components from a time series decomposition} \usage{ seasonal(object) trendcycle(object) remainder(object) } \arguments{ \item{object}{Object created by \code{\link[stats]{decompose}}, \code{\link[stats]{stl}} or \code{\link{tbats}}.} } \value{ Univariate time series. } \description{ Returns a univariate time series equal to either a seasonal component, trend-cycle component or remainder component from a time series decomposition. } \examples{ plot(USAccDeaths) fit <- stl(USAccDeaths, s.window="periodic") lines(trendcycle(fit),col="red") library(ggplot2) autoplot(cbind( Data=USAccDeaths, Seasonal=seasonal(fit), Trend=trendcycle(fit), Remainder=remainder(fit)), facets=TRUE) + ylab("") + xlab("Year") } \seealso{ \code{\link[stats]{stl}}, \code{\link[stats]{decompose}}, \code{\link{tbats}}, \code{\link{seasadj}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/ggmonthplot.Rd0000644000176200001440000000222614150370574015474 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{ggmonthplot} \alias{ggmonthplot} \alias{ggsubseriesplot} \title{Create a seasonal subseries ggplot} \usage{ ggmonthplot(x, labels = NULL, times = time(x), phase = cycle(x), ...) ggsubseriesplot(x, labels = NULL, times = time(x), phase = cycle(x), ...) } \arguments{ \item{x}{a time series object (type \code{ts}).} \item{labels}{A vector of labels to use for each 'season'} \item{times}{A vector of times for each observation} \item{phase}{A vector of seasonal components} \item{\dots}{Not used (for consistency with monthplot)} } \value{ Returns an object of class \code{ggplot}. } \description{ Plots a subseries plot using ggplot. Each season is plotted as a separate mini time series. The blue lines represent the mean of the observations within each season. } \details{ The \code{ggmonthplot} function is simply a wrapper for \code{ggsubseriesplot} as a convenience for users familiar with \code{\link[stats]{monthplot}}. } \examples{ ggsubseriesplot(AirPassengers) ggsubseriesplot(woolyrnq) } \seealso{ \code{\link[stats]{monthplot}} } \author{ Mitchell O'Hara-Wild } forecast/man/BoxCox.Rd0000644000176200001440000000364414341272370014336 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast2.R \name{BoxCox} \alias{BoxCox} \alias{InvBoxCox} \title{Box Cox Transformation} \usage{ BoxCox(x, lambda) InvBoxCox(x, lambda, biasadj = FALSE, fvar = NULL) } \arguments{ \item{x}{a numeric vector or time series of class \code{ts}.} \item{lambda}{transformation parameter. If \code{lambda = "auto"}, then the transformation parameter lambda is chosen using BoxCox.lambda (with a lower bound of -0.9)} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{fvar}{Optional parameter required if biasadj=TRUE. 
Can either be the forecast variance, or a list containing the interval \code{level}, and the corresponding \code{upper} and \code{lower} intervals.} } \value{ a numeric vector of the same length as x. } \description{ BoxCox() returns a transformation of the input variable using a Box-Cox transformation. InvBoxCox() reverses the transformation. } \details{ The Box-Cox transformation (as given by Bickel & Doksum 1981) is given by \deqn{f_\lambda(x) =(sign(x)|x|^\lambda - 1)/\lambda}{f(x;lambda)=(sign(x)|x|^lambda - 1)/lambda} if \eqn{\lambda\ne0}{lambda is not equal to 0}. For \eqn{\lambda=0}{lambda=0}, \deqn{f_0(x)=\log(x)}{f(x;0)=log(x)}. } \examples{ lambda <- BoxCox.lambda(lynx) lynx.fit <- ar(BoxCox(lynx,lambda)) plot(forecast(lynx.fit,h=20,lambda=lambda)) } \references{ Box, G. E. P. and Cox, D. R. (1964) An analysis of transformations. \emph{JRSS B} \bold{26} 211--246. Bickel, P. J. and Doksum K. A. (1981) An Analysis of Transformations Revisited. \emph{JASA} \bold{76} 296-311. } \seealso{ \code{\link{BoxCox.lambda}} } \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/is.constant.Rd0000644000176200001440000000046014150370574015373 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/newarima2.R \name{is.constant} \alias{is.constant} \title{Is an object constant?} \usage{ is.constant(x) } \arguments{ \item{x}{object to be tested} } \description{ Returns true if the object's numerical values do not vary. } forecast/man/forecast-package.Rd0000644000176200001440000000366714633662406016347 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/forecast-package.R \docType{package} \name{forecast-package} \alias{forecast-package} \title{forecast: Forecasting Functions for Time Series and Linear Models} \description{ \if{html}{\figure{logo.png}{options: style='float: right' alt='logo' width='120'}} Methods and tools for displaying and analysing univariate time series forecasts including exponential smoothing via state space models and automatic ARIMA modelling. 
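A typical end-to-end use of the package looks like the following. A minimal sketch; any of the package's modelling functions could take the place of \code{auto.arima}:

library(forecast)
fit <- auto.arima(AirPassengers, lambda = "auto") # automatic ARIMA with Box-Cox
fc <- forecast(fit, h = 24)                       # forecast two years ahead
autoplot(fc)                                      # plot forecasts and intervals
accuracy(fit)                                     # in-sample accuracy measures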
} \seealso{ Useful links: \itemize{ \item \url{https://pkg.robjhyndman.com/forecast/} \item \url{https://github.com/robjhyndman/forecast} \item Report bugs at \url{https://github.com/robjhyndman/forecast/issues} } } \author{ \strong{Maintainer}: Rob Hyndman \email{Rob.Hyndman@monash.edu} (\href{https://orcid.org/0000-0002-2140-5352}{ORCID}) [copyright holder] Authors: \itemize{ \item George Athanasopoulos (\href{https://orcid.org/0000-0002-5389-2802}{ORCID}) \item Christoph Bergmeir (\href{https://orcid.org/0000-0002-3665-9021}{ORCID}) \item Gabriel Caceres (\href{https://orcid.org/0000-0002-2947-2023}{ORCID}) \item Leanne Chhay \item Kirill Kuroptev \item Mitchell O'Hara-Wild (\href{https://orcid.org/0000-0001-6729-7695}{ORCID}) \item Fotios Petropoulos (\href{https://orcid.org/0000-0003-3039-4955}{ORCID}) \item Slava Razbash \item Earo Wang (\href{https://orcid.org/0000-0001-6448-5260}{ORCID}) \item Farah Yasmeen (\href{https://orcid.org/0000-0002-1479-5401}{ORCID}) } Other contributors: \itemize{ \item Federico Garza [contributor] \item Daniele Girolimetto [contributor] \item Ross Ihaka [contributor, copyright holder] \item R Core Team [contributor, copyright holder] \item Daniel Reid [contributor] \item David Shaub [contributor] \item Yuan Tang (\href{https://orcid.org/0000-0001-5243-233X}{ORCID}) [contributor] \item Xiaoqian Wang [contributor] \item Zhenyu Zhou [contributor] } } \keyword{package} forecast/man/fitted.Arima.Rd0000644000176200001440000000325514150370574015444 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arfima.R, R/arima.R, R/bats.R, R/ets.R, % R/modelAR.R, R/nnetar.R, R/tbats.R \name{fitted.ARFIMA} \alias{fitted.ARFIMA} \alias{fitted.Arima} \alias{fitted.forecast_ARIMA} \alias{fitted.ar} \alias{fitted.bats} \alias{fitted.ets} \alias{fitted.modelAR} \alias{fitted.nnetar} \alias{fitted.tbats} \title{h-step in-sample forecasts for time series models.} \usage{ \method{fitted}{ARFIMA}(object, h = 1, ...) \method{fitted}{Arima}(object, h = 1, ...) \method{fitted}{ar}(object, ...) \method{fitted}{bats}(object, h = 1, ...) \method{fitted}{ets}(object, h = 1, ...) \method{fitted}{modelAR}(object, h = 1, ...) \method{fitted}{nnetar}(object, h = 1, ...) \method{fitted}{tbats}(object, h = 1, ...) } \arguments{ \item{object}{An object of class "\code{Arima}", "\code{bats}", "\code{tbats}", "\code{ets}" or "\code{nnetar}".} \item{h}{The number of steps to forecast ahead.} \item{...}{Other arguments.} } \value{ A time series of the h-step forecasts. } \description{ Returns h-step forecasts for the data used in fitting the model. } \examples{ fit <- ets(WWWusage) plot(WWWusage) lines(fitted(fit), col='red') lines(fitted(fit, h=2), col='green') lines(fitted(fit, h=3), col='blue') legend("topleft", legend=paste("h =",1:3), col=2:4, lty=1) } \seealso{ \code{\link{forecast.Arima}}, \code{\link{forecast.bats}}, \code{\link{forecast.tbats}}, \code{\link{forecast.ets}}, \code{\link{forecast.nnetar}}, \code{\link{residuals.Arima}}, \code{\link{residuals.bats}}, \code{\link{residuals.tbats}}, \code{\link{residuals.ets}}, \code{\link{residuals.nnetar}}. 
} \author{ Rob J Hyndman & Mitchell O'Hara-Wild } \keyword{ts} forecast/man/seasonaldummy.Rd0000644000176200001440000000313714150370574016015 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/season.R \name{seasonaldummy} \alias{seasonaldummy} \alias{seasonaldummyf} \title{Seasonal dummy variables} \usage{ seasonaldummy(x, h = NULL) seasonaldummyf(x, h) } \arguments{ \item{x}{Seasonal time series: a \code{ts} or a \code{msts} object} \item{h}{Number of periods ahead to forecast (optional)} } \value{ Numerical matrix. } \description{ \code{seasonaldummy} returns a matrix of dummy variables suitable for use in \code{\link{Arima}}, \code{\link{auto.arima}} or \code{\link{tslm}}. The last season is omitted and used as the control. } \details{ \code{seasonaldummyf} is deprecated, instead use the \code{h} argument in \code{seasonaldummy}. The number of dummy variables is determined from the time series characteristics of \code{x}. When \code{h} is missing, the length of \code{x} also determines the number of rows for the matrix returned by \code{seasonaldummy}. Otherwise, the value of \code{h} determines the number of rows for the matrix returned by \code{seasonaldummy}, typically used for forecasting. The values within \code{x} are not used. } \examples{ plot(ldeaths) # Using seasonal dummy variables month <- seasonaldummy(ldeaths) deaths.lm <- tslm(ldeaths ~ month) tsdisplay(residuals(deaths.lm)) ldeaths.fcast <- forecast(deaths.lm, data.frame(month=I(seasonaldummy(ldeaths,36)))) plot(ldeaths.fcast) # A simpler approach to seasonal dummy variables deaths.lm <- tslm(ldeaths ~ season) ldeaths.fcast <- forecast(deaths.lm, h=36) plot(ldeaths.fcast) } \seealso{ \code{\link{fourier}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/gglagplot.Rd0000644000176200001440000000370514150370574015115 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{gglagplot} \alias{gglagplot} \alias{gglagchull} \title{Time series lag ggplots} \usage{ gglagplot( x, lags = ifelse(frequency(x) > 9, 16, 9), set.lags = 1:lags, diag = TRUE, diag.col = "gray", do.lines = TRUE, colour = TRUE, continuous = frequency(x) > 12, labels = FALSE, seasonal = TRUE, ... ) gglagchull( x, lags = ifelse(frequency(x) > 1, min(12, frequency(x)), 4), set.lags = 1:lags, diag = TRUE, diag.col = "gray", ... ) } \arguments{ \item{x}{a time series object (type \code{ts}).} \item{lags}{number of lag plots desired, see arg set.lags.} \item{set.lags}{vector of positive integers specifying which lags to use.} \item{diag}{logical indicating if the x=y diagonal should be drawn.} \item{diag.col}{color to be used for the diagonal if(diag).} \item{do.lines}{if TRUE, lines will be drawn, otherwise points will be drawn.} \item{colour}{logical indicating if lines should be coloured.} \item{continuous}{Should the colour scheme for years be continuous or discrete?} \item{labels}{logical indicating if labels should be used.} \item{seasonal}{Should the line colour be based on seasonal characteristics (TRUE), or sequential (FALSE).} \item{\dots}{Not used (for consistency with lag.plot)} } \value{ None. } \description{ Plots a lag plot using ggplot. } \details{ \dQuote{gglagplot} will plot time series against lagged versions of themselves. Helps visualise 'auto-dependence' even when auto-correlations vanish. \dQuote{gglagchull} will layer convex hulls of the lags, layered on a single plot. This helps visualise the change in 'auto-dependence' as lags increase.
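The pattern in the lag-k panel corresponds to the lag-k sample autocorrelation, which can be checked numerically. A minimal sketch; the two quantities agree only approximately because the sample ACF uses a slightly different normalisation:

library(forecast)
n <- length(woolyrnq)
# Correlation underlying the lag-2 panel of gglagplot(woolyrnq)
cor(woolyrnq[-(1:2)], woolyrnq[1:(n - 2)])
# The corresponding lag-2 sample autocorrelation (index 1 is lag 0)
acf(woolyrnq, plot = FALSE)$acf[3]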
} \examples{ gglagplot(woolyrnq) gglagplot(woolyrnq,seasonal=FALSE) lungDeaths <- cbind(mdeaths, fdeaths) gglagplot(lungDeaths, lags=2) gglagchull(lungDeaths, lags=6) gglagchull(woolyrnq) } \seealso{ \code{\link[stats]{lag.plot}} } \author{ Mitchell O'Hara-Wild } forecast/man/auto.arima.Rd0000644000176200001440000001462214150370574015175 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/newarima2.R \name{auto.arima} \alias{auto.arima} \title{Fit best ARIMA model to univariate time series} \usage{ auto.arima( y, d = NA, D = NA, max.p = 5, max.q = 5, max.P = 2, max.Q = 2, max.order = 5, max.d = 2, max.D = 1, start.p = 2, start.q = 2, start.P = 1, start.Q = 1, stationary = FALSE, seasonal = TRUE, ic = c("aicc", "aic", "bic"), stepwise = TRUE, nmodels = 94, trace = FALSE, approximation = (length(x) > 150 | frequency(x) > 12), method = NULL, truncate = NULL, xreg = NULL, test = c("kpss", "adf", "pp"), test.args = list(), seasonal.test = c("seas", "ocsb", "hegy", "ch"), seasonal.test.args = list(), allowdrift = TRUE, allowmean = TRUE, lambda = NULL, biasadj = FALSE, parallel = FALSE, num.cores = 2, x = y, ... ) } \arguments{ \item{y}{a univariate time series} \item{d}{Order of first-differencing. If missing, will choose a value based on \code{test}.} \item{D}{Order of seasonal-differencing. If missing, will choose a value based on \code{season.test}.} \item{max.p}{Maximum value of p} \item{max.q}{Maximum value of q} \item{max.P}{Maximum value of P} \item{max.Q}{Maximum value of Q} \item{max.order}{Maximum value of p+q+P+Q if model selection is not stepwise.} \item{max.d}{Maximum number of non-seasonal differences} \item{max.D}{Maximum number of seasonal differences} \item{start.p}{Starting value of p in stepwise procedure.} \item{start.q}{Starting value of q in stepwise procedure.} \item{start.P}{Starting value of P in stepwise procedure.} \item{start.Q}{Starting value of Q in stepwise procedure.} \item{stationary}{If \code{TRUE}, restricts search to stationary models.} \item{seasonal}{If \code{FALSE}, restricts search to non-seasonal models.} \item{ic}{Information criterion to be used in model selection.} \item{stepwise}{If \code{TRUE}, will do stepwise selection (faster). Otherwise, it searches over all models. Non-stepwise selection can be very slow, especially for seasonal models.} \item{nmodels}{Maximum number of models considered in the stepwise search.} \item{trace}{If \code{TRUE}, the list of ARIMA models considered will be reported.} \item{approximation}{If \code{TRUE}, estimation is via conditional sums of squares and the information criteria used for model selection are approximated. The final model is still computed using maximum likelihood estimation. Approximation should be used for long time series or a high seasonal period to avoid excessive computation times.} \item{method}{fitting method: maximum likelihood or minimize conditional sum-of-squares. The default (unless there are missing values) is to use conditional-sum-of-squares to find starting values, then maximum likelihood. Can be abbreviated.} \item{truncate}{An integer value indicating how many observations to use in model selection. The last \code{truncate} values of the series are used to select a model when \code{truncate} is not \code{NULL} and \code{approximation=TRUE}. 
All observations are used if either \code{truncate=NULL} or \code{approximation=FALSE}.} \item{xreg}{Optionally, a numerical vector or matrix of external regressors, which must have the same number of rows as \code{y}. (It should not be a data frame.)} \item{test}{Type of unit root test to use. See \code{\link{ndiffs}} for details.} \item{test.args}{Additional arguments to be passed to the unit root test.} \item{seasonal.test}{This determines which method is used to select the number of seasonal differences. The default method is to use a measure of seasonal strength computed from an STL decomposition. Other possibilities involve seasonal unit root tests.} \item{seasonal.test.args}{Additional arguments to be passed to the seasonal unit root test. See \code{\link{nsdiffs}} for details.} \item{allowdrift}{If \code{TRUE}, models with drift terms are considered.} \item{allowmean}{If \code{TRUE}, models with a non-zero mean are considered.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{parallel}{If \code{TRUE} and \code{stepwise = FALSE}, then the specification search is done in parallel. This can give a significant speedup on multicore machines.} \item{num.cores}{Allows the user to specify the number of parallel processes to be used if \code{parallel = TRUE} and \code{stepwise = FALSE}. If \code{NULL}, then the number of logical cores is automatically detected and all available cores are used.} \item{x}{Deprecated. Included for backwards compatibility.} \item{...}{Additional arguments to be passed to \code{\link[stats]{arima}}.} } \value{ Same as for \code{\link{Arima}} } \description{ Returns the best ARIMA model according to either AIC, AICc or BIC value. The function conducts a search over possible models within the order constraints provided. } \details{ The default arguments are designed for rapid estimation of models for many time series. If you are analysing just one time series, and can afford to take some more time, it is recommended that you set \code{stepwise=FALSE} and \code{approximation=FALSE}. Non-stepwise selection can be slow, especially for seasonal data. The stepwise algorithm outlined in Hyndman & Khandakar (2008) is used except that the default method for selecting seasonal differences is now based on an estimate of seasonal strength (Wang, Smith & Hyndman, 2006) rather than the Canova-Hansen test. There are also some other minor variations to the algorithm described in Hyndman and Khandakar (2008). } \examples{ fit <- auto.arima(WWWusage) plot(forecast(fit,h=20)) } \references{ Hyndman, RJ and Khandakar, Y (2008) "Automatic time series forecasting: The forecast package for R", \emph{Journal of Statistical Software}, \bold{26}(3). Wang, X, Smith, KA, Hyndman, RJ (2006) "Characteristic-based clustering for time series data", \emph{Data Mining and Knowledge Discovery}, \bold{13}(3), 335-364.
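As a sketch of the slower, more thorough search recommended in the details above (and of passing external regressors via \code{xreg}):

# Exhaustive search instead of the stepwise default (slower, often better)
fit2 <- auto.arima(WWWusage, stepwise = FALSE, approximation = FALSE)
# Regression with ARIMA errors: Fourier terms as external regressors
xr <- fourier(wineind, K = 2)
fit3 <- auto.arima(wineind, xreg = xr, seasonal = FALSE)
plot(forecast(fit3, xreg = fourier(wineind, K = 2, h = 24)))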
} \seealso{ \code{\link{Arima}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/arfima.Rd0000644000176200001440000000636214150370574014376 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/arfima.R \name{arfima} \alias{arfima} \title{Fit a fractionally differenced ARFIMA model} \usage{ arfima( y, drange = c(0, 0.5), estim = c("mle", "ls"), model = NULL, lambda = NULL, biasadj = FALSE, x = y, ... ) } \arguments{ \item{y}{a univariate time series (numeric vector).} \item{drange}{Allowable values of d to be considered. Default of \code{c(0,0.5)} ensures a stationary model is returned.} \item{estim}{If \code{estim=="ls"}, then the ARMA parameters are calculated using the Haslett-Raftery algorithm. If \code{estim=="mle"}, then the ARMA parameters are calculated using full MLE via the \code{\link[stats]{arima}} function.} \item{model}{Output from a previous call to \code{arfima}. If model is passed, this same model is fitted to y without re-estimating any parameters.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{x}{Deprecated. Included for backwards compatibility.} \item{\dots}{Other arguments passed to \code{\link{auto.arima}} when selecting p and q.} } \value{ A list object of S3 class \code{"fracdiff"}, which is described in the \code{\link[fracdiff]{fracdiff}} documentation. A few additional objects are added to the list including \code{x} (the original time series), and the \code{residuals} and \code{fitted} values. } \description{ An ARFIMA(p,d,q) model is selected and estimated automatically using the Hyndman-Khandakar (2008) algorithm to select p and q and the Haslett and Raftery (1989) algorithm to estimate the parameters including d. } \details{ This function combines \code{\link[fracdiff]{fracdiff}} and \code{\link{auto.arima}} to automatically select and estimate an ARFIMA model. The fractional differencing parameter is chosen first assuming an ARFIMA(2,d,0) model. Then the data are fractionally differenced using the estimated d and an ARMA model is selected for the resulting time series using \code{\link{auto.arima}}. Finally, the full ARFIMA(p,d,q) model is re-estimated using \code{\link[fracdiff]{fracdiff}}. If \code{estim=="mle"}, the ARMA coefficients are refined using \code{\link[stats]{arima}}. } \examples{ library(fracdiff) x <- fracdiff.sim( 100, ma=-.4, d=.3)$series fit <- arfima(x) tsdisplay(residuals(fit)) } \references{ J. Haslett and A. E. Raftery (1989) Space-time Modelling with Long-memory Dependence: Assessing Ireland's Wind Power Resource (with discussion); \emph{Applied Statistics} \bold{38}, 1-50. Hyndman, R.J. and Khandakar, Y. (2008) "Automatic time series forecasting: The forecast package for R", \emph{Journal of Statistical Software}, \bold{26}(3). } \seealso{ \code{\link[fracdiff]{fracdiff}}, \code{\link{auto.arima}}, \code{\link{forecast.fracdiff}}. 
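A short sketch (an addition to the example above) showing forecasts from the fitted ARFIMA model, which are handled by \code{forecast.fracdiff}:

# A sketch: forecast a fitted ARFIMA model
library(fracdiff)
x <- fracdiff.sim(100, ma = -0.4, d = 0.3)$series
fit <- arfima(x, estim = "mle")
plot(forecast(fit, h = 30))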
} \author{ Rob J Hyndman and Farah Yasmeen } \keyword{ts} forecast/man/thetaf.Rd0000644000176200001440000000546414150370574014414 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/theta.R \name{thetaf} \alias{thetaf} \title{Theta method forecast} \usage{ thetaf( y, h = ifelse(frequency(y) > 1, 2 * frequency(y), 10), level = c(80, 95), fan = FALSE, x = y ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting} \item{level}{Confidence levels for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{x}{Deprecated. Included for backwards compatibility.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{thetaf}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and prediction intervals for a theta method forecast. } \details{ The theta method of Assimakopoulos and Nikolopoulos (2000) is equivalent to simple exponential smoothing with drift. This is demonstrated in Hyndman and Billah (2003). The series is tested for seasonality using the test outlined in A&N. If deemed seasonal, the series is seasonally adjusted using a classical multiplicative decomposition before applying the theta method. The resulting forecasts are then reseasonalized. Prediction intervals are computed using the underlying state space model. More general theta methods are available in the \code{\link[forecTheta]{forecTheta}} package. } \examples{ nile.fcast <- thetaf(Nile) plot(nile.fcast) } \references{ Assimakopoulos, V. and Nikolopoulos, K. (2000). The theta model: a decomposition approach to forecasting. \emph{International Journal of Forecasting} \bold{16}, 521-530. Hyndman, R.J., and Billah, B. (2003) Unmasking the Theta method. \emph{International J. Forecasting}, \bold{19}, 287-290. } \seealso{ \code{\link[stats]{arima}}, \code{\link{meanf}}, \code{\link{rwf}}, \code{\link{ses}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/autolayer.Rd0000644000176200001440000000137014633664201015135 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ggplot.R \name{autolayer} \alias{autolayer} \title{Create a ggplot layer appropriate to a particular data type} \usage{ autolayer(object, ...)
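# A hypothetical sketch (not part of the usage above): a method for this
# generic lets forecast objects be layered onto an existing ggplot, e.g.
# autoplot(USAccDeaths) + autolayer(forecast(ets(USAccDeaths), h = 24))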
} \arguments{ \item{object}{an object, whose class will determine the behaviour of autolayer} \item{...}{other arguments passed to specific methods} } \value{ a ggplot layer } \description{ \code{autolayer()} uses ggplot2 to draw a particular layer for an object of a particular class in a single command. This defines the S3 generic that other classes and packages can extend. } \seealso{ Other plotting automation topics: \code{\link[ggplot2]{automatic_plotting}}, \code{\link[ggplot2]{autoplot}()}, \code{\link[ggplot2]{fortify}()} } forecast/man/forecast.mlm.Rd0000644000176200001440000000710014150370574015520 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mforecast.R \name{forecast.mlm} \alias{forecast.mlm} \title{Forecast a multiple linear model with possible time series components} \usage{ \method{forecast}{mlm}( object, newdata, h = 10, level = c(80, 95), fan = FALSE, lambda = object$lambda, biasadj = NULL, ts = TRUE, ... ) } \arguments{ \item{object}{Object of class "mlm", usually the result of a call to \code{\link[stats]{lm}} or \code{\link{tslm}}.} \item{newdata}{An optional data frame in which to look for variables with which to predict. If omitted, it is assumed that the only variables are trend and season, and \code{h} forecasts are produced.} \item{h}{Number of periods for forecasting. Ignored if \code{newdata} present.} \item{level}{Confidence level for prediction intervals.} \item{fan}{If \code{TRUE}, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{ts}{If \code{TRUE}, the forecasts will be treated as time series provided the original data is a time series; the \code{newdata} will be interpreted as related to the subsequent time periods. If \code{FALSE}, any time series attributes of the original data will be ignored.} \item{...}{Other arguments passed to \code{\link[forecast]{forecast.lm}()}.} } \value{ An object of class "\code{mforecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.lm}. An object of class \code{"mforecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a multivariate time series} \item{lower}{Lower limits for prediction intervals of each series} \item{upper}{Upper limits for prediction intervals of each series} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The historical data for the response variable.} \item{residuals}{Residuals from the fitted model. 
That is x minus fitted values.} \item{fitted}{Fitted values} } \description{ \code{forecast.mlm} is used to predict multiple linear models, especially those involving trend and seasonality components. } \details{ \code{forecast.mlm} is largely a wrapper for \code{\link[forecast]{forecast.lm}()} except that it allows forecasts to be generated on multiple series. Also, the output is reformatted into a \code{mforecast} object. } \examples{ lungDeaths <- cbind(mdeaths, fdeaths) fit <- tslm(lungDeaths ~ trend + season) fcast <- forecast(fit, h=10) carPower <- as.matrix(mtcars[,c("qsec","hp")]) carmpg <- mtcars[,"mpg"] fit <- lm(carPower ~ carmpg) fcast <- forecast(fit, newdata=data.frame(carmpg=30)) } \seealso{ \code{\link{tslm}}, \code{\link{forecast.lm}}, \code{\link[stats]{lm}}. } \author{ Mitchell O'Hara-Wild } forecast/man/splinef.Rd0000644000176200001440000000672414150370574014601 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/spline.R \name{splinef} \alias{splinef} \title{Cubic Spline Forecast} \usage{ splinef( y, h = 10, level = c(80, 95), fan = FALSE, lambda = NULL, biasadj = FALSE, method = c("gcv", "mle"), x = y ) } \arguments{ \item{y}{a numeric vector or time series of class \code{ts}} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{method}{Method for selecting the smoothing parameter. If \code{method="gcv"}, the generalized cross-validation method from \code{\link[stats]{smooth.spline}} is used. If \code{method="mle"}, the maximum likelihood method from Hyndman et al (2002) is used.} \item{x}{Deprecated. Included for backwards compatibility.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{splinef}. An object of class \code{"forecast"} containing the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{onestepf}{One-step forecasts from the fitted model.} \item{fitted}{Smooth estimates of the fitted trend using all data.} \item{residuals}{Residuals from the fitted model. 
That is x minus one-step forecasts.} } \description{ Returns local linear forecasts and prediction intervals using cubic smoothing splines. } \details{ The cubic smoothing spline model is equivalent to an ARIMA(0,2,2) model but with a restricted parameter space. The advantage of the spline model over the full ARIMA model is that it provides a smooth historical trend as well as a linear forecast function. Hyndman, King, Pitrun, and Billah (2002) show that the forecast performance of the method is hardly affected by the restricted parameter space. } \examples{ fcast <- splinef(uspop,h=5) plot(fcast) summary(fcast) } \references{ Hyndman, King, Pitrun and Billah (2005) Local linear forecasts using cubic smoothing splines. \emph{Australian and New Zealand Journal of Statistics}, \bold{47}(1), 87-99. \url{https://robjhyndman.com/publications/splinefcast/}. } \seealso{ \code{\link[stats]{smooth.spline}}, \code{\link[stats]{arima}}, \code{\link{holt}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/easter.Rd0000644000176200001440000000136614150370574014421 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calendar.R \name{easter} \alias{easter} \title{Easter holidays in each season} \usage{ easter(x, easter.mon = FALSE) } \arguments{ \item{x}{Monthly or quarterly time series} \item{easter.mon}{If TRUE, the length of Easter holidays includes Easter Monday.} } \value{ Time series } \description{ Returns a vector of 0's and 1's or fractional results if Easter spans March and April in the observed time period. Easter is defined as the days from Good Friday to Easter Sunday inclusively, plus optionally Easter Monday if \code{easter.mon=TRUE}. } \details{ Useful for adjusting calendar effects. } \examples{ easter(wineind, easter.mon = TRUE) } \author{ Earo Wang } \keyword{ts} forecast/man/tsCV.Rd0000644000176200001440000000515614150370574014016 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/tscv.R \name{tsCV} \alias{tsCV} \title{Time series cross-validation} \usage{ tsCV(y, forecastfunction, h = 1, window = NULL, xreg = NULL, initial = 0, ...) } \arguments{ \item{y}{Univariate time series} \item{forecastfunction}{Function to return an object of class \code{forecast}. Its first argument must be a univariate time series, and it must have an argument \code{h} for the forecast horizon. If exogenous predictors are used, then it must also have \code{xreg} and \code{newxreg} arguments corresponding to the training and test periods.} \item{h}{Forecast horizon} \item{window}{Length of the rolling window. If NULL, a rolling window will not be used.} \item{xreg}{Exogenous predictor variables passed to the forecast function if required.} \item{initial}{Initial period of the time series where no cross-validation is performed.} \item{...}{Other arguments are passed to \code{forecastfunction}.} } \value{ Numerical time series object containing the forecast errors as a vector (if h=1) and a matrix otherwise. The time index corresponds to the last period of the training data. The columns correspond to the forecast horizons. } \description{ \code{tsCV} computes the forecast errors obtained by applying \code{forecastfunction} to subsets of the time series \code{y} using a rolling forecast origin. } \details{ Let \code{y} contain the time series \eqn{y_1,\dots,y_T}{y[1:T]}.
Then \code{forecastfunction} is applied successively to the time series \eqn{y_1,\dots,y_t}{y[1:t]}, for \eqn{t=1,\dots,T-h}, making predictions \eqn{\hat{y}_{t+h|t}}{f[t+h]}. The errors are given by \eqn{e_{t+h} = y_{t+h}-\hat{y}_{t+h|t}}{e[t+h] = y[t+h]-f[t+h]}. If h=1, these are returned as a vector, \eqn{e_1,\dots,e_T}{e[1:T]}. For h>1, they are returned as a matrix with the hth column containing errors for forecast horizon h. The first few errors may be missing as it may not be possible to apply \code{forecastfunction} to very short time series. } \examples{ #Fit an AR(2) model to each rolling origin subset far2 <- function(x, h){forecast(Arima(x, order=c(2,0,0)), h=h)} e <- tsCV(lynx, far2, h=1) #Fit the same model with a rolling window of length 30 e <- tsCV(lynx, far2, h=1, window=30) #Example with exogenous predictors far2_xreg <- function(x, h, xreg, newxreg) { forecast(Arima(x, order=c(2,0,0), xreg=xreg), xreg=newxreg) } y <- ts(rnorm(50)) xreg <- matrix(rnorm(100),ncol=2) e <- tsCV(y, far2_xreg, h=3, xreg=xreg) } \seealso{ \link{CV}, \link{CVar}, \link{residuals.Arima}, \url{https://robjhyndman.com/hyndsight/tscv/}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/bizdays.Rd0000644000176200001440000000150114272665773014607 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/calendar.R \name{bizdays} \alias{bizdays} \title{Number of trading days in each season} \usage{ bizdays(x, FinCenter = c("New York", "London", "NERC", "Toronto", "Zurich")) } \arguments{ \item{x}{Monthly or quarterly time series} \item{FinCenter}{Major financial center.} } \value{ Time series } \description{ Returns number of trading days in each month or quarter of the observed time period in a major financial center. } \details{ Useful for trading days length adjustments. More on how to define "business days", please refer to \code{\link[timeDate]{isBizday}}. } \examples{ x <- ts(rnorm(30), start = c(2013, 2), frequency = 12) bizdays(x, FinCenter = "New York") } \seealso{ \code{\link[forecast]{monthdays}} } \author{ Earo Wang } \keyword{ts} forecast/man/modelAR.Rd0000644000176200001440000000733214150370574014460 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/modelAR.R \name{modelAR} \alias{modelAR} \alias{print.modelAR} \title{Time Series Forecasts with a user-defined model} \usage{ modelAR( y, p, P = 1, FUN, predict.FUN, xreg = NULL, lambda = NULL, model = NULL, subset = NULL, scale.inputs = FALSE, x = y, ... ) } \arguments{ \item{y}{A numeric vector or time series of class \code{ts}.} \item{p}{Embedding dimension for non-seasonal time series. Number of non-seasonal lags used as inputs. For non-seasonal time series, the default is the optimal number of lags (according to the AIC) for a linear AR(p) model. For seasonal time series, the same method is used but applied to seasonally adjusted data (from an stl decomposition).} \item{P}{Number of seasonal lags used as inputs.} \item{FUN}{Function used for model fitting. Must accept argument \code{x} and \code{y} for the predictors and response, respectively (\code{formula} object not currently supported).} \item{predict.FUN}{Prediction function used to apply \code{FUN} to new data. Must accept an object of class \code{FUN} as its first argument, and a data frame or matrix of new data for its second argument. 
Additionally, it should return fitted values when new data is omitted.} \item{xreg}{Optionally, a vector or matrix of external regressors, which must have the same number of rows as \code{y}. Must be numeric.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{model}{Output from a previous call to \code{nnetar}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.} \item{subset}{Optional vector specifying a subset of observations to be used in the fit. Can be an integer index vector or a logical vector the same length as \code{y}. All observations are used by default.} \item{scale.inputs}{If TRUE, inputs are scaled by subtracting the column means and dividing by their respective standard deviations. If \code{lambda} is not \code{NULL}, scaling is applied after Box-Cox transformation.} \item{x}{Deprecated. Included for backwards compatibility.} \item{\dots}{Other arguments passed to \code{FUN} for \code{modelAR}.} } \value{ Returns an object of class "\code{modelAR}". The function \code{summary} is used to obtain and print a summary of the results. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{nnetar}. \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{x}{The original time series.} \item{xreg}{The external regressors used in fitting (if given).} \item{residuals}{Residuals from the fitted model. That is x minus fitted values.} \item{fitted}{Fitted values (one-step forecasts)} \item{...}{Other arguments} } \description{ Experimental function to forecast univariate time series with a user-defined model } \details{ This is an experimental function and only recommended for advanced users. The selected model is fitted with lagged values of \code{y} as inputs. The inputs are for lags 1 to \code{p}, and lags \code{m} to \code{mP} where \code{m=frequency(y)}. If \code{xreg} is provided, its columns are also used as inputs. If there are missing values in \code{y} or \code{xreg}, the corresponding rows (and any others which depend on them as lags) are omitted from the fit. The model is trained for one-step forecasting. Multi-step forecasts are computed recursively. } \author{ Rob J Hyndman and Gabriel Caceres } \keyword{ts} forecast/man/accuracy.default.Rd0000644000176200001440000000642614207263356016357 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/errors.R \name{accuracy.default} \alias{accuracy.default} \title{Accuracy measures for a forecast model} \usage{ \method{accuracy}{default}(object, x, test = NULL, d = NULL, D = NULL, f = NULL, ...) } \arguments{ \item{object}{An object of class \dQuote{\code{forecast}}, or a numerical vector containing forecasts. It will also work with \code{Arima}, \code{ets} and \code{lm} objects if \code{x} is omitted -- in which case training set accuracy measures are returned.} \item{x}{An optional numerical vector containing actual values of the same length as object, or a time series overlapping with the times of \code{f}.} \item{test}{Indicator of which elements of \code{x} and \code{f} to test. If \code{test} is \code{NULL}, all elements are used. 
Otherwise test is a numeric vector containing the indices of the elements to use in the test.} \item{d}{An integer indicating the number of lag-1 differences to be used for the denominator in MASE calculation. Default value is 1 for non-seasonal series and 0 for seasonal series.} \item{D}{An integer indicating the number of seasonal differences to be used for the denominator in MASE calculation. Default value is 0 for non-seasonal series and 1 for seasonal series.} \item{f}{Deprecated. Please use `object` instead.} \item{...}{Additional arguments depending on the specific method.} } \value{ Matrix giving forecast accuracy measures. } \description{ Returns a range of summary measures of forecast accuracy. If \code{x} is provided, the function measures test set forecast accuracy based on \code{x-f}. If \code{x} is not provided, the function only produces training set accuracy measures of the forecasts based on \code{f["x"]-fitted(f)}. All measures are defined and discussed in Hyndman and Koehler (2006). } \details{ The measures calculated are: \itemize{ \item ME: Mean Error \item RMSE: Root Mean Squared Error \item MAE: Mean Absolute Error \item MPE: Mean Percentage Error \item MAPE: Mean Absolute Percentage Error \item MASE: Mean Absolute Scaled Error \item ACF1: Autocorrelation of errors at lag 1. } By default, the MASE calculation is scaled using MAE of training set naive forecasts for non-seasonal time series, training set seasonal naive forecasts for seasonal time series and training set mean forecasts for non-time series data. If \code{f} is a numerical vector rather than a \code{forecast} object, the MASE will not be returned as the training data will not be available. See Hyndman and Koehler (2006) and Hyndman and Athanasopoulos (2018, Section 3.4) for further details. } \examples{ fit1 <- rwf(EuStockMarkets[1:200, 1], h = 100) fit2 <- meanf(EuStockMarkets[1:200, 1], h = 100) accuracy(fit1) accuracy(fit2) accuracy(fit1, EuStockMarkets[201:300, 1]) accuracy(fit2, EuStockMarkets[201:300, 1]) plot(fit1) lines(EuStockMarkets[1:300, 1]) } \references{ Hyndman, R.J. and Koehler, A.B. (2006) "Another look at measures of forecast accuracy". \emph{International Journal of Forecasting}, \bold{22}(4), 679-688. Hyndman, R.J. and Athanasopoulos, G. (2018) "Forecasting: principles and practice", 2nd ed., OTexts, Melbourne, Australia. Section 3.4 "Evaluating forecast accuracy". \url{https://otexts.com/fpp2/accuracy.html}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/baggedModel.Rd0000644000176200001440000000445614150370574015333 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/baggedModel.R \name{baggedModel} \alias{baggedModel} \alias{print.baggedModel} \alias{baggedETS} \title{Forecasting using a bagged model} \usage{ baggedModel(y, bootstrapped_series = bld.mbb.bootstrap(y, 100), fn = ets, ...) baggedETS(y, bootstrapped_series = bld.mbb.bootstrap(y, 100), ...) } \arguments{ \item{y}{A numeric vector or time series of class \code{ts}.} \item{bootstrapped_series}{bootstrapped versions of y.} \item{fn}{the forecast function to use. Default is \code{\link{ets}}.} \item{\dots}{Other arguments passed to the forecast function.} } \value{ Returns an object of class "\code{baggedModel}". The function \code{print} is used to obtain and print a summary of the results.
\item{models}{A list containing the fitted ensemble models.} \item{method}{The function for producing a forecastable model.} \item{y}{The original time series.} \item{bootstrapped_series}{The bootstrapped series.} \item{modelargs}{The arguments passed through to \code{fn}.} \item{fitted}{Fitted values (one-step forecasts). The mean of the fitted values is calculated over the ensemble.} \item{residuals}{Original values minus fitted values.} } \description{ The bagged model forecasting method. } \details{ This function implements the bagged model forecasting method described in Bergmeir et al. By default, the \code{\link{ets}} function is applied to all bootstrapped series. Base models other than \code{\link{ets}} can be given by the parameter \code{fn}. Using the default parameters, the function \code{\link{bld.mbb.bootstrap}} is used to calculate the bootstrapped series with the Box-Cox and Loess-based decomposition (BLD) bootstrap. The function \code{\link{forecast.baggedModel}} can then be used to calculate forecasts. \code{baggedETS} is a wrapper for \code{baggedModel}, setting \code{fn} to "ets". This function is included for backwards compatibility only, and may be deprecated in the future. } \examples{ fit <- baggedModel(WWWusage) fcast <- forecast(fit) plot(fcast) } \references{ Bergmeir, C., R. J. Hyndman, and J. M. Benitez (2016). Bagging Exponential Smoothing Methods using STL Decomposition and Box-Cox Transformation. International Journal of Forecasting 32, 303-312. } \author{ Christoph Bergmeir, Fotios Petropoulos } \keyword{ts} forecast/man/forecast.ets.Rd0000644000176200001440000000652414150370574015537 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/etsforecast.R \name{forecast.ets} \alias{forecast.ets} \title{Forecasting using ETS models} \usage{ \method{forecast}{ets}( object, h = ifelse(object$m > 1, 2 * object$m, 10), level = c(80, 95), fan = FALSE, simulate = FALSE, bootstrap = FALSE, npaths = 5000, PI = TRUE, lambda = object$lambda, biasadj = NULL, ... ) } \arguments{ \item{object}{An object of class "\code{ets}". Usually the result of a call to \code{\link{ets}}.} \item{h}{Number of periods for forecasting} \item{level}{Confidence level for prediction intervals.} \item{fan}{If TRUE, level is set to seq(51,99,by=3). This is suitable for fan plots.} \item{simulate}{If TRUE, prediction intervals are produced by simulation rather than using analytic formulae. Errors are assumed to be normally distributed.} \item{bootstrap}{If TRUE, then prediction intervals are produced by simulation using resampled errors (rather than normally distributed errors).} \item{npaths}{Number of sample paths used in computing simulated prediction intervals.} \item{PI}{If TRUE, prediction intervals are produced, otherwise only point forecasts are calculated. If \code{PI} is FALSE, then \code{level}, \code{fan}, \code{simulate}, \code{bootstrap} and \code{npaths} are all ignored.} \item{lambda}{Box-Cox transformation parameter. If \code{lambda="auto"}, then a transformation is automatically selected using \code{BoxCox.lambda}. The transformation is ignored if NULL. Otherwise, data transformed before model is estimated.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If transformed data is used to produce forecasts and fitted values, a regular back transformation will result in median forecasts. 
If biasadj is TRUE, an adjustment will be made to produce mean forecasts and fitted values.} \item{...}{Other arguments.} } \value{ An object of class "\code{forecast}". The function \code{summary} is used to obtain and print a summary of the results, while the function \code{plot} produces a plot of the forecasts and prediction intervals. The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{forecast.ets}. An object of class \code{"forecast"} is a list containing at least the following elements: \item{model}{A list containing information about the fitted model} \item{method}{The name of the forecasting method as a character string} \item{mean}{Point forecasts as a time series} \item{lower}{Lower limits for prediction intervals} \item{upper}{Upper limits for prediction intervals} \item{level}{The confidence values associated with the prediction intervals} \item{x}{The original time series (either \code{object} itself or the time series used to create the model stored as \code{object}).} \item{residuals}{Residuals from the fitted model. For models with additive errors, the residuals are x - fitted values. For models with multiplicative errors, the residuals are equal to x /(fitted values) - 1.} \item{fitted}{Fitted values (one-step forecasts)} } \description{ Returns forecasts and other information for univariate ETS models. } \examples{ fit <- ets(USAccDeaths) plot(forecast(fit,h=48)) } \seealso{ \code{\link{ets}}, \code{\link{ses}}, \code{\link{holt}}, \code{\link{hw}}. } \author{ Rob J Hyndman } \keyword{ts} forecast/man/ocsb.test.Rd0000644000176200001440000000342514150370574015040 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/unitRoot.R \name{ocsb.test} \alias{ocsb.test} \alias{print.OCSBtest} \title{Osborn, Chui, Smith, and Birchenhall Test for Seasonal Unit Roots} \usage{ ocsb.test(x, lag.method = c("fixed", "AIC", "BIC", "AICc"), maxlag = 0) } \arguments{ \item{x}{a univariate seasonal time series.} \item{lag.method}{a character specifying the lag order selection method.} \item{maxlag}{the maximum lag order to be considered by \code{lag.method}.} } \value{ ocsb.test returns a list of class "OCSBtest" with the following components: * statistics the value of the test statistics. * pvalues the p-values for each test statistics. * method a character string describing the type of test. * data.name a character string giving the name of the data. * fitted.model the fitted regression model. } \description{ An implementation of the Osborn, Chui, Smith, and Birchenhall (OCSB) test. } \details{ The regression equation may include lags of the dependent variable. When lag.method = "fixed", the lag order is fixed to maxlag; otherwise, maxlag is the maximum number of lags considered in a lag selection procedure that minimises the lag.method criterion, which can be AIC or BIC or corrected AIC, AICc, obtained as AIC + (2k(k+1))/(n-k-1), where k is the number of parameters and n is the number of available observations in the model. Critical values for the test are based on simulations, which has been smoothed over to produce critical values for all seasonal periods. } \examples{ ocsb.test(AirPassengers) } \references{ Osborn DR, Chui APL, Smith J, and Birchenhall CR (1988) "Seasonality and the order of integration for consumption", \emph{Oxford Bulletin of Economics and Statistics} \bold{50}(4):361-377. 
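For example (a sketch, not from the original documentation), the lag order can be selected by an information criterion instead of being fixed at \code{maxlag}:

# A sketch: choose the lag order by AICc, considering up to 3 lags
ocsb.test(AirPassengers, lag.method = "AICc", maxlag = 3)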
} \seealso{ \code{\link{nsdiffs}} } forecast/man/gas.Rd0000644000176200001440000000060414150370574013702 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/data.R \docType{data} \name{gas} \alias{gas} \title{Australian monthly gas production} \format{ Time series data } \source{ Australian Bureau of Statistics. } \usage{ gas } \description{ Australian monthly gas production: 1956--1995. } \examples{ plot(gas) seasonplot(gas) tsdisplay(gas) } \keyword{datasets} forecast/man/Acf.Rd0000644000176200001440000000747114150370574013632 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/acf.R \name{Acf} \alias{Acf} \alias{Pacf} \alias{Ccf} \alias{taperedacf} \alias{taperedpacf} \title{(Partial) Autocorrelation and Cross-Correlation Function Estimation} \usage{ Acf( x, lag.max = NULL, type = c("correlation", "covariance", "partial"), plot = TRUE, na.action = na.contiguous, demean = TRUE, ... ) Pacf( x, lag.max = NULL, plot = TRUE, na.action = na.contiguous, demean = TRUE, ... ) Ccf( x, y, lag.max = NULL, type = c("correlation", "covariance"), plot = TRUE, na.action = na.contiguous, ... ) taperedacf( x, lag.max = NULL, type = c("correlation", "partial"), plot = TRUE, calc.ci = TRUE, level = 95, nsim = 100, ... ) taperedpacf(x, ...) } \arguments{ \item{x}{a univariate or multivariate (not Ccf) numeric time series object or a numeric vector or matrix.} \item{lag.max}{maximum lag at which to calculate the acf. Default is $10*log10(N/m)$ where $N$ is the number of observations and $m$ the number of series. Will be automatically limited to one less than the number of observations in the series.} \item{type}{character string giving the type of acf to be computed. Allowed values are \dQuote{\code{correlation}} (the default), \dQuote{\code{covariance}} or \dQuote{\code{partial}}.} \item{plot}{logical. If \code{TRUE} (the default) the resulting acf, pacf or ccf is plotted.} \item{na.action}{function to handle missing values. Default is \code{\link[stats]{na.contiguous}}. Useful alternatives are \code{\link[stats]{na.pass}} and \code{\link{na.interp}}.} \item{demean}{Should covariances be about the sample means?} \item{...}{Additional arguments passed to the plotting function.} \item{y}{a univariate numeric time series object or a numeric vector.} \item{calc.ci}{If \code{TRUE}, confidence intervals for the ACF/PACF estimates are calculated.} \item{level}{Percentage level used for the confidence intervals.} \item{nsim}{The number of bootstrap samples used in estimating the confidence intervals.} } \value{ The \code{Acf}, \code{Pacf} and \code{Ccf} functions return objects of class "acf" as described in \code{\link[stats]{acf}} from the stats package. The \code{taperedacf} and \code{taperedpacf} functions return objects of class "mpacf". } \description{ The function \code{Acf} computes (and by default plots) an estimate of the autocorrelation function of a (possibly multivariate) time series. Function \code{Pacf} computes (and by default plots) an estimate of the partial autocorrelation function of a (possibly multivariate) time series. Function \code{Ccf} computes the cross-correlation or cross-covariance of two univariate series. } \details{ The functions improve the \code{\link[stats]{acf}}, \code{\link[stats]{pacf}} and \code{\link[stats]{ccf}} functions. 
The main differences are that \code{Acf} does not plot a spike at lag 0 when \code{type=="correlation"} (which is redundant) and the horizontal axes show lags in time units rather than seasonal units. The tapered versions implement the ACF and PACF estimates and plots described in Hyndman (2015), based on the banded and tapered estimates of autocovariance proposed by McMurry and Politis (2010). } \examples{ Acf(wineind) Pacf(wineind) \dontrun{ taperedacf(wineind, nsim=50) taperedpacf(wineind, nsim=50) } } \references{ Hyndman, R.J. (2015). Discussion of ``High-dimensional autocovariance matrices and optimal linear prediction''. \emph{Electronic Journal of Statistics}, 9, 792-796. McMurry, T. L., & Politis, D. N. (2010). Banded and tapered estimates for autocovariance matrices and the linear process bootstrap. \emph{Journal of Time Series Analysis}, 31(6), 471-482. } \seealso{ \code{\link[stats]{acf}}, \code{\link[stats]{pacf}}, \code{\link[stats]{ccf}}, \code{\link{tsdisplay}} } \author{ Rob J Hyndman } \keyword{ts} forecast/man/bats.Rd0000644000176200001440000000713214150370574014064 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/bats.R \name{bats} \alias{bats} \alias{as.character.bats} \alias{print.bats} \title{BATS model (Exponential smoothing state space model with Box-Cox transformation, ARMA errors, Trend and Seasonal components)} \usage{ bats( y, use.box.cox = NULL, use.trend = NULL, use.damped.trend = NULL, seasonal.periods = NULL, use.arma.errors = TRUE, use.parallel = length(y) > 1000, num.cores = 2, bc.lower = 0, bc.upper = 1, biasadj = FALSE, model = NULL, ... ) } \arguments{ \item{y}{The time series to be forecast. Can be \code{numeric}, \code{msts} or \code{ts}. Only univariate time series are supported.} \item{use.box.cox}{\code{TRUE/FALSE} indicates whether to use the Box-Cox transformation or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{use.trend}{\code{TRUE/FALSE} indicates whether to include a trend or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{use.damped.trend}{\code{TRUE/FALSE} indicates whether to include a damping parameter in the trend or not. If \code{NULL} then both are tried and the best fit is selected by AIC.} \item{seasonal.periods}{If \code{y} is a numeric then seasonal periods can be specified with this parameter.} \item{use.arma.errors}{\code{TRUE/FALSE} indicates whether to include ARMA errors or not. If \code{TRUE} the best fit is selected by AIC. If \code{FALSE} then the selection algorithm does not consider ARMA errors.} \item{use.parallel}{\code{TRUE/FALSE} indicates whether or not to use parallel processing.} \item{num.cores}{The number of parallel processes to be used if using parallel processing. If \code{NULL} then the number of logical cores is detected and all available cores are used.} \item{bc.lower}{The lower limit (inclusive) for the Box-Cox transformation.} \item{bc.upper}{The upper limit (inclusive) for the Box-Cox transformation.} \item{biasadj}{Use adjusted back-transformed mean for Box-Cox transformations. If TRUE, point forecasts and fitted values are mean forecast. Otherwise, these points can be considered the median of the forecast densities.} \item{model}{Output from a previous call to \code{bats}. If model is passed, this same model is fitted to \code{y} without re-estimating any parameters.} \item{...}{Additional arguments to be passed to \code{auto.arima} when choosing an ARMA(p, q) model for the errors.
(Note that xreg will be ignored, as will any arguments concerning seasonality and differencing, but arguments controlling the values of p and q will be used.)} } \value{ An object of class "\code{bats}". The generic accessor functions \code{fitted.values} and \code{residuals} extract useful features of the value returned by \code{bats} and associated functions. The fitted model is designated BATS(omega, p,q, phi, m1,...mJ) where omega is the Box-Cox parameter and phi is the damping parameter; the error is modelled as an ARMA(p,q) process and m1,...,mJ list the seasonal periods used in the model. } \description{ Fits a BATS model applied to \code{y}, as described in De Livera, Hyndman & Snyder (2011). Parallel processing is used by default to speed up the computations. } \examples{ \dontrun{ fit <- bats(USAccDeaths) plot(forecast(fit)) taylor.fit <- bats(taylor) plot(forecast(taylor.fit)) } } \references{ De Livera, A.M., Hyndman, R.J., & Snyder, R. D. (2011), Forecasting time series with complex seasonal patterns using exponential smoothing, \emph{Journal of the American Statistical Association}, \bold{106}(496), 1513-1527. } \author{ Slava Razbash and Rob J Hyndman } \keyword{ts} forecast/man/getResponse.Rd0000644000176200001440000000261014150370574015425 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/getResponse.R \name{getResponse} \alias{getResponse} \alias{getResponse.default} \alias{getResponse.lm} \alias{getResponse.Arima} \alias{getResponse.fracdiff} \alias{getResponse.ar} \alias{getResponse.tbats} \alias{getResponse.bats} \alias{getResponse.mforecast} \alias{getResponse.baggedModel} \title{Get response variable from time series model.} \usage{ getResponse(object, ...) \method{getResponse}{default}(object, ...) \method{getResponse}{lm}(object, ...) \method{getResponse}{Arima}(object, ...) \method{getResponse}{fracdiff}(object, ...) \method{getResponse}{ar}(object, ...) \method{getResponse}{tbats}(object, ...) \method{getResponse}{bats}(object, ...) \method{getResponse}{mforecast}(object, ...) \method{getResponse}{baggedModel}(object, ...) } \arguments{ \item{object}{a time series model or forecast object.} \item{...}{Additional arguments that are ignored.} } \value{ A numerical vector or a time series object of class \code{ts}. } \description{ \code{getResponse} is a generic function for extracting the historical data from a time series model (including \code{Arima}, \code{ets}, \code{ar}, \code{fracdiff}), a linear model of class \code{lm}, or a forecast object. The function invokes particular \emph{methods} which depend on the class of the first argument. } \author{ Rob J Hyndman } \keyword{ts} forecast/DESCRIPTION0000644000176200001440000000650014634716616013605 0ustar liggesusersPackage: forecast Version: 8.23.0 Title: Forecasting Functions for Time Series and Linear Models Description: Methods and tools for displaying and analysing univariate time series forecasts including exponential smoothing via state space models and automatic ARIMA modelling. 
Depends: R (>= 3.5.0), Imports: colorspace, fracdiff, generics (>= 0.1.2), ggplot2 (>= 2.2.1), graphics, lmtest, magrittr, nnet, parallel, Rcpp (>= 0.11.0), stats, timeDate, tseries, urca, withr, zoo Suggests: forecTheta, knitr, methods, rmarkdown, rticles, scales, seasonal, testthat (>= 3.0.0), uroot LinkingTo: Rcpp (>= 0.11.0), RcppArmadillo (>= 0.2.35) LazyData: yes ByteCompile: TRUE Authors@R: c( person("Rob", "Hyndman", email = "Rob.Hyndman@monash.edu", role = c("aut", "cre", "cph"), comment = c(ORCID = "0000-0002-2140-5352")), person("George", "Athanasopoulos", role = "aut", comment = c(ORCID = "0000-0002-5389-2802")), person("Christoph", "Bergmeir", role = "aut", comment = c(ORCID = "0000-0002-3665-9021")), person("Gabriel", "Caceres", role = "aut", comment = c(ORCID = "0000-0002-2947-2023")), person("Leanne", "Chhay", role = "aut"), person("Kirill", "Kuroptev", role = "aut"), person("Mitchell", "O'Hara-Wild", role = "aut", comment = c(ORCID = "0000-0001-6729-7695")), person("Fotios", "Petropoulos", role = "aut", comment = c(ORCID = "0000-0003-3039-4955")), person("Slava", "Razbash", role = "aut"), person("Earo", "Wang", role = "aut", comment = c(ORCID = "0000-0001-6448-5260")), person("Farah", "Yasmeen", role = "aut", comment = c(ORCID = "0000-0002-1479-5401")), person("Federico", "Garza", role = "ctb"), person("Daniele", "Girolimetto", role = "ctb"), person("Ross", "Ihaka", role = c("ctb", "cph")), person("R Core Team", role = c("ctb", "cph")), person("Daniel", "Reid", role = "ctb"), person("David", "Shaub", role = "ctb"), person("Yuan", "Tang", role = "ctb", comment = c(ORCID = "0000-0001-5243-233X")), person("Xiaoqian", "Wang", role = "ctb"), person("Zhenyu", "Zhou", role = "ctb") ) BugReports: https://github.com/robjhyndman/forecast/issues License: GPL-3 URL: https://pkg.robjhyndman.com/forecast/, https://github.com/robjhyndman/forecast VignetteBuilder: knitr Encoding: UTF-8 RoxygenNote: 7.3.1 Config/testthat/edition: 3 NeedsCompilation: yes Packaged: 2024-06-20 01:21:29 UTC; hyndman Author: Rob Hyndman [aut, cre, cph] (<https://orcid.org/0000-0002-2140-5352>), George Athanasopoulos [aut] (<https://orcid.org/0000-0002-5389-2802>), Christoph Bergmeir [aut] (<https://orcid.org/0000-0002-3665-9021>), Gabriel Caceres [aut] (<https://orcid.org/0000-0002-2947-2023>), Leanne Chhay [aut], Kirill Kuroptev [aut], Mitchell O'Hara-Wild [aut] (<https://orcid.org/0000-0001-6729-7695>), Fotios Petropoulos [aut] (<https://orcid.org/0000-0003-3039-4955>), Slava Razbash [aut], Earo Wang [aut] (<https://orcid.org/0000-0001-6448-5260>), Farah Yasmeen [aut] (<https://orcid.org/0000-0002-1479-5401>), Federico Garza [ctb], Daniele Girolimetto [ctb], Ross Ihaka [ctb, cph], R Core Team [ctb, cph], Daniel Reid [ctb], David Shaub [ctb], Yuan Tang [ctb] (<https://orcid.org/0000-0001-5243-233X>), Xiaoqian Wang [ctb], Zhenyu Zhou [ctb] Maintainer: Rob Hyndman <Rob.Hyndman@monash.edu> Repository: CRAN Date/Publication: 2024-06-20 03:10:06 UTC